PynPoint-0.11.0 (source archive, commit 4211ec6874825bd9623e941e3e3f76c78cb056c0)

PynPoint-0.11.0/.codecov.yml

coverage:
  status:
    project:
      default:
        threshold: 100%
        if_not_found: success
    patch: no

comment: off

ignore:
  - tests/*
  - setup.py

PynPoint-0.11.0/.github/workflows/ci.yml

name: CI

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.9', '3.10', '3.11']
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          sudo apt-get install pandoc
          pip install --upgrade pip
          pip install flake8 pytest pytest-cov sphinx
          pip install -r docs/requirements.txt
          pip install -r requirements.txt
          pip install .
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Build documentation
        run: |
          make docs
      - name: Run pytest
        run: |
          make test
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v2

PynPoint-0.11.0/.gitignore

# Python
*.py[cod]

# Distribution
dist/
build/
htmlcov/
.eggs/

# Testing
coverage.xml
.coverage
.coverage.*
.idea/
.tox/
junit-docs-ci.xml
junit-py27.xml
junit-py36.xml
junit-py37.xml
.pytest_cache/
*.nbi
*.nbc
pytest.ini

# Sphinx
docs/_build

# Mac
.DS_Store
.vscode
.vscode/settings.json

# pip install -e
pynpoint.egg-info
pynpoint.egg-info/*

# Vim
.tags

# Tutorials
docs/tutorials/PynPoint_config.ini
docs/tutorials/PynPoint_database.hdf5
docs/tutorials/betapic_naco_mp.hdf5
docs/tutorials/hd142527_zimpol_h-alpha.tgz
docs/tutorials/input
docs/tutorials/.ipynb_checkpoints
docs/tutorials/*.fits
docs/tutorials/*.dat
docs/tutorials/*.npy

PynPoint-0.11.0/.readthedocs.yml

version: 2

sphinx:
  configuration: docs/conf.py

build:
  os: ubuntu-22.04
  tools:
    python: "3.11"

python:
  install:
    - requirements: docs/requirements.txt
    - requirements: requirements.txt

PynPoint-0.11.0/CODEOWNERS

* @tomasstolker

PynPoint-0.11.0/LICENSE

GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

Preamble

The GNU General Public License is a free, copyleft license for software and other kinds of works.

The licenses for most software and other practical works are designed to take away your freedom to share and change the works.
By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. 
To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. 
The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. 
This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. 
Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. 
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. 
For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Pipeline for processing and analysis of high-contrast imaging data Copyright (C) 2014-2023 Tomas Stolker, Markus Bonse, Sascha Quanz, and Adam Amara This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: PynPoint Copyright (C) 2014-2023 Tomas Stolker, Markus Bonse, Sascha Quanz, and Adam Amara This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/philosophy/why-not-lgpl.html>.
PynPoint-0.11.0/Makefile

.PHONY: help pypi pypi-test test coverage docs clean clean-build clean-python clean-test

help:
	@echo "pypi - submit to PyPI server"
	@echo "pypi-test - submit to TestPyPI server"
	@echo "docs - generate Sphinx documentation"
	@echo "test - run unit tests"
	@echo "coverage - check code coverage"
	@echo "clean - remove all artifacts"
	@echo "clean-build - remove build artifacts"
	@echo "clean-python - remove Python artifacts"
	@echo "clean-test - remove test artifacts"

pypi:
	python setup.py sdist bdist_wheel
	twine check dist/*
	twine upload dist/*

pypi-test:
	python setup.py sdist bdist_wheel
	twine upload --repository-url https://test.pypi.org/legacy/ dist/*

docs:
	rm -f docs/pynpoint.core.rst
	rm -f docs/pynpoint.readwrite.rst
	rm -f docs/pynpoint.processing.rst
	rm -f docs/pynpoint.util.rst
	sphinx-apidoc -o docs pynpoint
	cd docs/
	$(MAKE) -C docs clean
	$(MAKE) -C docs html

test:
	pytest --cov=pynpoint/ --cov-report=xml

coverage:
	coverage run --rcfile .coveragerc -m py.test
	coverage combine
	coverage report -m
	coverage html

clean: clean-build clean-python clean-test

clean-build:
	rm -rf dist/
	rm -rf build/
	rm -rf htmlcov/
	rm -rf .eggs/
	rm -rf docs/_build
	rm -rf docs/tutorials/PynPoint_config.ini
	rm -rf docs/tutorials/PynPoint_database.hdf5
	rm -rf docs/tutorials/betapic_naco_mp.hdf5
	rm -rf docs/tutorials/hd142527_zimpol_h-alpha.tgz
	rm -rf docs/tutorials/input
	rm -rf docs/tutorials/.ipynb_checkpoints
	rm -rf docs/tutorials/*.fits
	rm -rf docs/tutorials/*.dat
	rm -rf docs/tutorials/*.npy

clean-python:
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -rf {} +

clean-test:
	rm -f coverage.xml
	rm -f .coverage
	rm -f .coverage.*
	rm -rf .tox/
	rm -rf pynpoint.egg-info/
	rm -f junit-docs-ci.xml
	rm -f junit-py27.xml
	rm -f junit-py36.xml
	rm -f junit-py37.xml
	rm -rf .pytest_cache/

PynPoint-0.11.0/README.rst

PynPoint
========

**Pipeline for processing and analysis of high-contrast imaging data**

.. image:: https://img.shields.io/pypi/v/pynpoint
   :target: https://pypi.python.org/pypi/pynpoint

.. image:: https://img.shields.io/pypi/pyversions/pynpoint
   :target: https://pypi.python.org/pypi/pynpoint

.. image:: https://github.com/PynPoint/PynPoint/workflows/CI/badge.svg?branch=main
   :target: https://github.com/PynPoint/PynPoint/actions

.. image:: https://img.shields.io/readthedocs/pynpoint
   :target: http://pynpoint.readthedocs.io

.. image:: https://codecov.io/gh/PynPoint/PynPoint/branch/main/graph/badge.svg?token=35stSKWsaJ
   :target: https://codecov.io/gh/PynPoint/PynPoint

.. image:: https://img.shields.io/codefactor/grade/github/PynPoint/PynPoint
   :target: https://www.codefactor.io/repository/github/pynpoint/pynpoint

.. image:: https://img.shields.io/github/license/pynpoint/pynpoint
   :target: https://github.com/PynPoint/PynPoint/blob/main/LICENSE

PynPoint is a generic, end-to-end pipeline for the reduction and analysis of high-contrast imaging data of exoplanets. The pipeline uses principal component analysis (PCA) for the subtraction of the stellar PSF and supports post-processing with ADI, RDI, and SDI techniques. The package is stable, extensively tested, and actively maintained.
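To give a feel for how the pipeline is used, here is a minimal, illustrative sketch of an ADI reduction with PCA-based PSF subtraction. This example is an editorial addition rather than part of the original README: the folder names, the ``parang.dat`` file, and the dataset tags are placeholder assumptions, and the documentation linked below contains complete, up-to-date examples.

.. code-block:: python

    from pynpoint import (Pypeline, FitsReadingModule,
                          ParangReadingModule, PcaPsfSubtractionModule)

    # Create a pipeline; the HDF5 database is stored in the working folder
    pipeline = Pypeline(working_place_in='working_folder',
                        input_place_in='input_folder',
                        output_place_in='output_folder')

    # Read the science FITS frames from the input folder into the database
    pipeline.add_module(FitsReadingModule(name_in='read',
                                          image_tag='science'))

    # Attach parallactic angles (needed for derotation) to the science images
    pipeline.add_module(ParangReadingModule(name_in='parang',
                                            data_tag='science',
                                            file_name='parang.dat'))

    # Subtract the stellar PSF with 20 principal components and store the
    # median-combined, derotated residuals under the 'residuals' tag
    pipeline.add_module(PcaPsfSubtractionModule(name_in='pca',
                                                images_in_tag='science',
                                                reference_in_tag='science',
                                                res_median_tag='residuals',
                                                pca_numbers=[20, ]))

    pipeline.run()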
Documentation
-------------

Documentation is available at `http://pynpoint.readthedocs.io <http://pynpoint.readthedocs.io>`_, including installation instructions, details on the pipeline architecture, and several notebook tutorials.

Attribution
-----------

If you use PynPoint in your publication then please cite `Stolker et al. (2019) `_. Please also cite `Amara & Quanz (2012) `_ as the origin of PynPoint, which focused initially on the use of PCA as a PSF subtraction method. In case you use specifically the PCA-based background subtraction module or the wavelet-based speckle suppression module, please give credit to `Hunziker et al. (2018) `_ or `Bonse et al. (preprint) `_, respectively.

Contributing
------------

Contributions in the form of bug fixes, new or improved functionalities, and pipeline modules are highly appreciated. Please consider forking the repository and creating a pull request to help improve and extend the package. Instructions for `coding of a pipeline module `_ are available in the documentation. Bugs can be reported by creating an `issue `_ on the GitHub page.

License
-------

Copyright 2014-2021 Tomas Stolker, Markus Bonse, Sascha Quanz, Adam Amara, and `contributors `_. PynPoint is distributed under the GNU General Public License v3. See the `LICENSE `_ file for the terms and conditions.

Acknowledgements
----------------

The PynPoint logo was designed by `Atlas Interactive `_ and is `available `_ for use in presentations.

PynPoint-0.11.0/docs/Makefile

# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

PynPoint-0.11.0/docs/_static/

[Binary figure files omitted: betapic_center.png, betapic_pca.png, betapic_snr.pdf, betapic_snr.png]
kX\YrhUG0ZYfRS9F.cg[ĺWk'SvC?LYi'~A?x6~~~=~O6~'OA?W(m>z3e/mO~7گQ|Omq} [iUeuz(^Yxnyo~|Dz)[#,7OS:SUt&AWN\goE&##k)oI C?C?3B?C?C?C?A?C?C?CQWF?#9~5{~3v]oro+Ư'_Y,!JQ?Ɏ6Ó+PsotBP\T^W,or?wѯX`~UW/TC?k~D?!+~~7څz_C?+l%_/_W,C?C. _a+u~~ʾfv^~ [~_~ ɞO+{׿!eĚYg9#Ywi=*^/~qj~\>~5d{◨P~~7uo5Bk~7C?E?C?C?9!5C?[ק߃`Y~Vm9/XZ6rW_<9~!w?&;]ԡlAQCrO^5T+ѯAQCrO^5T+ѯAQCrO^5T+ѯAQCrO^5T+ѯAQCrO^LΗ\.اC3j~E{dNٗlgQD@q玮3Zs|f. ;4Z [Hmou1=.2 V y_+ ϧl1SmN6rGkkw[&*pn?Š Я{I]V'_ N7~ ާΟKߨ /N7~i/TCQ_ o>Mw^wGWCM_MWCAG.?EQ_ orVG._ߨ /N7~»+|&Ri_[OYY_wCs]0oܕ,kfV ԃ\Y\YuyIo__Q]/]^7^~~[:~;E̒Ơ_@6O9.RK~msz[R2z04 >osTo/~~:~> CH|~~%A ? ԃ-~o{-^j?[YyoտyrV>9W.!$?M*T,yb l^f61i9Uߩ1{a2W U\yv."CBo}/3F}~~~~7go4_w~藯~~1~sF}~~~~7go4_w~藯~~1~sf^STmƾ~-W>ӝ)<MC!Nְo|_9;*^7kyJzF&;EN3|,Cb5C?C?Gfw6c6(ȭxC?YC߲"5C?C?Gfw6c6ڛONMWi\ܪ8%M2^j9_?x?ګ7{8*?_|}e7?ɧ傉+_3`o/~~3`o~~'H> Xds}g d#\(^^0kFqj|A7{X7Q|֬_x.m~gRo[~跷Im:%F???~~&5(~{ ֬_x.m~gRo[~跷Im:%F???~~&5(~{ ֬_xq{Vv) ʮi݇_ȫңQ,AӀA*ӏGWo_+ܿxuQNyn:rFfr%~+Oߨ8/؏,N_u~ >~7UЯ2_C~S|WZCQq6C?_E?Ym O7~~_|/S=xA{J+G}ZK0;=3+u_b S8}[dbqޏEb-TVxbuwЯO ϛ[x)vSYysuB?}C?Ŏ{*+_۟bG=7X]'C:G?kSh~7^g~m-TVxbuwЯO ϛ[x)vSYysuB?}{yg|GuOM`$>[l,Qoܣ *rEW,i\U߼~0r;r ݫa]HqTE6>~̩WF?I"+V$Я~r̘+$S\+Y~A?z~~f~ ބ~f֣s}ˮ&Z1Ȯ뻲x_).Aŋȹ{inVoqbmˬ_ ɧP~wB4Z+~7+C߬A?C?WF?; v oTW~'CY3d{z޷~T/|x_M)"\?*_:+{y2eҷ&9*D\\"/]05Ɠ?T}5dCy}J _C?C?C?C?C?C?ueC~7h C?˷@?_~跪C?k~;~O]Я -Яr;}2 r /OɨC|oI;|<_)~u_7T|E=(oO` /C=;xC~WЯ,l8ЯxA?k~7>e/ (&B?k~^Oû_C~G?]G< WW\rdRvQk68~ɑKH֤VQy8s|Rv9_˵~ G^=~~7oC?k7`>~~3&~~ GoD?{*[o/H_-J5ۤX򨿲(7GT(INt<ϔ U޷_>fS~/rީ[杍<ڢ_.NC~~~~~~~~g~C~s~VE?C?s[ONj˺2[ mƣyW(+1wS+ƾo})߻`C~uͯ~C3ѷdoUEr]m kE?C?C?C?C?C?C?CɃ~~~~~~~~~~~~~~~~~~~~''}+F+{6/zaV ()lڤY.t?k~s͟o61RDC.QQIg| c.XϾ&N~~jA?C?!~~~~~~~~{C>LMކ~rdx/"SEk34;~=[Ϩ+kTC?IAI"fO~~~7soeE}~;~}=*;Wߢ_-+ͧ_?-a}=ヾ}{vQBw%ק(?*&%՟[/,oIHW*55D?y?uMrHd#>x,r|W-~_N6yEQbwzF?C?Co/~~oI?S WZ~~o`;?on߅#߃~~oﶿE?C?;~#{%}sΣ#[JrёMl{6f$ߩZ=ϗ=}1i=F)^!pnc'?\^υ !n& ʕύ]H^+_W/Do1br$G:"}>rپ߱4#E߳d:ȧSded w[&_$}1]_"[!55j߾\k8-dUO)} 錯ɢ3r< [r~Ҍo:_^+C?3~~NG&~~~~z&+/hjX^Y:/1_ٛgY%߮7G)!55vȿsM?{u1yՐV!+26;(]ټ~Fuu}C z PA?C?C?C?C?C?C?C?C?C?C?C?C?C?C?C?C?;~~ xI ɵGGN+ ,VfoEE?T>7oC\Dg~sͿM(?_gzt{=a)ȗ*hyv>4Ay_];'Lk~~~}]],֓Oڼ`ARV\2ӭ????//Bk&俽g!^?)FYE6r ^Wި46rA C?C?C?C?C?C?C?C?C?C?C?C?C?C?C?C?C7Wѣ ~Gݯ^l)7hLTKo|6i ty;A~u߽ ;)? 8*yB|5Y̢^ L>M7/o~~3/2Vҹ~u f_ eЯsC?CS?ɶPK~O_s>s=k~? 
o!5#^nc;_`~׌l߅ɂ売}d)S0ыE?C?C~G~~~Fu叢=E?C?C?C?{ Օ?~~oI?~ȽGV~H|~U*;Z 䟄_!55.3EH\uo _$SӔSrWA?C?C~~~~~~~~~~~~~~~~~w\or=Q_r^% n?x9*/L>EB _3!_7s)j~Ilt\ A_庫~T"`|=R2އwՒMoW?YKyMQ_BQ1Z+ (Gc>%4$#jFC:߫ڨy~y_`cH]</_~D?C?C?C?C?<~~~~~~~~~~~~~~~~~~~7ϗI.Ib}|_9_Y{ҟY.C/C:\?F]noT3~FZᷣPYu~>}:l[d3h7C?C??~~'*A~~~-_CC?lO`%]/hOS))9Y\wh+JZx/@Ϗ!h0^f}8{C(ڥ3F>-\+rzf>_)rFUy4Β3y}~~~SxꋜsÞ :czss˗UZ7*#t~?>~~~~IU^R$#_Q,^9v"l>7B.0ߘG~~~~ F~~gD?k~w MC?;~GEVbҊ1+˷ +MWyff@KS~r%ʋ<ȕG.rLl[Xe_N}9B_C?C?_pA~-/_C?C?C?C?C?C?{AC~W5C?C1 ި~5C?C?S{]O eIn'd:$WB>MlC>I6f~Ph1y+JW eVuG~Srߩ̻+V|C?C??~~~~~~~~~~~~~~~~~_~|{>/!%fsg/#*GGnȚ\4Y~5,Q3qѲ}Sw?߫~Vw!g~1\IC?C,oWVב@z|O$h+~~Y_=~~~~rr'Wް~1rHg| +}GK;)]< r5|deW+~ȕ=[l^fQ5FnnW|ڦɦFؓkCC?C??~\{~~bB~C|.=B?C\~~~F!CW_> /r^=z~Zʫ'Ȏ.|T|Vo kkE|kGWuKkX'Oh4/7q̲Srˣr)re%{tWF?^@.>_jo^K+O~;{w%mKw??p.}~7yCie%+_joV>~mw^s?CQͣͪ/}RJ ߋw~VrǾ~G?TO/._Ed9G '#k)zZ#$Ʌo`|exr~͒nlwr|t|.6ُʿ$ʵQ@o[^\'2FQVMw~~~.F~# nhF;wzB[i~ߥ+;D74;=!4CZɬ*~~/~~.-ͶW?hpC3ARJ3<;_n/z4e oތ~ O=F nhF;wzB[i~_wc }1ЌwԷ NoI?mf~'fxwB-SW/U|f/4Y~~׼$ީZd1jɋ פwCƒdc."GeE˃\X9͊}Tl"\W93^޾~2˯V?Mַ~~oC/}C?C?{#=~w9_!-o3~7~BC?C?{#-{=Q~$\Tw)Uz7/*_y"—WJ+N_$>yӼ~=+W2kluy++p?x/O<,gorwys~|s|C?CcԠW?9GټyfɣR dy) _BҨԿPKuH:\?H0)r)y)~sڸA_iJ~MbKWF?C?C~SFD?C?C?KWF?C?C~SFD?C?C?KWF?C?C~SFD?C?C?KWF?C?C~SFD\,^\gy47^K_K7t zkU|6fH6?߮bm5(]{$*?MWySV]m} +?C?C/~+^uŲG{C?C?C?Ceo4B?C?C/~+^uŲG{C?C?C?CeՓ6f='/w* Oֿ/Q=ޯ__^}}| ʉv`Z)GߋPȾkw5&$$:~ f E?tVoo^CJg&E?tVo%9G_r^:/?o! k2MdZFc#`26,ɣ^~ey)Re++#"o||׌85 G?o<k~~;5C?KA?~跧e|ɡ=x"lwD?k~藂~~+oOC{C?CDЯ;~׾bR+wwVV'D*ob9;yey//Xn8#ey|82A:7).+_C?>~4ЯMC?~rv~zF?9My;C?k~|=^Vy[NnA(4^?*Dijy;y彫!kCm8B(ToQͽOMCEЯ-~~7*TC~~7qVA~~~ ߨPA?[]}}ѐ%${?ˠ/Z}MGbjqTJ?4 n }owC{Wnl_kϟgT+~~yv7[os߸o7Cq١lA?C?C?!{Vne~hoq?Blr-¯F}$8EW|e^dq㞢n}ϳotzso앙HP4)w~7${NGM^~yGVF~w~~g#w f&RC?\8z٧+o_j)oEYhn^2Je E-滣'[_ ~?wN6}/ u%WϔK*_^_\%~sVCE~WXI]~k~E~WXWkΟ ~ 9~V]UCPѯu;-\VCE~WXoޒoϖ/5C’Ne/~ Яb~ ЯS9epl[?۳zSss#5_,N9*~T~l̢,n\+yj}%֤<~~+uźVeKeG|]oCPON FV?٥~㡢2~n^;O9G?k7^Skoo0*;~/V?q~ B7voMoo0*;~!k޿j'~o0*;~]?? ;(L?x^>  F~)[  F~9|ف  F~|YxFM>ySk6q^xA~+wM\!G"oד;Z+ے})V'?O@ܣɷG3rFr̒J0}\|SluLaK]m/{k<~~~?~o~~?~o~~?~o~~?~ފ~q嗟& "f$\q0UzrUˉv"~`*ĆnMv_g:=?y*R]zr/?  "~~J9*k~~ FWB~~~ioT{OJ9*k~~ FWB~跥__hey|> ͘ύkՏjI0OaȦ9x+V>w}P?y/T|mv8G?CY~f0J~'~~7k T)cdf *E?{ ݣ߬A?3S~A?A{C?5gc1~2w~~&gdhe}=[7WK?*/gۭ"x+|%Ϩ.|yoT rF͉-"?mV)oo]GO^GWeA?C?[W~~~~8Qyeב~~YSWRʾF& _ey}sѯA?,Yy.5;~=+E~sЯgoU<C_C~WY^߳\k7w*{V~ _ey}sѯA?,Yy.5;#2ƿz [RzWK\YK-ֶ1KGn s3ߩ+gkXhPyP~_F#7ݬm88kA?C?C?_{ vm5{/W~6vϗ+^C]~ ^;~~ X)|T Ȇ-(YcGc"_b/u&w7̷׳\Z}3z~㹝S^O#~ o,~c~<׿o/Fہ~7Z1w?~7hlj~~@?-E?q4wD?Cv~ ݏ~%7dCl`E`2t8쌜xC9~L(&Vkn"_VJ$f}6Wҟŷ@վD6|X~~Td~~~~{/~~/QYs3~C?E?9*C?3k~Ճ~~~'G~~f6zޘ~#kWi;vި,wτ*Gjȧ6{T<>,g^2F[ ERnx(nxg)hk~ )T9 iO7`C?C?#~W<>o~~7GЯx}C?߀ o_\~A?C~^(փ\qQ\YiZ !O}+ fROCQLߧMn3TlՕS^rl~s2$|p_5ur_JG?Cɂ~~~~wVOC?3~~~~7 o4C9~~~rkI+[=.%k7Ƴ`>}nqyӵUjީģvrm~7ev^WC+84O{_\(UCo~~Q]}F~vٗ1_erToW/lC]ce~~Wg~ oט}U&.G~vF?5f_~~Q]}F~vٗ1~Ҋd~mʛ]YҢQ\_ir=q$:l򙦄$jʥ,ն_5︝ {dvZ~~K~~~~~~~7ЯC?C?C?k~3g^_r|W~}훱F\RyަKxe3~.op.#{//~]| Ev<Ο=E?C|.wUC?C|.r\e٠_C׎}/~pػl~~\kGׯ/sѯ'~wQyM7B)y@/[#{>Ey#?mx1dӨW=Y H8š}UtG1^. 
h0켥G˗ i˸G ;+4^4˿{߭'^OH8š{qt~VG.gK}o1N,qYǸhC5`3߸#&X?g]ǚTOs|qG{yQawzĥiȓ0P?y:PXb1q㎋ٸ#fKT5{I}o;<;m8ޱ污;R/d}yǚԧs.dhpyo~cL%r<(I?:ҤXO/?}nsSrɧjR@/0UQ?c:qGoKd=PcnR>yi s>⇙G=qID C1`:_kv_Muy|8~GK`P ;KsO* w<߇7x9<ЯA~+&u[= / kx9<ЯcA>P:X>ǚGY/?rI5ma>g"tѱ1c[|0ECE|Ց&uy]3#^b}=Q}&9/ h;Ҥ/0/q?G\̑_L &@b5{7iR[%mfipǛ@?GԋD+q~jRN?C=q=<~GtGeR/lg@Nʲ=3ߤe{py2g;/|IH6#6swR[_ ~y{yqIL"!.ǟyIh)_}#cWdGdR/Ͽξ}.1˚=P?N㼘Ѵ9G{Gjy|Y}#:bI-r/{rҟ>ڤwoʬFVHX"<T].<:2<㪚}F?Oy].qbKcN*_yꇚGPp{R/+k旜unRy081g9܉[bqtē"B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!uγM]%tEXtdate:create2019-08-02T18:30:07+00:00ZO}%tEXtdate:modify2019-08-02T18:30:07+00:00'tEXtpdf:HiResBoundingBox306.59x297.184+0+0%?&tEXtpdf:VersionPDF-1.4 G:xIENDB`PynPoint-0.11.0/docs/_static/betapic_pca.png000066400000000000000000000706041450275315200207020ustar00rootroot00000000000000PNG  IHDRi#gAMA a cHRMz&u0`:pQ<PLTE 1B#00h#Lg/i2c2b.k.l3a0g.m,p+t1f-n/j1d1e,r,q-o3`*u4_5\*v+s4^8V5]UUU6[6ZwwwWWW*w7Y7X(z)y8W)x({:S9T'|%9U:R;Q=L=J?ED9>HI?GG,{HoHl;PD7E4AB"Mg&"Ph#*~'}&%B>F1~B@CsyQI,}G(xC;Eo$3"G*yG&v!;uDDD222DWGf2zCrU'GjDTH!r!$'~$0"Oh8L pbKGDH pHYs,,sRtIME& nTIDATx$g~w`hV`!vwȸ;$iDl9J$9Ҷ٭~Ptu=WS+c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1ƘΫgzls1zfPcܱ̎eL7T0Ƙ|ز1Q?ccgs=~P9&_51#wZޘ]o{=Fɛ.`̝ȱ;(g3ӊCE̴~PQ?39TL+g3ӊCE̴~PQ?39TL+g3ӊCE̴~PQ?3rGHy<-Gjw#omi^|qBl+GoQ?s娟2gSYF̕~7˨rOf3W,~Q?e\9~KvIХ~w"Ȟw#`)Yp45yX{= ݟH9is챽Q?s_^WN$g.߉D%~y];D/~'3uD~QH\"UKDw"Q?sZ= CxuaEv ,|0&NOɼ?m2"KVli^^ii#rjryqQ?sSk3Q?vQ?sSk3Q?vQ?sSk3Q?vQ?sSk3Q?vQ?sSk3Q?vQ?sSk~#Ŗd+) # 7;ȣ(4*$JaP#A>sz UcOG_O̓y~Q?3O~/~IE<]'QS?$w~gDN:~8#7AGȉٺz4i9m?KVƋZ Z!\ecϾGN:~fc藍~g6FN:~fc藍~g6FN:~fc藍~g6FN:~fc藍~g6FN(uasGYwi#oC\v okK&`J7xKdORcnEx[ ?C'R.]TqWo&^9l@~~/; DN(~Q@~~/; DN(~Q@~~/; DN(~QyN?FrX2~ZUn\sUV (M@|Pw#4݅ފp ]WXץ}E{{Vr)h/%t HQyFOgOQyFOgOQyFOgOQyFOgOQ'$2Vԍ y ,(J{Kkc59W%k AoֹI۽5)v|'%z#mD?8Ba0Х4ғQ{$oJ8BIOkMtLw)'Ҵ"Tg{ᨕv~s=7EW#R d3/RAGḺĮOM?oOOoٻ~k~%~ߨT~~>;[׆9/[zxWOOvCʪ~O'ˆ΢~S77ը~3i~.sHX{]7߹y~ޯDO?R1LVT/- \KwoQ?S#d=gNS?S}g{nZc_ū4-\P.NKS?Sf=5}}O8o4=͊q-k==Xy JV |vz"OOv=?-^Sԏ`f 1sZX!dɼ:N H֙瑺1]ѕ!Eem) {R2۷<*p7R#9 tcڒ@ QAcVWw(.Q w_\b6;wȚr' ؟2Q?S?iv7=GOoQEO;^OQEO;^OQEO;HrAՇj7qކQoG\7euURr-p>zndBU{Z )1O#TBr4pޖÚV[Zɶ7*VY2?޺?%pWPًGS?\ Q?SES?\ Q?SES?\ Q?SES?\ Q?SES?\ Q?SES?\oaEUbd2Jr ȧ Td"RN&3o"cQplF`2e,l/g~#ȝFZJ3ځߏQ1z$[DZos"LӲE|oZQ̱WۋGS?noQ?SFS?noQ?SFS?noQ?SFS?noQ?SFv\YL,pQsyAkQ.g!Z+x]y.`gAWR+d׽!iz#B/dގx+Romo| w*vBQ{2]_׶wQEOOOOOߴ~;q SiEvS?S?S?S?>݌ga,ч k<@)!ota~~mд4UIQH1H@~toZQ*vR^:FᔶEto*?:kQEOOԯSiEvS?S?SgOQOOOڞQ?VoGQ?S?S?k{FoZQEOOԯSiEvS?Sk_kg0I`VLOǠ3e>+WYڮwJn{@Cnocd,c1s<-vVqZQw Fg۲h8Gs.u~7Kh~~N'39QΣuQ&$ƕzJUVg^ܴYu?śu(  lo.R9䵒<crOM==Y+׭M˾x:ke(n y[-խM 4x{hxNE^lY2}w꩟߱eS?S?S띡oWky l?~vAh?uO!٢ɯXrs ˣ[] "W}s eEx}1mWis G5[{#B \d;F2\1'z3{P@޿zG%9F3%mDOOOOOOOOOOOOOOOOO-Kir١`a۷"t8$\u֥_{֊K'PCЀ>l?UF|97WV}t@Z՘Ƥ7Kua~mlmOR:trP ̒Zͼ_bSNLV2?K?-[GOONL!S?S#ˋ~~w Y^ߕu/@.d)Hsr98o^=k%(ANpE!f]9Q*Nh3.G%9%tu*d'T@vC3ۻj,uFޚņ&E@sE59F:^I)u(~[nGi)9޲og^OOfv6QʱlA~~3o1k܊]~~o3^OON[MxQ?S?;i= y~ߋ~~76>c5 _;T0.9S0 dj!Cq 3R:!sNNȠݾ#[frJky\fAޖ#h;K1jmW%pRoF_WKﵷ-w%" vN;'k_2X;2V:~kk~~~ׯS?SLO\qO9P?S?;~~p0S?SSo~!75/pSS?SyY]|w^>? 
DO[[n^O9(.OOoN>tx]M?fKY[sַVF3v "V✇\YS.u(;rZr^ ڥم_=UL#9(ʧ`41# %Z %ǷȲVt8fOֆy1{OW!o;2ŨLEOOOOOOOOOOOOOOOOcy콺s.1Ix%:%`Bs8ȹT\$-ks^vQ֙BtmHjP˵Ϙջ[VDϹLޟP4kE^/`+j#[ TKB/ F*_7c?%gx']fCOOOOOOOOOOOOOOOOOOOokjG\!,U^q\b1sQVz9Z"^<3NȜշ#"{ȣuVȹAe' ig]Mt ^NG: K!ٓ˝%Uۜu QauLU bM;~~~~~~~~~w}Qv"(0|.]}~FBX -z\"W4/9{P5YfIֹv~;d:3 \AiC(;KJ:BRjE,ΥóhQWHόZ(Rۛ_kg;jӆpJw5H9dkz!9aOOOOOOOOOOS?S?S?S?S?S?S?S?S?a?eg9t8s nj`C)GcrQvwA,.fLt&y9NuГ<"̘8BNz #ij6_ГL 3s(xޑ *ջCLVTXC(u1(M;~~~~~~~~~~~~~~~~~'= R7əpB-c\ݘp^݅:3!s.qJ]bCʥu~ӷw> 3+^ə|_ƗsrL:>0X^ZI o7OOOOO-]GjnEꬎΐ!c}4hcu0m<3uR=oV9Xug87:s˜BVTBrJ*m{]9ՙͤ펛imKosax=Be($FZv!7KVu)(KE,QBۣ~~~~~ǖMOOOOOOOOOOOOOO~,9\ }>+tٷu(s9ΖKx~ڿ.3T/wdp ޟJQ|[7JZ{:s"Q z Q~>&] od{m̫;]*\Fx%3Xx*_® d+Nh֙Q<7ԊhԞ3G,c >Ħ4S?S?S?S?Sc˦~~~~~~~~~~~~~~~~~~~~~~w{7 -RAkʣLnu+C ]9E*!?͘4Ke2SfR! aF0ϭX坂dLf|7&(4f+ϥi~.漢` t{].Ar^NH:SҔ}sn=G۵~~~~~~~~ߩ+L`VY!tc}/R73o [3ފ{]Bި z:E0&?&ޟ}Qh4c8@C"~i̟%SUP2++ܑ7G)1aڻj]wzӿ^0-m󊡩sPZq Ȼ Z.2_!T?S?Sc˦~~~~~~~~~~~~~~~~~w嚥91g )iV`/~إ,g<\fH ;#hE)/2! ruLMSaD_˒#Q1ѥys۹Qwc lt3'P2:D=Cs3Ix%Y%3*3|#A~~wlOOOOOOOOOOOOOOOOOOOOOOOOoeӜVwKo+>N^osLy¨ef(G4d<5m<䕜BwK>xD?oL|BX]lK2#7S?SׯжS?Sy鷚Z A~~s|g[noNS?SY鷈OON[~~Dg/_ƿzl[%w_ÿߩkY@\"4B쟪;?mҲ\*/ɣ7K\1]@Ȩr:0yʰ,FO/D18Gf=oayׅhgwn56UI?9OBٓ\w}y8:QZ׀s:缢:|{7Vtiu,lQM+S?S?S?S?S)գ}OOڟvø?{MOO_89 ROoF{?U?S?~[#FKS?SYl:r~_;&$C2rQ-{^!  =>,20uȐq\p;Ke3iZp~~D☿4/GrL{cmR32x~]ޡ$ɧvNRy4g+9 uߏpn+'׭ɾru[n-[s߈TNFrMG.OOo^v^.6S?Syʓ4:#~~7;vS?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?~\i͔م)H#}\"R%HzfW$+Y<ےio;5N٘_ޘ2#%uIy br.9ޖN6Sv9)s8վ>i$Kԥ+*O!;OOOOOOOOOOOOOOOOO-]߻FqI\wW*P-^ɞgO#i+ 4ʢde<_@}>~7ǤQJR%1W4m1jy.dԲ kC(QJ0ݏrϟ"n @*k}nw?d_q#]:.B8LS?S?tOOOOOOOOOOOOOOOOO_FEFnm)PZY6k aGvy]2>viS2'0ψ3n*i*Ĕ)VoϿ9&ZY+/Iw }ba`\PU̞#¹{QjYy]=Յ˽-iʸ\=3Y<.db ZP?S?Sc˦~~~~~~~~~~~~~~~~~~~~~~~~~sկiōk6bts L R,+zZi#GȪPU.i\wT1i&f_ѥhowxzم:|lɧOc_69-9 }cDeڧvJ}T ofXBNz fhf{k9y+[TFsgW?S?SIGOOOOOOOOOOOOOOOOiL?eKH;({$sYbٷ # Ϫ$?WO(]Y1[Vuy')sʣl[,/c~/ߍ}EGCi=1#HH~KPr7G*TosC(Wۊ^ l:ḩ$ϒkr߱eS?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?.7hyhX\Š\BZH`GS!4`s&KN`e*4BUEu\:-kU;-Ljژ;ʘ\rY2M "Q`U`֊+<&K2R @!~#X}>Խr8_"/1)9(IITQdhM&7#Ts{v1\+7xI&MJu$f{YuU>_~~7騟U4]QzOgs 3eg 96o\fK2R7>Wr"Mk "#t(Kz?M!٢9  > \G)-}Œ g&‹*I{wۥN9 sQE2g,U$P)nNF1V?E0%흦~~~ǖMOOOOOOOOOOOOOOOOit둪A+9)B|XNϣ50ry/`ɰ"ڞdyQ#ZԖQ\̓^0.vJw"oLju܉%ԀQR?!Krɞ_p -b>1ܠ&*k(fY~~wlOOOOOOOOOOOOOOOOONYL6gt ` }D5&6gVNZĪ!L{JBКF*uߌ"^mˋ VG*}**c (K0/I*36Ǘm9tK$iwPr_nu'RssEX z*okF,sgQ~~~):I\X`B.dsoG^v|ΜkQ>9ϨeiC(J;QZ RR @ 3g_";G2I颿6?Gc1#ݘ_"y {=~7Be@}F?ǷrF̙/d^gd՟:rG[JGN7:I~~wlOOOOOOOOOOOOOOOOOOOOOOOO(F{9*tkp +vHt"*S <¹4-wSfi>bve?ۑjL;u#]2?E1_1pL~#0Of`ЊԀy 9s=73W Tm\NQyϭsIOOOOزuԯ1a3@rE3mC%a@=s)= [xe?m-zR2=G9oE>%=e߉ޏ1rwb9濊cј9f ;A]UJ`Ll/M.y]&=XO_ј:y.-ھ#zx#9LLo0srQ 'Y<;Oy]OT{ܲP?S?; rߐ S?S?~GP?S?;=UWOov=o&_?S?SwbM]\?S?Sw*w(~~wz-Z #tE~wPH%zj?Ϭ0|F=mu:1rH,}vQ+%,7ГYrE„d{LեAr ݍ'۳"x3&vcv. tiΌi֊BQg{ӸփHgS1i[~߉@*ץ]7. ?-/OOo^-W]S?So1~~wV|[vm̋ߜ[\oDPOo>-V[WwS?S䩟]g^T?U?SY{2COf x|Ot{/zS?SүIݕ, 7׳*K٥J%ǔNO_ϣ.s!uGy۸D>!P<ȼ"ȒyOWsݱXYiY_8#P?_"27#tj]. &rtGr>sBf9< ϭ$WJ=#Xx 9[D{omMHg~~~7Vڟ=.OO$[7U?S?; _OOD{r1Bh~~77 ?S?So H]FVb~Μ1eowe‹\+?gNWP2^i4N!~ }E{ܺ%aznʹCXq]r`Ǥ~t6"s9Pg ѧ79VZr(h{{CgɜޯhڛrJa~|!GSNE &4{e|OOfrȢ8ȷ\\|S?SywըGҷ:ПF(ޖ}^IR*õub\@ޖ-_R=fJ*le{i*io>{(#Z~9gRN^caMs"ucB7D2/ƨ1/@?-ٓuEe8Zh`\T04fIIzi~~~~~~~~~~~~~~~~~~~~eK ZN7dF%M7`sL:{Cױe%ڛ=]w#yH^XWb)(2+c8E0F0e_$;c76MomC?Mly'B ǛgL(NkHyzgL)3rRU.U *OOoQ?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?;MpWrLp43K0j ɜ!ds;u eLN.+1ճΕߜK*Oϩ{4UjoyJrpK^WǴ@31 Eރ'9zK"u}\ҽmu~ǃ͹AU뮓!w#~~~~~~~~~~ӎ]_Z #Xgf)#{}o˙Ƌ0ov2\' ߎӴץ;M˞Ez2alL^1E39󟪶6xzrUR6n+t^6p5[C#ES0^zy]-#r9m]uTgqyAOOoQ?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?SGOOOOo[=6W󰵱|H4Na>8#PHSՔ}nIZr*T>Ǡ6`ykc}eѴVպ9\ދo^K`$Yf{i&(JB]PFRz/ku? 
BOOOOOOOOOOOOoQ?S?S?S?S?S?S?SaUtk[tݮ_*)BrpJhcx׮,|v~=7K_qiwR- J~  tz@sM9yK䊮PgJ[7u('[oyn])irw0ҁ3kE9~~~~~~~~~~~~~ӎm #Ψ}7HG709Lꖁ!7#[ZDd"@sR*C{q.M㕼B(cyJ򺟔䊦@cDڳ*+VCRޟeEBC-#[X;-NyWLΜBڟ@vߴ~~~~~~~~~~~~~~~T?)tK]I?Pr;]Ϫֵ]98?TڕEod\^ޠ\;Ke@{Aޡ|VT3|L~04T/{~+_z`BץW[D9V9kH59Kw;PsU?S?S?S?S?S?S?S?S?S?S?S?S?S?S?pOOOOO^> e:._aL䎃0rRT+ɥ٢BZ"qy.sSgˋT/W%e庣.̍_2?kŹRpYG0HMG<㛣PoU;%YbϫWBOOOOOoQ?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?SGOOv609k.Ll'$PL9QYP>ՁaE֪.޼ÒzwZ1 \HA38BڧXPH:[ɘ!v 4ޟ0`pn{ՏPrjVoFgDdT r@e>~~~~~~~~~~~~~~~7᨟ 2Y9Xţ.V/.\\/jz0!ӊ:sr &dv"]荷#9Ki%_E8.m s Gy姑rL=eu Le{ oAtKރh=MI]D{@{yymmNOfopnS?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?S?pOOoa!HjEރs i\N!t5@~ȵnGE2/[CԥiGoW{޲_i%gݫģ~~~~~~~~~~~~~~~~~~~~ӏ.Z 97íyjn~ϣL ͙x%I؟o_lOu볽:bq\;,OF0 @xAr9|ב:\uud(|z3[Ŵ9\:RMq~~~H9K94ƔEkߘy]^lke1oιL O (F\|aޠ')G'SGS yE6n:94b^1'r&IqI\[o*ۙ߆M#R+^Q~/S?SiDVj+wC~~~7JxEn\oCOO[ SmH9[`?24{! :v^n&M++ϥ(NIjs #fYw+̚Mym=!lz~~;AKz_OO߳w׍[<7SOPٯ~O}k-.ßN׿zCY\?S?S怟˟V)|EO[vh}OOv#Έ3ڵZO8RmLTp.>ޖs17\_]pV}E+F^y.@"~BYuiEUP/z7[7_ 2\d֊,WJmf]E9o濻W?S?sROh$S?S`Wʱl[*~~귳JNi귊5o}+%A~~A+1~~~;!q)|먽ar21,혔q`¼M+]NM ˒@vdBdN 2ץ7 eR閬s~~2~~/ߩk7] uO^Ng6Pg)?ݔӨ[7U?S?ml#kbjDOi-W3r9o$~4wIV~~MU.OOo9~,/0!9K@ȧ :(H9KykeشV9iZ^|-Q+ZΰhZru(ЅQl/kZBGJQosp/hO)y=Yo{J֙+w#uv#{6e͚'.La~~;jQ?S?i異~~~~~~~~~G!~~7KEOzW;%*6ovyo skޅ919q)?ϴgަpJs|ֳ!x{XZAA&[Wq4pJ֊lB޿Yǹ`'CFv8hZnJ36KT ϥKg^X^*~~~~~~~~~~~4~~we~/S?S?S?S?>մ+˴r,жvrW4G.jYZ<ۼn]#\B>p ];%ssʞ짺>̹T g8Q[MP;]DիHd%GS(~~~~~~~~~(~~~~~~~~~(~~~~~~~'(ۘv<ڡ_A -Π9;[\"Wp}VjEY2PH}@QG!OdZq /|2ߎɞd-ԻҝI}[]-~4~~~~~Ӎ]-~4~~~~~Ӎ]-~4~~~Aa|`0ފ"ϩkW%aiJoR{.2=T?1!u{EW,\qIZA{VG39H+[(bE;a}'o=\ a8ڶ?RD3r_ԣ~M7Ϩt~M7Ϩt~M7Ϩt~M7Ϩt~ 3$gSImh@!hwC0|>qԯnjHI 4jvsPVVnk4N˵OET9u$' $Vpľ R:UY+6}=9o9"BoQ?SFS?noQ?SFS?noQ?SFS?noQ?SFS?noQ?SFS?noQ?SFL ?OA!aa&$GS] T^uV&w߳*'F^iK=SN*%>%oȖ.s x%MK37*\y.މ|0Nkg_9_o 4do{]!w1~Nj3w1~Nj3w1~Nj3w1~Nj3w+IX<_=qVn.*KU~T~\uG^G =\fU(o}aw+B1 tv ߱'{{"QGO&;xOoQGO&;xOoQGO&;xOoQGO&;xOoQ $yaمq-8+.,rrn* ,?w7T|rBr .'˽Ykd{7 SG&~w~7O@~ ˳!֮Y$&rc%T.&oEj} aݵRU(4SXڹ.Zy.SOWdL$`xWPcD9/Ycp %%oobQ?SE&S?;PobQ?SE&S?;PobQ?SE&S?;PobQ?SE&S?;Poa括~aV4rgiw_}oQ?30Nؕ2q[vm̋Q?29~xWly֢~GD~e2iw?syL7?ܘ&u9v +5~c̎{O;1fQ?c̑[v٫sO'xؕA~8uE]]bvx'\]'vyҎY|m?8vv,g{VoZqy#+^O~ӊM+.y$oqv_|obQi1|//7ߴIw~7ߴK/M,7͸5dsjbĚz;΢e{d㊘z~ծC:Fu߿If Ω5vĚڪ4F5;֟eslTwg^~o!ͩ-kmo՜'N3?_[ h\kDZxAdV8[|ڱM+b:@-sy:bgUڳv-syӾ~ѢjGóҎ-m^So^]SלU]ߗsVXmJէގ;M67jYֱ>v.[[3l95Vw1uŗkʹ[0v4~ˊIU7Ogl:om(8y̫@siG1F#[ژS!3ll{eͫ7vTo>޿Xy5Yއ'k7vΥU-+b.G;jF!|62S6̪/~mKA!oZsjb{vͪ?,r^ͥwɜu2Ɨ?ͪQܺRe71'S=SjXѸٴyE̦QzgՏͭQKWXosmiGmJ9 ׯ_i>R\8zY[կصݔ?連W;v>XWVCA!}fiڍ󚘫?)-{6xe3^fdfEjl_,ڨzT/nHjg̦w >9jbXkbycowɬwm/O_=|ݜU;7vlx_"ҨdS@&ۨlOWJ7̪[K;6=yj^r)g#~mԶ_'JQҎM;6ׯ4j?IBfK/k֪U_Ym@So=uF튘Ig_?-3svrtʮif^۵؛I;{9y5v<[0ZJ"ި꿷޳sk[lj$AvȤ۱ފ{tQ67jYn蕉g~3mT}YutdXlUSo7jy/8q~lT~^U;Yc~[VĴ,[r^-_֬O56'͘ Okxdxg˶nG]+ 7c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c1c/_%tEXtdate:create2019-08-02T18:38:12+00:00L %tEXtdate:modify2019-08-02T18:38:12+00:00=B 'tEXtpdf:HiResBoundingBox306.59x297.184+0+0%?&tEXtpdf:VersionPDF-1.4 G:xIENDB`PynPoint-0.11.0/docs/_static/betapic_snr.pdf000066400000000000000000000264731450275315200207330ustar00rootroot00000000000000%PDF-1.4 % 1 0 obj << /Pages 2 0 R /Type /Catalog >> endobj 8 0 obj << /ExtGState 4 0 R /Font 3 0 R /Pattern 5 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /Shading 6 0 R /XObject 7 0 R >> endobj 10 0 obj << /Annots [ ] /Contents 9 0 R /Group << /CS /DeviceRGB /S /Transparency /Type /Group >> /MediaBox [ 0 0 416.410625 312.808875 ] /Parent 2 0 R /Resources 8 0 R /Type /Page >> endobj 9 0 obj << /Filter /FlateDecode /Length 11 0 R >> stream xWnG W19LKc$Sl !eEo?%{!p<>Vsyp*TYo 3|·ۥdzXjP+[7ٲTڇ18Q73iוL<J !/q&U@u./_0wFU2|z+ Xߗ_Η xܷXk`,\uNmD4> )y89Mnm~NW[k|Xu-\ͤF] D) 
2E힑K>OÛrqmPϿzsZaThØc{c`ta"av1ke> d*OF۠ͧ4iք;MO(-7i<ܴX'Zxjx!$dn p<3Q"!4Zڛ`{ۡD&cK4cbtvhm^1 aBFS o___ݽy͗Ï{aV!fN˧G+R~+Xf_Y .8i4Q^?_s}qE,2+Esȃ94P-*SV)p»j$-"D 5@6zv )#L\2) [ 霹#|iުc;D<6(w aĴG ]13qHi#S3u(*K3zWpBRr~`ieie^P HhQ֐TDAY^\| @cхlXsI7 *̤> stream xE;! C{N#y6;I,%?+:cck'R: g]a2So\\`nN|愯U—L 0;Q|$ ꔍ0G@LM/yjAPkGkS4g*[ :L?= endstream endobj 17 0 obj << /Filter /FlateDecode /Length 338 >> stream x5RK[AۿSjy쬠8K Qry:i>ޅԂ"iǚ5Q4b)>(!SHfd2r >֛L9is(Iz-v|YAғu-Mk"925Ǟ2#A, IOH3;g-yx/~2U2 `; ("$h(fƚۅ"=D^ ,z2'; Jjaq6JS]g&a氳R2 qp/${1aļ]l\̹&ӏ(H\M(?7} endstream endobj 18 0 obj << /Filter /FlateDecode /Length 304 >> stream x=;0 C{Ȍd'>2VI(/u< i& b;w؞D/)ϡ+E:Ū0[M*K õ}74uK hY pu;Gw5<TQ!OJ|<(!\{0FS@\^BAjI'> stream x5QIn0 .AO ``hKxA[֌]< 'Q^{ .o8|^Z9O2 D`@ai'ώH5YN_KK(O~ J.kO8'O [WLcD.A|!]'@;綟Wuu)O645I"%Ҹ[{TS endstream endobj 20 0 obj << /Filter /FlateDecode /Length 245 >> stream xEPC1 =`,{wHۿ=JFp!Z?ZK oGFA= 3A΄@xFnvpμ39Zpә\'mBITqTqLύׁlӑ!KI%&~S*)[*EH䁓M4,?Cb̠Q0qGuٜ9-L|X&Q)2>'\N}䢥Uޑ"ۡW%Qէ<Y> endstream endobj 21 0 obj << /Filter /FlateDecode /Length 247 >> stream xMQmD1 \ky R]oC /)%K [UC?13,=?TPbht/"+ߏe s`&4`oI&ռ3d‰ATwM,3V7: lx%D`r Z`Q+ tĺv7C/਺x} K{,|BL;wI#fR:=b}@e+ (\* endstream endobj 22 0 obj << /Filter /FlateDecode /Length 90 >> stream xMA "OPDtz_NE5jK02kP)U0\ 2IL{qIqzz"X endstream endobj 23 0 obj << /Filter /FlateDecode /Length 338 >> stream xERKr0\ 3gNWp:<  2=eH6dWdՐFD)򹼖\nJ?72ͮЪG6F5+# CzVQdv!:Sp,Cu)mA#o<rLn[ :[m@ s` )(UI­\';PЪt79`Òho>F, f1H'N=q:őpI8@/ u:eMžBRq"n]ElO ?*3b Ԓ枾?9 endstream endobj 24 0 obj << /Filter /FlateDecode /Length 52 >> stream x363T0P0T026Q064bC.@rs`r endstream endobj 25 0 obj << /Filter /FlateDecode /Length 68 >> stream x32P0P4& f )\@B.H  %[B4AXf&fI8"ɴ endstream endobj 26 0 obj << /Filter /FlateDecode /Length 45 >> stream x32P0P4& f )\V.L,іp "} endstream endobj 27 0 obj << /Filter /FlateDecode /Length 255 >> stream xEK D#> stream xEK CBGG|tJ■!M@w'/mK >[ x6n5uVhR}ith6s+ fz:rGp_Gdf)|Q]dcnk]3s: endstream endobj 29 0 obj << /Filter /FlateDecode /Length 214 >> stream x=PC1= |7˥m$B6BLɔ:ʒ)O>Kbnd6%*E/% }ՖC4h9~ 3*K6p*3 mtV[ Ф`׶ r " JMrR=ot-N=Dkq: DpFjtaŲC5=kz7hGt4CָR endstream endobj 30 0 obj << /Filter /FlateDecode /Length 80 >> stream xE 0D{`~&f( JpO{:2Sa ,S`5FR죰n_uzS*Ovvq= endstream endobj 31 0 obj << /Filter /FlateDecode /Length 236 >> stream xMPKnD! s\I$!CU۱T*HUSbuM2yOQ nAb$<4#',;Ofč.`5[́9m:7@dP "B.ї9`Jw&\>աqZ coYaLq_֨ɲY I!}{ y+ՠ0u J*}$]S endstream endobj 32 0 obj << /Filter /FlateDecode /Length 49 >> stream x36P0P040F@B!H Y@8&+ & endstream endobj 33 0 obj << /Filter /FlateDecode /Length 157 >> stream xEC1DsUA wJo-%S'"h0yM%V,&rAJ1xN1븨ufihW3=5'M<[ }@8IP1}bv">G)#qbn fW7y endstream endobj 34 0 obj << /Filter /FlateDecode /Length 332 >> stream x-R9$1 ~`LtIUls#h/#xE=f۴[iGiK,W ;BjW0wy.2meDkag؏]e8*Jl !2J'Qw\I2[E™w2;yNE{ kF9+%|6vzrYɩHHӺ NKؖߗ3| endstream endobj 35 0 obj << /Filter /FlateDecode /Length 68 >> stream x336S0P0 F )\@>,́,# .C c0mbl`fbdY 1 r endstream endobj 36 0 obj << /Filter /FlateDecode /Length 17 >> stream x36P0C. endstream endobj 37 0 obj << /Filter /FlateDecode /Length 131 >> stream xE ! CT>՞0ABA";06Ѣ76իc,zRV鐇Pi0QąYLCaΘȖ2MlTv<e~ma, U^ ?KwUBS0 endstream endobj 38 0 obj << /Filter /FlateDecode /Length 338 >> stream x5R9@ } ] v͜~߆_ CVie!U-.Im W%ڥ Pt,6˯JH+kLwIi"Eo7o}=@.^ AS(i|Ъc(ew 4<3}(~_K&(? _osџa`Ś}@*z`yT endstream endobj 39 0 obj << /Filter /FlateDecode /Length 248 >> stream x-Q9AzBsˑ C :-qPO+Uwu9HTM]vf5,?c 7zqxLu5{kOfP2+qSușO \ ȹeƌ#M!RH&3AQ~#aU#j \Ks4;<9GW +ET<pC7ҹ^s0XM7/=[ endstream endobj 40 0 obj << /Filter /FlateDecode /Length 210 >> stream x5P C1g dVukm;aBXȔy)K>:L." 
u%ʚ +`p&^7`i5tႦ.B%|u{OxjrvC` jMX> /FirstChar 0 /FontBBox [ -1021 -463 1794 1233 ] /FontDescriptor 13 0 R /FontMatrix [ 0.001 0 0 0.001 0 0 ] /LastChar 255 /Name /DejaVuSans /Subtype /Type3 /Type /Font /Widths 12 0 R >> endobj 13 0 obj << /Ascent 929 /CapHeight 0 /Descent -236 /Flags 32 /FontBBox [ -1021 -463 1794 1233 ] /FontName /DejaVuSans /ItalicAngle 0 /MaxWidth 1342 /StemV 0 /Type /FontDescriptor /XHeight 0 >> endobj 12 0 obj [ 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 318 401 460 838 636 950 780 275 390 390 500 838 318 361 318 337 636 636 636 636 636 636 636 636 636 636 337 337 838 838 838 531 1000 684 686 698 770 632 575 775 752 295 295 656 557 863 748 787 603 787 695 635 611 732 684 989 685 611 685 390 337 390 838 500 500 613 635 550 635 615 352 635 634 278 278 579 278 974 634 612 635 635 411 521 392 634 592 818 592 592 525 636 337 636 838 600 636 600 318 352 518 1000 500 500 500 1342 635 400 1070 600 685 600 600 318 318 518 518 590 500 1000 500 1000 521 400 1023 600 525 611 318 401 636 636 636 636 337 500 500 1000 471 612 838 361 1000 500 500 838 401 401 500 636 636 318 500 401 471 612 969 969 969 531 684 684 684 684 684 684 974 698 632 632 632 632 295 295 295 295 775 748 787 787 787 787 787 838 787 732 732 732 732 611 605 630 613 613 613 613 613 613 982 550 615 615 615 615 278 278 278 278 612 634 612 612 612 612 612 838 612 634 634 634 634 592 635 592 ] endobj 15 0 obj << /P 16 0 R /S 17 0 R /a 18 0 R /c 19 0 R /e 20 0 R /five 21 0 R /four 22 0 R /g 23 0 R /hyphen 24 0 R /i 25 0 R /l 26 0 R /m 27 0 R /n 28 0 R /o 29 0 R /one 30 0 R /p 31 0 R /period 32 0 R /r 33 0 R /s 34 0 R /seven 35 0 R /space 36 0 R /t 37 0 R /three 38 0 R /two 39 0 R /zero 40 0 R >> endobj 3 0 obj << /F1 14 0 R >> endobj 4 0 obj << /A1 << /CA 0 /Type /ExtGState /ca 1 >> /A2 << /CA 1 /Type /ExtGState /ca 1 >> >> endobj 5 0 obj << >> endobj 6 0 obj << >> endobj 7 0 obj << >> endobj 2 0 obj << /Count 1 /Kids [ 10 0 R ] /Type /Pages >> endobj 41 0 obj << /CreationDate (D:20190802204309+02'00') /Creator (matplotlib 3.1.0, http://matplotlib.org) /Producer (matplotlib pdf backend 3.1.0) >> endobj xref 0 42 0000000000 65535 f 0000000016 00000 n 0000010445 00000 n 0000010251 00000 n 0000010283 00000 n 0000010382 00000 n 0000010403 00000 n 0000010424 00000 n 0000000065 00000 n 0000000399 00000 n 0000000208 00000 n 0000001788 00000 n 0000008891 00000 n 0000008691 00000 n 0000008244 00000 n 0000009944 00000 n 0000001809 00000 n 0000002047 00000 n 0000002458 00000 n 0000002835 00000 n 0000003138 00000 n 0000003456 00000 n 0000003776 00000 n 0000003938 00000 n 0000004349 00000 n 0000004473 00000 n 0000004613 00000 n 0000004730 00000 n 0000005058 00000 n 0000005292 00000 n 0000005579 00000 n 0000005731 00000 n 0000006040 00000 n 0000006161 00000 n 0000006391 00000 n 0000006796 00000 n 0000006936 00000 n 0000007025 00000 n 0000007229 00000 n 0000007640 00000 n 0000007961 00000 n 0000010505 00000 n trailer << /Info 41 0 R /Root 1 0 R /Size 42 >> startxref 10659 %%EOF PynPoint-0.11.0/docs/_static/betapic_snr.png000066400000000000000000000711341450275315200207400ustar00rootroot00000000000000PNG  IHDR2hN?gAMA a cHRMz&u0`:pQ<iPLTE))) wwwDDDw.yL[=jUUU222666WWW bKGDH pHYs,,sRtIME- LpIDATxb8aOONWIbm93gU9V;]Bu%y|Nڍo8Ko9aIױ 2Apu½0&~<)Ԅ=1܎L sB| +ގ}9~RzwS9Vi_T9|Uup XqvIwS9U d͎#Orcug$a`1ݦ>ﺳ3Iɱ f˒u =Or2<jk;lo6y0 ~ޟhٲJA9ע2Z?<AnlL96| Z@۱IsIםEnyt?U__)V@k=Ab9ֽwq{KbJuٚp^2r7 ~*V lecr oy-oAʱk c@,_6~2xR[[S? 
dlݯQrltz4Ǚdy- 7Vd\?6b:u9kn#M2ذI9I@ήS7~2xJ?w:29+~c`uWeQ7(8 kH@&ע2D?wDV1g;ScUpaU)Kn oTj\f. $uhU^Xuwc@h%cAob %Y)XCfy$Q7vܽ~9 $7}TDDTץ[7ر[$>8 ~$sL=H{UMs5 v h$9V[S̰F0ٺ]MPށ#ȰT{QcD'#KXnұ#c#\jrKI;S}nfRԺQwvY/17dϥ ,eӴ#ʡ8RԺڥc>}M:ON,C~.n`!?K Y,~.HR|>rc,8Nj?Z7cڻ9c!~.n`/D ~-)HRt=Jq 4RԺ%o܏ ;*9HR?cJj?Z7ұLf˹g(~.n`T͈&ϥ wؓdͭ&ϥ .z~^!9\j\5NnWޱ"susbxu֮f=^su3y_:0wGޣCj?Z70ұ'#Ⱦh_l\j<=mjڝz~.n`K4 ;l~.n`vX<#w3\jAio=xٗ>מIRfhaA7,B#K!.G; ?&9B \jtұ03Ǐ vT0*\jd5Σtݲ"TRԺړ??ຽ0Dϥ Lndϻ/-Sϥ Lnd߆/6YBj?Z70Q;>ܳ9w,ڏ7Dϥ L~dM"K6: c#qHA\j4_g./Ga7 ~.n`]ؙ7bEHR ?8vch7c:Hj?Z7r=nÎQ;HÊ?du" #K0'#"S=b +Gݹ"YP\j(O9܇VT )[;89\j( QR;fq6 x(#"IRF zOFuw#ozuSv"IdNj?Z7 p166#ڰrH֎W#~.n@}2?N] .7A6(hٳ$RԺ;*ک۟:}SadQ\j_tv91tqXuvyFϥ֍mGokY 9:lƦTiM9RS\jnyҴ,1_GM; #5ϥ֍ݬ4= c+iIj?Z7rwmzwXQ_Q0h7bdQ \j]3oDEE3r"k9RS\jܷQ-Kwvza{Kq0RԺ;vG]RaEm-Gj"Kyk}pOgH#E5#HRFKVkS=b+n6<3(~.nEVTZFGj~sf~.ndb]5hÊJtב"HRF֎YX[ч_+=jxigmHϫjTUOՃozmBuS|mݣ+k-^,;#{ WR5yL=67Mn`"[+UZaS;C[JE4 s{UO#ݺnuN`X5aGjo %751{qo{s-ah~;eb^1 \SRFL|d~VTw?o°kg*/MGjn)x?bKkwOVHd&iVT۟=;&z4Çv[Uc;&2CylU7#n|i50HtVTϜLcXGl=+.~^s" VU퓭skECч}oÞ}>E`c{?@CRcÊaW%[70֊*̯;\Ru-YK];nǺy__)V0[+jgG%; æb\rV Xqw.aVT-ųqYvZZ967>O*Fr 򾵢5̻@Lp9ʱVR91d֊KÊ|=Gki`rK/Ψn`DUܙ^[+j+ZЀϽ9„ZQݛ*vQhPd#QVTmwKnLX^*gc(KThU"kJlkE؅ /9{񻑤na1%֊qz ;$[bun̏;Ώ*)-P[+#4/9? '`f.ZUNprkE|$yf3(*}e=6L$WL}uOv ΠnEحQz?? M_VSW}^7zYu.nWdynO9@݀[+}Ɣ$ϗX=5Œp|kE|g|ۓ IW}MՆOi_~ymcuo|tKj?Z72syRx.=J}w$.\j-&26 }.Kyak5eaʱ,gx#Kyy =J1=su#+{V +(8rLD\j [+#ۣ4Pj?Z7gUs7=su#'w֑Q_Je8K\j [+i\?RԺ/|$ף4wJ^*/Kakd:F1k su#lL<+9)yq~.n䃭sgc=Hj?Z7֊(O}bYj?Z7֊+(vzJO<ϥ֍lIRFY/]~X8*~.nV+(MrߨWRԺSrvRԺm$-5XO~.KY`rS]?Oϥ֍,GAvkԳfoϥ֍,HCvCsVZ'zHRF\q} i[g'\jQ({ѕݤ_RԺCnEmYipORԺc~{KٖBRԺ[f Ǽ&tn/\jw~KrKQ۳eϥ֍ \^GfweϱGRԺVAG٦n3\jȀ@䵖H{$K>~8wRWCU\jH߬#SIRԺ'嵖Nfϥ֍ez):-7RD\jH^QT9=bvc+Mj?Z7~e4a0oS &KɻIp,9¾IRFO)Oν)}ϥ֍DoUS%o#K۲ :?~.nN$!ϥ֍I.4MCy~.n,i}ta"~.nS/s3lJܜ[z&KiAy*a]y(K;E giѳ`su#qǬ)N6S%O~.n$͌"3Ye\A>IRFھ%o\Ҽw;Cj?Z7vfAr9<2@}ϥ֍3L즲3.;Uj?Z7& G"2Is|{?jTUuV@l#JE&xY~\x繟FYIF!ALnʕ I.T;)rcHNT1 ,i*# #ǐ G!4/R~nDCnȐ!#ǐ? 
bGs.歟[nD1$gų]2۵4&3UENBNJQdm&5L|@F!9Y i3$x &t"9$,%&= w(B"ǀDdp' .&6Jc.ǧ# qSb}gVuiNH|rlqs&>ac {NMkљ+}~}t}ﺁ\~ƚtcaڽye_?]mK=q3^n`SG:e|/ځNZ~1J0Ǵi ]_n`1Ô}{KXBM$ccM=N]^n`)%W SAW/+OM9Vo|_ ,ctrC0G%d+37V9V^[^ P7z%Ƃ-!s\t~dEoa{~I ;kƜKFMy;QƔOTt}Ϻ?aQ#1f 'ks־G@"ǐ+S46Sb,^Kȼ<6#xUSbs'Ջ#3OF!=&+hYB8KxǤGcA9!ǐ iKVsa aw|X%ddMz{~sqEUIpOiT%d{>_)rï S7|]o0B,!sG׍ʐP7QcWD6yu=N1` ل}eJ[1 -_$lYqsK󾄬ywEa@sӱaFncdk$} ٔ, KO$P:q'Ú`*m a %͇8wsmru xLl QڥOuRI;(*#Mݶ%dɋm1 Op}]кN0%dSyV2nS5EڦJ ȑ {&:ϻ {mt(ڬk?8U3p8nd͎ ?,~^[ f?aެze\]cufۉZ*޴Mς&zxE9yMun7.|lzH^Mv/k~&I+y P7 ^70^؏L>MJ4 7 n& }-ǔ{/j7zzz$-O;O{Wgo|]躁~ecnX %y `M:^ ;\:̻n` KȦl-)t}Mȷn`UHRlyLv?cϼs9I veޖM4k=DN$me|;˹_%|@F!>6 i?m@91*v:45#< k 83n`~nӮ ueg Ŧ폟3]<sL̈́SYBvrv򻟇`pel7fIےwL1O/8}Y~1~WͻקCd["m^CO;,cScbcXQKfl_3~^T9Vo1}cBr?Ӳ7 +Ja ejé'd+d?Ӽ ^70aEQ΋GOs~.naEQ/!1^Q^Hq}躁Y +J/]B6k4CKr Q]VeIl}_Kns"%r `/ZKK$/;;/Ĺ-9 UH%d_\Q܏cXQK.t 9J3Ĺ-9 Ê2LL!Gi8%"bXQeKfn{qd ch'@,!k,BOpVAF!Ê-ZB6k4=*} L%@L՘1JQ΃vEl'8 h%dkfNut]dȺnBv89fNr&9aE??&ayGicVݤI+cXQvs$?]CQ^y.2dD +}dIq~.nw`XQd7q~.nwX`u9OOr8JSj?Z7^%$C\od|!vAIRFt }î75ΣIdآ3!K5ÊFlGiJRFl +IO/6K&J8JSj?Z7bcXN]N6y=vw%x(~.nư"^l3.Gi~\2"KmVbSt0.Xϣ4}ϥ֍ +BolGi^2 ϥ֍Ț_h|ĀXI֏ekކN~.n ~FٲyW~6Зϥ֍)6w%e˦:JsOJ}TRԺÊ0$<[2QLj?Z7bXv߇%ʖtf{> ϥ֍VĠ1sfsx۞ 2\j݈aE1EA/Gis ;r~.nDgX,Q6{4nH-Ȥsu#Ce3Q$X1 ϥ֍ 0r96_dv};XbA&K1}<W5X>-ɭDLj?Z7"6 {Gqܖgd-u" bXs<ιW?K(|#vk bX3]>&i>XbhA淟7!ϟcn`ÊdQfdwCtg?7nʟBam;ײb~d7!^c?Wc]tcX}1>%d;ÔJu`ʾ`EGi6Dn }A毟'x9 9B-9J2T_I~^1c;`DeI~^K˱VDL 4ϹWlWͱ1@Ű"f'kzEcZ%8уúVDDu7V4~k6"ǀӹO;ɚ$c1""{icEwكVpbXσc@;ob2udX<ᵾw_c@;q`QG]-:M1aED6(ͣÜSr=K҇S#ǰ&ٌ4Il#eOpX]mI!ǰ"uQy?;েzVDlӏX-+#TjO_S^mow=8)g?ׂ%QEr kz0ئ9i1ږ^yMcX6 Qģ46V]mṟw,gcQP~Lx'^M2r YrWjFfFz쯟k w ǰ'gGif͹hs4?:z\Һuvf&a6y c$96VDtGiՈs~V{YykRv9OqcX2r }^)1vn8izͱ6ԻDo1uxߌ)p5c^^szVc=HX- c,Er 'x|XDy[@75!rV4gޓcf{|ߌ=VX9yY]o!r" =cq:lsQ|Yu:eiq!v%[zwXTL@Fy50M9̫Uf#1Gi|laK+^TJ0_ƼU!^ؑ HndizuUgo`1U!ZeQ!=cGil*aKse_*+oI!m3<4ۭ~#q2̓C)2y)$daE[umn1qg7x[&q:Nuzɓ1xN1: .Z]}K 5u1{chk([+מyP?ɱ!Jr (QC†bk4#9VcHe"iCGizZ  }C,ٗvZe496<@Y3ck8~>c եcNYP1hAGizZa یd${'!֣4l {s_Ӟ-Jh.yy9<CGiZe4{RM:sl}j(6z3<QVlםIo~*VuvJ%{Ȅ(M_[+\J6_<P?11Dz1xǭ_V#~n1SzӲ,40p46H!,!_4I^'eGBܞcSfy|ݾ%T72^62OQ^VlzfBܚcSy>X6\sK5ǦhnF^PcGb &ìY>~rٯWJ[+֏!!o4=oZ0MzRrv׋os`Kc{? PvV~iz!ǐ9;}ϻI $ǐ/;|Y8 [?ooǘ|]Y x˱{Uʧ%9c K%1r n=lxwl˱Tc 7d^LFŭhO]apߗa;OiXGg8ȑ]LQv<0<̜61d[m9~]?uw/ X~"9urylf,cq;-wd$9],3'*F Y7}Lٳْj>@v<̹-H0mJ[R"P3ْ 9'^%/r joW[nǀ|yaȐu#E8ْ Ș~.n1#1lIdLj?Z w\&>'N \j"(cnS=ؒ ș~.nvډؒ Ț~.n ڧ]{'nǀIR떠|xt'gp 9Vv. 
kCaO׷v 9( ؒ 9ho !9hk.v6r Es;c%=r EL>lcz@10_mvD$`ْ 9:a_S=!9ւLÐn c(q-v`` @9C0Pr Űma/-HR.2h/-"HR.}k~܎Eϥ]9nebm}=DIR.Nْ (~.톶0@6OUUǮnźEdlIs?o3d+H, fK*^y'RN2r4c/PcdXaFG %P c9Vh,*+~ %%*=bn!nǀRUyXnceq%l >|%Po2F󣱋 Y7R6J̖T@9XepOȱb_1ƫNt`+(k` 5LyUcy/-x1ǫ4cqtC&Lx9ƖT@1䮽s%l%PUchҤ;p^1+"I6ؒ (c™8/-Rxϣd5y vyF1ymKbE6X-'X)n?TX1_-R:Ls׍&ߎ8?b|Q1Fbu>1Fz@r;ࣜk>]-c%vz[*sy>c>@&X ɻ/^1z~&Rc<*K|w5,)چ~=( ] 9| {PۿSv-tի>$Y9 ,cwYO.7f 1{}uItX,(v}?; 0hq?)c0oZP)!S9~;ed7 !RгFMy4RT}~ʸ!Bb8RB2cǙmL96| WN]+)b493n9_)]J}vlnn"!Cz0:[: qIb}7dweH-$q<Ʉ^F9DsW ٗ2.TL㫗'X6u/|xc7dCFz&7eby7d)B&s߽{M(qbHUN9fa;brٕYTU ٵA) /tAţv3g>QUp~Kɱ&m-n`H9Xmza~zz; Y =ĝxD)Gk 7{LJYmA!k@/@PIXg׼ߢnͺݻrC>Lò1,]W5)JɱKo"Ľ2()fX#VY^bYʸ!{GfA$/8~~iCK,$tEܐ@g]U$cԽaF_7dv4"Dzf|& re&(KJgX E\jM pTF?Zr  LSF?Z2 kwC@&BڦCd|C.~A䢈~.ulo,0~^9]Ӡ&[7Nb4R!kOcw`ȱ=dT{c}͙Ĉ#Z9!=tlYezx+ <|]7=vCZQH@ \jݻ9QRa{(R+ސ}O%Xϥm9DSސ9̺a4 R~qzIu7׷:S瑴維15Wa;DS>v $KۍL7m-X `ؗj%C4U YȝX9&Y݁ᵟW쯸!,buX,$c?'xMM|9*Ԑz3v=7g'㱟T5[l&Cۆ~*>"ʘm%\\mN Z3~e)X 9wШ7c{nD㭟W5S}}fS=<1V: @<~snfWjecW1>{90 @L;{["C!29MKK9vv{w 5 {敞cUmB432|dAVbc?ck4i{*=>o܌oKY96z=myNM߆ Bc"ǂ8LnjT_b{@X}cܧyضSff @2W֌%b9p]D6}dbg ]<8M{1v0y,C4u@]MQ6M| ,OW."c3y ,c48M]-izjH}^=o~7X\rgynY&I^YCe>UU6 ]dȺ}RW ,.:Dy`m|cx`<"B%⠭nX|?GciÅ/_ X"^ū]^y.2dݾ(~X eIKu`tEP%ϥ-b'@к{MԺ_h,^ϥݛhX= Xe?McnO|2 ,~3]t+XrgEcMi z9i昖YEh@V̱ N,ǴS4[EA'B+bb92 ,8Dty=UIgd13.o6 ,>?r@:pU«N, { ]1c:'dWo`ήTwGt1;"h@>\'5XYN,>nx綻1;s"h@׊9Vc6[KuL.r,e?.$k䘃m>!5,<ȱ11E*3km?<+{@i =~׏UC%Ǭ ,wZYyie~EFKXM~nƒ=Y9f=>y=cCsM k|>fjWd s96|X;!`P?wW|>6a43hLWXx-άEvSY_bS44 a|S?m?EPrYc)ue^9VKhw.,a 1 sYT/DfSV96cbu;M9f2x@b2ɱM_1E]96!?e?LW]96)Ff۫LyLLslN5$m9657Hn}LW%ǦX968b,+t$96#rlփi`P?7î_Pݣ~9DcKc[6(~n1{'BV.s{|؏;Ê`P?7XeRJKZBufp\w0ݰqXʍVIƵhZ=_|ѥ 38`Vp9 oC=Bncch,'XHcN F9VOMrlM?9~FmGz`||y3trlM7X;qmd1qZ cjPic{nss۟3<Kywևkm4G+!K'Oαrl `6X]|-`2>[*)+9k"9 cuɱ X>1e^>RM!~?Z7\jݜ >RM!~?Z7hԺv>BM"z?Zw|B@su ϥ)E~.nAԺn.z?Z7hԺ9Eϥ1"z?Zws!uRfK@ԺY>~RM#v?Z7\j7r ϥ1#v?Z7h'X cQ\Y>~cQp&xBE2hS4r, A'X ~cQ| c1p&L/g6wp{]u| |I"Ǫ&}~nS뒨S4rLI^Vui1%kr3t}]us&xxUqB叶]us&xFN 9f[5p4M&krnLA7u,Fi>XY ]_H,oα3Bh0t}]*us&xtuo6CץR7ˠsl^Jac!"`xtmLdxDM'Sȱ_GS4r1̯{VwsW%cTrcK~A7r,Y>dcc%>_S4s̸.J c1'sʴAc9:Nϱ>sx]us&xtu&pKnN1CKnAGJҫS4sV>u2h(~hR]cu)Q9y}?^_1fxzUwcu}M(S_1M)zO?:M)U7'rlm >ckcDS4'rlm,ȱq&xES4+rleAON >cjb u``[Wwت +_تȱU=8" +bW İ"FȰ"xFiϰ"xF;. B^9>vPrl= +zv +wzn +wjcإ@Qȱ4Êإ@Qȱ<?KckaXB °"@İ"@c؅@aȱ9v]8[I3p( 9fXn( 9 rlg rl[ rlgb !VxW8a[<VPȱ50ck3c+2c+8>sh(9>vP"r,fXf(9Ê9ލaE aE ṉ{HXpamHXhͰ#vP&r,4 $r,Ê97Ê9c@ȱa@ȱaSzTXX +@XXXg (r,-Ê9c@ȱ]. VŮEİ"Fİ"FgX#2c9v], 9]+ V<ƮJFð"GscX#ǂbX#ǂ=sT(9sXsT(9J3]) aEX9ʉaEX97Êr,3 ‘c4Ê߱ ‘ca4Êu@ȱ0VucaVUcAlVucAVucA<9L(9B3x]% VŮG" °" İ"˃aEX9vc@r̻ ={R 1߾>y:!|;3YVDy,Įd ޞ1v] AL1XI9VW}n _7<`]IX髌t{Uoؗ |j6VǾr 9<`mIXoB_]џX֖t骁-{­X7+r1ˍV9vl6Vd'{c$CBcU9֜yru@|rl3U +@ bcU5:>X$GkecCI=k9Gѳ&}>IxX9Gѳ&c}X=kG㚄Gc}>I=k9Gѳ&}>IxX9Gѳ&c}X=kG㚄Gc}>I=k9Gѳ&}>IxX9Gѳ&?7gH͟k?5\H U_U|~. TWM¥_Su{Qbq<|P5Ip1_W=>)JՆ-||ltEQ/to5_$jL1Ԏ ?V5~FSzMrL5 moHbH)kS ] l&|MB -V>_7II|͕0ḵ;k-ICYTXOIY RZ &U$:2HꭇkҮbm{ԟkMʂIgS'_zz`1.kvZ`o*!~]~ &|sXp]d^wbD5H< vr Mxc]U|l6U6&x>}rG(fzc"hZcͫ#:r @U,&s@(Ps@(P/M6r @w_jUÿ W^a_>^oT K.9f;w?Tu+LIVm4{oyNϱ[ߵyߪ'+c'^H}c_Ʀ2_Wj%*r;K5puv]%U ]nS}~_%zׅ8C tfCȱ a/q=]_S81i18cN~^оZ}XH=zMR7Fl9׃])2/~~/|Vޮ}Õ2FW460Xw};uαNڤҿx$zZW󧪍j3)I}A/leLvu+ɥcW. 
SZ3si;Yۿ_wzwm^ Bt' ߹wGe+c{Cnesrֻ[ǢL`ȱD?ĘcUOȱ0zmWc2~λW2eRk@WLwBWT7{W;=zɠM)cA(H|c[*Ch/lWZ>iRٔ2&;M{lܐqE1scpazM3WK ~)eLv^9p8$ːbUg2+7z1N9fӿ!:Lϱr2J9 iÌ3iL 96>z5r0uj;`e2|63XG%Yc.tub@nzS|s=Z`rغ-9c_ }a @^rɱQs,yc933~?2d%lO>w=9Xo7:~gsαOºhy cA/GslhtdžI96v )J,7056$S˘ yTO켆nL=Ǧ= 3O?|I1c@ ;slM;92` I(ssVo7+_c:L'o>`;<6'oR!'Uu1`S;AVٟ!d't'0+àMrϱywc7:i N޸G?:5R<ǔhpyeۨsϱ131}X\=z 1y c}sbwNM)9f؝qS ˜7lRo>: X y|c1|y96PoHӾ؜c̬c[ кՔ} _QRwVeLvALAQi߹ UkMx5u0'żI0f,1:c*[ۊ57)\rj[o6vvwT9( T@!e*$ )0U!PH BLUHR`B2S( TN;r2Fp+0R%tEXtdate:create2019-08-02T18:45:18+00:004%tEXtdate:modify2019-08-02T18:45:18+00:00EYq(tEXtpdf:HiResBoundingBox416.411x312.809+0+0V(tEXtpdf:VersionPDF-1.4 G:xIENDB`PynPoint-0.11.0/docs/_static/eso.jpg000066400000000000000000001673411450275315200172370ustar00rootroot00000000000000JFIF@ICC_PROFILE0ADBEmntrRGB XYZ  3;acspAPPLnone-ADBE cprt2desc0kwtptbkptrTRCgTRCbTRCrXYZgXYZbXYZtextCopyright 2000 Adobe Systems IncorporateddescAdobe RGB (1998)XYZ QXYZ curv3curv3curv3XYZ OXYZ 4,XYZ &1/tExifMM*>F(iNuZC      C  Zu" }!1AQa"q2#BR$3br %&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz w!1AQaq"2B #3Rbr $4%&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz ?5l7Z4v9h|%:& 3J>SihAuYwҽ8q^_lOJ+Sim)^FIDz\$Yǜ}kmhӸ|΋XKfB95 s֗kJ&S'Ѽ?W&S-.P>ofoYKrqFs[ZgMa]fnUFFow9a cKg"*8=+|$Os% $6N}a^q&%R $} 68E AY^1 hj֣A#nZxXa' ?|: _ԷN(GD~Txt7ʻY<?8涮w*ZFZUVȒ?'1"팧'U,?v8޻`#qQ'Z&c8-TgG+1Z[Vd5\v3g_-I#9ɆR>:qV|eV̙^B.sAC<;:d4Uj9Vx < fBh 7j*aN:֡Z4*h=ՌFVQdG\*h=ingԐôWǑg1 §?qNTi3R rzs@FSSUi#9rp5--aҮYPGzH DA .V8zl=M\Qگf6+P˵jht3 aVώE9AsP ԛ5.|L7UHڥrh)~3$,޲ ]I2} 4'ZjlG"\i5mC҂~ ;wvR\,hٸAF3zV]qZ0eRC<ՈbS {֍>f[jh_}^]8hPd@ Vu˦ϻ>.%yHJYWa5^,XK31f)pFwZʼ+˼ '"+M֗/XDWs^k"Lu)\XR94+tǧ56Nk53P78-vw_"/q+%n[qnjH9{x'M{}^kuEZ_3>a_[͡:75j~[fHȭDrj1zwS}1P4ց-aՄ'5 7q ÆӮؼwHEYgFf'w +ÚrF]hnjM- M;D8ִf@ pT>NI1#_'-K@ڵfv:V$R ,*䑊>q.N泮2#|2~2٤Y rCA[g ꮪ1J|8}+wC|vIRRzu|{Z5*c+Džî$zKCy_+Gdk:qW`VcxrV0W1VqTdqHy$q9j6 M; s@Dիep@U޸=`b/`];;O9xZ*@ǹg#"neAbr9 Ts1Y(fL RJcu>4ع4icqDdY!7g5~7c$|9H'ʫNhrs*h(-q!5+*+ط GZ9I9(Vk#d [Q>3>P1$xj,8WB`uڌT8\نҫidZ gԺmji`KiVu݁{p ][ T9;MƲn,N]ͮrf؇$t"TlVm]v`Ȭ{a#aPF/1Ȭ[N5jZzDA#ǿ*9k L/t~$i6!bp=+N"MdРR9+4ⲵ2a{ik+PӶ$tEq؝/EniIJ*n>PsRCn.J 澩H]Ka6^Hɑ?Z]yYHd#-PLuxfjS#'[MgQ].`ja854:9"z62IAi#BI^q،s] iZ~ciu?[9{Kaq֦*sڦ[c5C˴5ZKִsLmH(JVk)oSY i%*vjqVx׆fu]sKo3q^\J]2\fy|W蜐Yw&AqC{TI4@Uʱr.aJ|&5+Wc6˽U:qCdv*G#w`N*d]=Ŏ*G"t$SZ#ڿ*YWv˓.C-e]YETu9ejrxsAsbI8Fn&! }pi J]J׵c Hរj/^^j|{3'z)6g֚ĭUJI|>H⋏I%ZB|搦:sJ贈ܒywP:,1Ԇgң^+ų9%UCcXRbHW'בYv%Mu: FbGDeiQZZ7 Egbzn$ՆAkRPW_Ӟ27 -Y+]t4[]7=(;goA"mi`z:lQQq+3Tk͝ ZțƲ?-T15:kmp/ Lch9GV:#Aǡ>s&k.DgiIw1MBoX3Q[ kGMҟ YpH:W_i1QPFrW{%dPӴFx+1*ŵqxC޺!R<Wh-F0iE FWzK_hAXzF ;l%WsSMҺ;4O8\MÊ{}+\N6y>pw.*+3ei{85iGҩh6 3 psaHcm ǐ5?-RV0Kedt?VZAc)w!hi fFp(5ɫGxs6A$%mZXj#˘k_hJWR,L*)"砩?TDsS$]ڪ"7@ Z򰣊k!'@XےyjtSv6ˊ/ryV<>8r~QW#Wg8hZDWu5i"$ , o&ok 73l(ʵ-\b)7Exm|tvSO*I3B>[dֵZܷQցs3-cjZi OBԌImJuWyolVd10tB{ WJ69??mX,/4Xֳo4]}Ձog]3Yv'XYRKV=n2KSvjsr@?YLm?O` )Hg*жÎ57uil`6Ze_!sN[P@ y#ޞ#jPn㚞%b%̓?x nje _&<{ZbH-9T b5$s楇%kiRj@6ȵ9#whZ|i\N5R>҄$2@XhJn\3e!Af]6PUN2Uu6zշ?7jŘgk[tZyQvɫ!I#:Ԫ.kxsahuݴUo(dj'bLy)⩥>d >BP,c*7$tv\uT\N[欺rʢ2zS/Z׏µ3!>8ԑGٽkh[Q uܮs8[稧IEmnjCoZl?/r3N:v+ɥn}_¤K"0+NS&V$Yl[MecDs1mֳbo^ڀx/`҂s֠ް61.Nk.HOJۉYW2k1 uۛ069u!˗Yg}9RM&Sj,w`QXȰ֥|߭r6wm(})e1MiZʬ7W#k>cU]d㏭5LBn\mSҸm;V˃5i:Nze7:XuCh M9[IC)SdjOۚ#8Tb_B4k$ 6/ZM$P36jv<{Cr!'&΂.?SG! PnHc=iPp٪+)-p-ҴDco4ӒjSJv? 
Ֆȁȥ5 3'Q I-brGF?.>Р)|3mAo4xRó0fknĢ͓NUR< : @SK `qK9Hs{ջiqVZKrN})\r͋YcL XjddsR sP4̣h$Yf3mMiN ~уl(v.^E >מ@ync{kzW!t+)_mK)jYJfNk'"t6l_qRMmɵo`Z[Zd?9jyF턄uN!@#roWI:֭!-N ѱf=Vղ5b'JiUTiD\ՈNTPZ[HTL`L$r5x\z]5fQ]>nE׹ ܳĻU=) >Q.,-R<>vReXM\dUӒ(hwUr.T>R$f b➖DըTa!}kImp"?*4̊ jIR&q+QmqE湖m`'HZfS~͹a4(SJ>03nqR%~@ -*KZ ՕJC֚ )`wKr<ԡ1ɩT h[sNdLdQcKZd4liHA0vdQ xO2$~|UiЇJp9Htr*)W$<& ℮h<*ʐ9猑$Rq< l]'+r,+6T皞Ryn[k:xS[ױ ͹*9{XZɹq]-ݯ^+"݃ms<{J;ֲg$TceÙ< Z*ܶ7NTy-NEhGxBpkuZPܩA8R2kB10TO84Qgaa|O5k%eg5eoR5H'W\ǭo9V 'YsJ(mk8>kup9kfZ>^M" SiG0iԿ5eTiӄ=zTӍw <)nNsAIkO5 9"yxq1mq{UP Ю =FR >cSt7I/5zڡ5n g9XPML5j;+5jkQZ}DZs[ZNzڷN yNݎ]N(ΦaMleC ԉ.]<\*18#&hUZy`E6TBNۈ~5Xf1WCD˃@v*0SԜPewYކ?,ެ_9{**k{{43* O9\՜[R`⹻.4ݍn]ح0~N+{Jg4雺ì瞵|1[o+hLNbH[va?+^/6Ʋ7ojf16T鄚8f71r+ !NsM ST;zԱlZ6 T|Ԩ"@['s9Ȅ@[bstl3i7"[ҥK}V HU$fS6͟ h,,<۷|ӱ>f,Wc9ը5)JAZ%?ʛPNM l*^=IJ13s(_joSU{Rrz֍hKeTj̇<;Ry< AxHPՃZk&{SY5[vj" sրܪEF5;?8O)2m 2FvJм e>|Sv#)Qԅ3=hh"e9z5 Q@Yc lLVUwSah3.mvz9` -pM;ܨL+RXVuEtSʺx'Йuȼ8f6Z̼p?E-NvwʊlVmũRpIt9]B܆ʸ+"(sdZoQZ76dҹ-ӎjWxNk/9c<_<+8VB=oLddk_&KZE&tڂzU;OZlnh+]L3WIkAac5jL*\KR=?LpVՖS^u#i=SqRjwZkbVܹ&W%Vmч<>m FIbT9?A9X:uW5G}ҤhfɛrKȨ$2.TȒy񦮦'5sHƄ$c$V, 0$ֱy3R- VSfSkőH jIw]N'5w7l$Yf+cL=r+6י0qWk$&V7csWl$ s&S मv:cw&W_la]gu;>&x^1jezDXk;c|8-=;$j&02s^\sjd 3U=[a={[cxkҔݞͬnV_Yb[hU97Gޡ\W?)+|WTXsQ)u6XMoF~5uZ﷑릅wp˃Uam=Cz-7N&h-ME#jzVs/-#5w YH5tZާ1oyҙ5hJ ;Li TZ8 [(hcU59=qң|UqF61w ޹GK1+[<"SGˡ~RHᶭvRq\SJ]& HZW5ݥzU}5&am.<.Q0PgX^y.ߕG\޳_ΜFԱgk>+nB$9_|f •0́8EeZd91Kɫ1Ic(#mǚ7UZn8n*g?w=khHb=pN}+hMtQ|¬$Җwߚy^kX-g"~JZUxa-=+HCg%⏰AkN[Oju!2iVȞի`(=QBK<oQZڒOfITYm3ژf34,)`-0n+UTcj Ojsқjf h_2#6* -gR5PȖjjۙ Xfd9u|Tf  i|ƔCVEJC:H.Wޔ0?_*8&L㚼x 44emF+شΜߌg'Op,E`.OLQQ)cyc'*i# ljA3Pi\<a5 0 Ta1婼%c`5< dW#ogWm}k&:kێWw.I\I =`XjΡoCG4%$\gz9SIs'N߱˩&O#I ̈́܌?Nke]ǡ-dyca?Ӯ K80E_-pi"OVXSLy=*\A;v~==*c Z+J֡棕 2$UK-ޮ^遍"ёt: LZ7K)#T̋6k" 5q9{vY02.ĆWk5vIbAӑӚɝpλN*j,Jr2śW+AiaUqXH(/9U[(rMROcHPHw4IW L[ZGx=*Ѵ`,An,&ꅢhj),:NGD=T%y8XIkuk؊ǜYY~S"J5| %Qc4+1H]7=MOǥiæYO@ R6Dp+b8㨦Xn kYyLwHKKbqZЅKa֝ӜsDh[jZ6pMMmi\еP6{ֵǦJԲa%tyesɕTJl(=+CͨZ,qU#bAvA,B91۟LZbn9PU= ]%_>&n?Z |^.jrJDЎ޴EtE$K xud-8$ $J sF=jh2y`U8F<Ϟp1Gـn J3[*wekivN[ƍuJ)i\-'j7Tk\!Za#L{`޵KXdtO=Eml4ƶSaT^ktZb>梚#3p8BI Ⳝ-VG<^ ^Eod*75S0gx+t k I.C iXsZj3ҠԜQ5RdTmlִC|2;zdkBKbw>Can=):U@O 5v3,kC]N4+BԌ^!ZMnNA̷ yop;ҩ[QIEWV"$g5shrIR9M#9ug*t[I|܎V䞔TrE6~m* NqN^ĹL;aL:SZҮ6DwZX \-g{zs-x&Xg8h۱gߘw~u<4_o%xڦ",z6 K\FV%FAl5 rE?lmu1|Em_*UEgy jCrDdםI⣿sW,|M\qiXРa$\<!y>/l'WDqH 55 F1YwڂJ0H5C"}VmοTjk%d_\ާm5nWfcxje88JθД E%ˊU91RpEkxP(9㚆M!fTT[.8gMцVт@m;@7 `)aPP|'Zu?Z0Cv>f_ +XzrY-6u-!+[Z'Uȴy\ңVeěK2~ˁվ/'k]j(|dƍz#XqA>I%;⼓U?Z5Ony7 }iKYEzV{'qU[N=rA8q9M!?V%ӎ[H7$W֨guj⪻P1מ?pBzsɮoll s枟y??;(ӖpOLf' ۘPO֮Y ǩ__v2 ':>409#+JըRgFOӆg'O&>ыSu9U+ډVl~?M1Ƀ]Wh, @k-8 *)-372.Sh_4u@Xc?xZjU ~*41ϥD8ɭ+[N <&hP㚿S̪э5d@=+Ze e*jh¯CPIl$=qYR)<9^*"Á*XYE#>hvU ;֤|ZKp3,Rf\7a5V,KFf9U%څ@W5ZK0x6S9״(~s#8ۑS^H"CG ;BZf8SIFW݋QsL\Km852\>sYsDͨT$GjktdCXvEdۚkqjX jl\gU;Fp1PiXnK.3Vpnk||ִm'CH謯Bgر~ܕq浭n¨b{VSFfuWXsҴF85_2Z%~fz1j{6jZے:5=a˨R9PFjSo`߲iǟJFWLcY:-L\fA)WPe 飿.0 +AZI4c,==C-5hj#g\߅R5jG<#R` i隆OI]@&5Jơ*kރjk޹5'zs(.y{/RqU&$-NdߐxuʲNys ߇\!y&:ْ5C^MRbH'T]czøޙli<{Zf19sr>J.JU"TcZ|DY'ㆠSũ;S.\j[*ط7y?9 G uϖ׭+]`y5Z V!\|FeKJ\8,9dZ(\NsYRZѺ=+27N̽&F'dޓm\ǷYZ1=GJq:Q{!9ǭfܒ߅jJȪ"2qU2Oau qҩϒqXΨS@NG^qPLmF]ŎbHع$Y85[=a%Si5Ȗ6'Vb/tY[aśFW)g *Z0v+|*ݽ qɔc9v{ًKs*V2[^F]JAZ۷ҽ*:)T3H^LPP3I]`]>7Qeh[ixt׫1i/S.PF9;M,2v@tzs Q:KsV(";19^r ґ,FA~Nk23E%FE[[\S#T+CkUG5fPHVaEl1B |}jx҇[BR ;l cڴlrNi֖EcZ.\%]4NoGj 64][ |ǦEtzvlP3+Z^beUP1+0YJkǏS)W)^ Kd5?zwM?AA2?!_&V"F>k'W2JXĚUCWxz&hax♚V0o5 +f\ߌ$lfm{}&#,KtFIbkԾ ;FV9q+IyfKn1\JHYwnʒEan|x4N;{@CfChkS_c֧)0JW0qrr>;=I%afGeTlgԥwKc 07 z 9k4dos]\.s^uu>~*Q|V2nU^!k^Xjlc$r!Ul)Kޙ,r{SzګkLIt)޵bHs*  Ԣr*"΀H QiL[5VkjrT. 
Jܣ$^*'ǚn% "[>LtkIq;ǽB_qR[WPh$sDzT)0$@ȭ9*(1֦BfՋLr6Sqk^4i\LUk+͌fp_n#+6(n`Y:T;e9p_k<*f V}Ma%shGzc=+i\` 9|Z1j<ZQxVDFTjgP2>]m5;OZyF(5m5zuG'ɸ']4) wzqREq<:]՘$5fk~'Vwcu[]?6~eLmM^PȮb;͍5r C.s΍ήT9cQ9ZPGZ3C884ǕȪjHn4ٓ<9^'UeEPHϵMnMeZdӵNn65"T:ԫ ?w+u3qLwqUZ_$dINy'YlOI&Lؓ!2W?7ZvnY!`n]l jE[G'"JnӖ|tnĺ*zU,ܜщl曕a~ \uձ95%cWpZӵ|5Y^rj>A9BGk3ϭEt;U 0xc"͠m9Lڠ3κ©3?P;sKEqۊm㞬쎈p3R۟fhZjRΑ=5aUts`+N>|zH;BzRu{\bV1U5r1YUq]oSY[[L:vק.XcKV_x.]\Hv6]Lz ԺkcT;y>#vkf:dt+!X^w2y 0F XڃY kf2j5u k+o ;|Hr͍$ !3}+\irZr=\1']&չ'9=sIi2!j\ε&Y ug2f&5jmx#-5ci{QNUpet1ml_Ue%uEܢ@'FUԨ&~=ZvЫ*gZH8V$$MFk'DʎE0Ó֬Hri+6_12T'm8={̏;%LMBj!>`4s?J$Y+|T !*r)J>ZCzsNVRw&;䀑SajdM4^|4;jiZr)o ^G⠤T Ul97Thܞbŕ(\2NNZkn ۾q N$nH-V_rx4R'rcЎ,Ru-(<ԢMUasF˚ q&t35$W!=鸚k#5r٘XlmpD9Fڳ5|W5e9ZqW6RZ\ ޹{ Jϖ*HX~fmֱmKѳy_9q6BsS!Th!H/nj'k3R Be)4+?JzH#*z5|_v{՘ Q>n*h6yd.tn%Z S 2O` H\RgC%f 5:j<ҹo֥7j7^jx;JVU/њwbi3/STjFaC|[Ogn̒r*eȲ4w7ޘ!5*n("Z 8T>@)_֦I>PR15cԋ)=69[oEMퟚܼ(U@"f>V?Zd,Eͻ kViJ*Hp8-DrM+W\p-!95AC6޴&D6&G9R ȵ c5r[AJvX +n&;yb-5"$N&β;zW88XKy5T[ncV"N5yVq4*m=(T䊚(nW?\p=:K]1bv"Uf@yj?~V555^6a: H]0cA5z-D:gsMĒViÞ});XF>[O58Z xO.S,,Kҷ|5Fe«wnd͚f mtXn^5R;uc{seҺy sU4*W7[ ^ZexklE&]NAұdq:{Ve u j;-CH&o+:IJ˽1=W4So=qUe:Pŋ|3J A_PÊK6M,sBv99̮U},'&,U)tzs\G\'s n£Ār04ߕ+oسP)kM6-7b-FF-|ZvvqYpsK +N PPqV?vj ``-0d" KK#(zԳێ8V! WjͽU-8RvQVYcӊkDAֱQv'­qriD+'M+r8 / s! NMN ҝ 75v;|v)Mfav޵?-{zAZZ)I!8U G@T{;rcp L+1sI-:v'*H)8K@`x+h@^x pK}{GK9p bߋf5"'xmlW+@|־krn`p*A.bp}#Ě$s:[eW7jx++W&UzkעBsږ,?3 Ng,pkRlaFj"l,A6GpQ3ֲZ1<ג7\8Bϫ,Aj&Laϫ~餈XvNg#N=EXNDz罆Q,d5FPydbb:DDƞ,6ǶY5F0Wk%{3ilMkvl22 .r>d?TCv.~KT6vkz& 5va$Tj!r=k,cypKԒڽ[#ʓ{w^*"*eq)$or>_c&^,?\fb!`l֣y) ovF~$8S KCO0~w+F `[&'<0s{+WPysݶ֖:" bl9uO!<>ll |M|`R=] )`N~Ȭ]ydZo ua3 Oc3\><`Unm>Z<= حՌXW1ֽW>F\ԥfcMȪSCn`j4X=+lu#2Q;s*mΜV#\0%4 $cu)F ELT,@(O֊{h|=0MYO pڰ`m#1=Enl,rE2Y7 .K V]`h)?v Prd"CqWjO($oD}˅4b $T-2w΀V2I#g-ˋ)?ysQ) (:rEfp?ZjC,s֟6P{c*i ;lizɤXje8{N9]u+pp}i!qH\EڥB:%OGHD=ivGF jw2pjZ4l.~c+ⳙךXޥhe1g&|Z\\rqJXnZaF3MA.q "/sQn#'XX蓊[R!sjݼ&-/j4̍$bOzi.q+*$6Rw4|4xZ)asVBsib؈&p;&\G?+jp֠I6OJU9l1dP[TsTE9DHNhA{_㊱x ֱ85n\ j[w95n%ܝ8M2ݜu]D䊥S7] |Fc4_+)"^h}5.UXzLJ)Ui21Yh駈Ŷ M#M8ʩ%Mj]NFM-+U&ǢF`y\&j`ťW,̸[Qi,q1i%['4*L!4T҃(ȫiX]4Ut&9nrN36\U*FXǦi+rvެ: [^M>(MS1s8iY*pښdVh Y'鏜`),~OLY%HvU ʔFX歈4銅M%̭'5~ qR[ErjuyٶfqCZ@cKomcp]/ oo>te{88MYDŽ|(t?ijh0 =Cz鱔T2͸}mZCZ8xw2|TAmrx^ykc}i|K\uFɱݫUܟ8 V&okvKͼ ^*,E"dDyk4{pԵ):_s:\ΫPrGq${;jK=L>oZڻ;k>#i ~ Grǭs]r޺RԽex[t uiSko^~u[Y#{gsUv=9ۏ4-"ޢIOc3̙UZ;[^.ʜs Q>t,1.U9ֽ;sTn!>&1Q2n)Cԗ6i^Um Ԟhφ=hTkK><3ba9yVǫ jz0+)>aZ7P3K%N 9)}w @PN>0 1Մc)~2hF{1.|ذȪ)~ iS`Z&ӃW5_ 'r7zE1'KZ.ՇQJYzv)y0dѡ1iқ,ؓWY^M29Ҿ}sJ%\nUQp7ϟ”N] .2-5$AVa-S-qAi~щ3֝$ibEs' GF&1H,R /sv[<ɬx.dU'8k0یY8yB9\/Sް#SWg'EloZҵrz㎕g.ZM+l[PUX]Մ0Iv7_5ӟZ.[UP=pi]᳚%-ƐdlVsa|*gviDS.jljHU$ ;^PSa\ֻ)^aw>.i /-j2r1k6n9Zl70"$6jZMɭK_Y6q9kLsRdmrRRjsTzնymW}*pZpxz0:ԥr)vY"j`$犴F)cbH7$pc9BĶ֮‡<U˜DQ ؚ%ڼTr*4ajޥFG5)MS&̍)DZb=G-T"֙,`Wmx0N B[j2TM*OkDٛE-JFV#|V}L^럘՘o+>#ծp-Ʊ!]Yoa\McҴƜA" VnKSGPʺPMCsV&e3\s:)g+' ATRAXJY˾:Gz]ZMiՋâ̶AT=|˚#ыSzҏ_YsK5n-8+`i$>ԋy[ooζ%t^ˎY؟is5zsHH,TJ j#=E=` җTFTJxLy9^Ԝ4- "/j_+a ?\b$8S%*PsO*b=ȭ`UmujffT N?T(ʤzkz nT>URKT )Vi6C[s\OҼ^Գ!u?{5[w>+(E6(k #\u99jo_=lkg%5Us"ί!wkXNs\歪LEpM:XJN,qK{H\T yW*헌#P;rk\u=gZXlV\ *퇨?VWPӵ%ۣY-c\#wҵAs4̍xQng+C+]՞2Ǽ`FqQiul6`aH≄R4]+#l4[Y]<ԧ˫ I xO|f#2aTl*^>WnօJNV m^WLusirMrQTA|]B!:FcUPR9vms)\d98;ZΕy.,nUc`ںAmAf|wqձ9y$@Cr֍k+g:,` ק|4l&WI9=zWtK#ߦ\ x8rl1^Ms8cHVpx m{!=E~B)UXנ~=.q'orW'Oҽ,>7e/SZwo|/[F$Y~S&W{wGAĞ=w&>k>3W,:U[l;S\6*̜w0sEaf#bqcVfUx\wƬz*W6! 
~nwb%' pOqV eldEJO*zTvolz΂}}{T K _VDۏ^Հc:im뻰5 Wqm⤒SJf-}OkfIyzP=jUٌ:S'E$$j<.;I)V}נhiZL9n[ oI:XO1y<ҬVzLp*3.zthiNi *ͅ櫤D4gt|\O4K.2*p/^5ޝL{TI[]zU'+з p*W~[Y)p~~*ռ$sJ4R6a28vq:5]4'yɨqoYMօ/2^#'PijߓY1ލ8j;185qVVl::VE۱^u8jxȩP9nI.U?Tw-IrޙUlWp֎PLˌ8^k'T"Fwb 30r@-OGxIWErKg6MPYM  VLhA&5YҎR[4# *uyi:g;S\dӗґ5 W9ngZZ ɫR2lYS gU@JV%o8,,p8[4r_/Q~񧬴!cOITl09⇔玔n)8HnMFgA$rD{*IMV~jF2ԷZn=M#\`|Ӓ7~u/TخqnNy5Ct@RЮb-ҕmUcb~j`ϭa8"ߎjdR\Ոm˜V [[C#]74czV~8[ډf.6r"~qy*3<[y6呸$ο3i5^8wr]eXT[e۳E^9cr/!8|yl}-CU1K յ.pڝ_6kuHb}+ϔ{h]u Cn9\1&ovϥ((2غb %ܡ{fAVnuX$,'bzW i]\/|v\e$6+nxh9e7z»-AS=+V9FKlzq\s]mWsiwK;].GEYֺ|K?ϭw<,rgGdDt0=j֧F]*Rŕs;Jɂ㴖 v^~WE-I;߳DlI3NkYy5(U\8jKY@VPJI$j]p)kbՅn69 <đS׏NJ𦡙eB@uڐJvFPq,篽z<:%{Ѵ\KB`fa.pXF|~5r+g [",k+7n8b?Jd.AC^sb`[AwRi:F2E9IܚiF%u<5/12<%{ۆ"3H֛\SbT>z{T557ӴbVd_`uU; &Ή F_jAr]>2鲪n)jZO ^L`^.znz$c|9ZRlD`FsY#{g=z\j8DMAsR躍7'>b!O#XjOo O=;ԩp ڂ!.:k'N:SStoɥ.r ٗkWgU ㌁χ5~Km?{GvB)'8T=_y-U?f>x?xJP .=ZpaN?#ŧ^N }k닍, Wإ\ ,٘Gk6]˿5F,1uBVv2&sV?Y3j+JUhWx<j ÃXK"!RYзbO&Kp7枬qPo FzӼi 23T}Ƭǎ"4#a57*s(䞵j)Wz XU#mS۾R5WmYpIWdն v wXɁ֭CpGZR5n)1Ҳ>cV"Iqܑ5 #j!ʊt5 *XAW-[ՄlXwt𹠗: W3ޤ2Rd1K3Tcn֦Y8G0.sUc5B$EGqҜ O ݑV"o.LM l /D8QNUʌVZl8zԊiL A&]'IrTѶjR=jKE4)cW$17CQosV--GTe(XնHd* N$曓0IL)&5Wq"*8^j)Ro pj$3jR2+&2c[Ʋ] Hļg+BkJR@z;cbyĥyʹs];js܎eFMSsXLI/5R%I`*7ZAm #ķߥRcmog^KBJjH1V3r>DcK~zV{Uy-'NƊE(r*hSwARNzU>Q\\>INbp1OHوVP dZ(u,_مC!swuO*8<\HǀWTpQ[Xh:nyߍ5"F%,O,n.nzWadzחכc񘚭T`kW ku׮{֮}9 \yҼɶ}*I$Sծ+swd3ZNX5x.zzZ䅤;Z _q.zW=sv%`N6Խt=4Ջ]1WAEqlč}k YR1ULg s}:fݞ{pXՙmK3R1\Q>"Dvy95^̳hd22$\-ŷ#u\5.<STpV71 I5*] K)֩g)`}+xT 'ꄕMQ3}ݮN1D[arNkPS ]Ri& 0 ǭodf[scDeewHTvY-Ѽn#ֲM[=sϥe9KR5thj! 2=oe~zJtmP tE5`GPqsfքHvVV8ʫp1-`G[\UXBO]TJ-l=*m?2i梳׍1;^鳰e*5N-(o$+7Aӵs: t2z۽ ]>+XHjum"H FEuQjrV +5mvbmm}i+(b8QkV&9NOS[~v9Zǁ^1,5t ?R|E)u;%;N V })SP]ێ.ivP?t@j}(Xߑ]J:%/ľ^$9;h!{a1|skѦI8|vqE]|v\ޚ,/+H?ը..El沤u v4Zf;ኞ$mX&G*QGd{9]g;^e nyDNc&#*Ǟz᭮ǥBJ'_ę;(9<_~#/<j۠g2:~ŭ qpuk1H0oC\JUsl?>,mR+T}x!mxGjں'2U`@YsǼkdv?<'+ǹ^MgGk+>hc+jڞ=qE:Hyܵ~yu ++:^nsdm]ni'b=+;Cih;WԬ7"!z_18x\54s3Tsj+6I0flPI-ȧDrF)X;7HrDnԂF6rOZIo7>/rsO!:R#kǞ2'&_X@\bmpdZ7^j5W-3NOb#b}iV!/pfHI,1Mcrd9Kų4ߵowҕ$h0敧(?g%zԆqtsc<4r h]&YK:T9'94`R.ÀI-9bqɧsԵ~NS<ֲRd5H#K Py|xY",3iQh#N }S qX76*˃H#bW uO5NcެG9ֲz;1 CW!<`{V4zUk{(܎ij%1.| Is &خ0y1\gҲ4LoΫ$HIzXLyA *_0- ,jx[Jah[>5n CT"`j5/]{TۆUXU|BN S!5@2/LcodWaqj8PNƵN*t#=+*nE^liM;i@򜞕9=iXj>n5FdWO2Oz[5msV1ⲒVRX qd1Qzˊʌ QO %PPGZ&R*XUQ$zzH) c4d(3UΑ`* $棒B~#f:;Ґ/4SHXz=M_괬yj^["~<0]zwt&\f%Q;ԓA/bk}ɭWKVirSZEbCqZ Vd ^{VExC`t~9N+D8H g SL n=sNGZ|GQѱSZz)ԶWj*gFMlKV,+f/JU\Նpi=jI="8A5dsO*GZc7j+!996Hi^&u7{ehiAn3KHI9H>wҜTmg޹IlbJ+DzM\J#t|+OFI3jK@2+tsj4__m8=K"7Bc?xύ#TyG[QFLMVk|A|re$L~\qrNfrlsd'>$MfK .|4[v3uҜr0`qX77AdSc3Vʂ5unYثn|YwmqQRwUO_VǂF}*;(Tjw™|NXUouWxUI#fcBχuW$qDafT`>U+\7ѪcK[йoe7z-mcݤĤ9$z>F 8gJH5ʝEd\Ikn~ W*AmrOX kѴI|dc^9iZ6؊XC|8ðOZ[\en[sԺi\moU[n^kw j̤ތȾmyvJ~<U&ƜWxV/+|ڴG'#snuqe}$m$57)+jXЦ qRrVXmAXfF25.}qȪw)J:gk;VdJFvǜ[:ovz 5ϥݴp)u#zVX.yO@ l><6bZ-iv(x$uֽđ]iOR9= q7ϗ=i-1"pzWO=dGkkѡHn9Zm9,PRxM*&0߈'j0)1沵XMtFU3aK6.|WFͿx=-֕sjZI.I: 8"-22 9j-gX`W%G4*JzIoIѮs$EGRxXӼKY ۟/b}rzt1%dnq\i%}6G^y[wDQ>W@: V:-RX\o ,^T.;Q5*=WѴ/to[s+%wdhXƫ,mF8=+~UhKzZƋLwY,?1 ں[kU/cy+҅:l|wxMTq@d]DaU%[TX. 
7dyD9C-ޭC9"7z9KR5;3dzTi@zU ՔyfσjH$oqYH_5a%Se1XwCpsR ڄ]duC7ϊKjkK$$6AuUݬv YWzJ#L1V`99lffՊ5ay9>V$R`j+I^ [ͮ+;Ո%MfZz'hɬ[{$ hrs sX]l&zjr4&E>-T'l|An*\4ļuS jũ(n/J?ZgK)-n?#5Fb8rש6^8jG8QsVn~8.\z7g5,rjPeAQ6YHr*XpjlNV4c]KTVPXM4;h8\φS$œ1n NqV%05eFڱF}hьԊRN y1 [GjJ)-XRX+:\c5e&f4U5r9+&NkB7b"/G'J$)ҤLҞUϖj{R HAOI(a[ 0j_'Tz9&EjMUDO5aVZBF+ {'* &ܭ:%ْiTIO6c(~AAeq&hjO੪t܎:8M+|mw95>n6K- (kB9SGM[k]QU<I}Ҹ:m`Qw>'QBdW$ssw {<FT@^E*O 7#{`!UR 溴B r>wMkX.Ȯ_"hb96K;7#wӚO?̂}@3yRgU%z:V wnq1j$q긌ekjzl;T\Ʌf+3tEĭe| x*݌R/"u#P#O)@Rzo/}+C,W )tc=yC.0ϵqkB˻ߓ[ޥҋh+^3ӄpH3Z׭}85ZxT60~:YPc#+GJѮ_%=s(B?c?j9290iDɭ H0q\xvAu!=M1MgOĐ``xoƜk)"Tѷe,Z+"θjRmgۃ1jrXOovJ?0k> i]#OSVFT j٣o;hL<(oQSR9hճ F$=Gf_ x9mR]9z\rᝋ מx|۹.UQߥz^+/K2O^ZPrh0k3؏##O<1H4[iֿ$?gmdZlP+Wş 휜jw?֖OSs\ah ~UqmAoa{W^GїC"e8Kx\cdX7m*~o2VC{[Ċ.femz+h.ͨzƅdU=k%6jxn'z ɧC0g=k37\yn09Gtiv $TjYHIp8M8_QIEwP*Rқ֕]I_sI~M6^+ 嘮r0݉MN4Oz&'4rbɫ0LA9;Wi4nw ڥiH Gw$wG?ZYKϕxϽfՍ5c|Es*ھJtw>`-:@',yVF' 0?-gJ6=M\iݜQpzvV\ry̸݃V-7#ދ\.j4֨Z_p[5`]n8q*!сF c&FJo}c5#M[K cGqZm9B<'YKj夁fr)h_/ִwY\)үph rcN\pIVܺR#USGl ퟚ dT'ndN/F  wp JRL`{T˓T:Հ*@./j A%ؕ$7g 4QM>7mI ԇ1v' >b8Kc'Zjr|~j98 ˱&*h.W]5JkffɩϊmǓVĀ4 hdUT,T-[O1NL-K^Y{X͜gMߌԀRI*& k!7*jO4z m_`H95 IqrTIN)n8{/$g8H%0HD=E*<ܭSduv'wROJ{Kx/5'6Иb;MX9,XOjW4cp<):НJ ԑgg Iy^iXEaJY8'i'Pqr^''3; SM^Y7]8%*EbO5l<1OiY4T+/ӑ b[G=*\Lj212r Ua5anH*m{T/I`>j&zѹffˋZTl)洉b!U!!Uq-s=[j<Ԋ]00{1sQ+VJZ=j:w1F4HFwWҧ&P犝yg TjW@I iԆ([&i*sީ{ a9#T_|{׎x|H^=;|דĴOB+1l%cQ:渟(3A߷wL8nMxӖ`։~n nX:1b^S\S HQXZn|KdV$qV::;K+*}"[CyӫFu~l$)>c;Ɵ6i7W#S I8#=뉟"C=zWR-52FfYX"yzⴠ&Kb_i늣)k:qkC N{ Yܟz}~{QY$x'>ҺB cS vf.[EKa#*hZ_ƲȈ*"=Vy 򃍧#J61 :P^7t8)j:h">LmN;>-ں]K 8^z wwuwc QTyU* "WΊ=T0j Ŏpr*%_1#@qoTB3TcxG9ޞĺ=Ȕ~K7e #{2q6N;}mp WZ=|>Ksb"}ŌPY}Rb":!@[IH; 9B+\$͕ICk~fJpenQoq*C ,z³Tt6|aL }:yY[.xS~WJZs.d9EMr}cKC7gCSYO3C&ԢC2u_PG36%rNOxXxΣw4sNN=-Hڎ`NIJC8?|dᙗ=i=%N ?y59p#M~cTjb?b%=b[0aqIn ^MgwfI~t{U>R*v!c8>0*sǽXI#rwI2֍&+If4pљ5:^1E`r[LY2jx$³5n7>bMҧYypj|hQ5 sk4OTI4TPtjdb"UT5 |R3eríHbhe$ę#˻IӃ4XI*Q&G5U[yԄYI9IOcTCR=i~+K~Z00M9$m*I(S, ry5YiC i,hi9'@e'QAd\VO5 &pJ_sHdޞGiA  R#Z1PZ11GnȤC-A'jNƳcVXCsެE&*MS/?Zc6@L@(9z`udJ6݀j2Ո%v!<5J * "d Fx"+g '$ӕ֣IC/[FF$S|u2+U-?81ɦ<'zrcE" ZKR R}z sSWUSW)dI23ҵ,F!b'W\sS"<ƌ'WDp>0q8q^o7:2.>/vHZC^o+lWi$^_TG' ̾W8ŒJu+_nn\gfxRoZ>/F $8\߈b06>r񍢹껳yVGPX3uPs]0=v#r#@zTw=jiy*cTsSY.>kU+'dPϔD#Y~tZTkKC?#>%5Lepٵw㊂/ %܍)F`9n˦ `9Hu6&i=MqWҶR/M22"V=8?:ayqel;rX!md"Uڣ U9ڙc+jiQXK gW=J.N)@fe=Vɨq==O.{U\7*ρ2N֤ PJ+B,4|AG> Cɪ 흶QT2@/\u8*=+-5oTml7-[;P .(fcoUdjsKl`dQ}+Wo 浲n{__VU ;uY~ / nh܃?ư|1~%sm1":};}7]śy3PɨCK>kO\ŷBLNTXi\i:>Re U˔!N;IUX`YUX~bXw# xg>-'m+zồ[1~~Y?G%*d?S hZv2Waݟ:F9V0hA$>[r FAcl rR֗lȷiV #,¡GVNeb 0ϥZ5O.ȶ2X&hk6" Њ,6^_[~~&:k2*뒫M|M؟Ze*p/}S(x09${cQ{:̩\$?T6 ?AZI3~?V_|U#ierQnh#G82b=k>IX~NFF-O>eu |H/n> eCH4E<⤖S嬱rN:TlS4R.lH.5$*zJ"Jf2Z/ imaQI8܊y}q]Otp7:Ta.~j#n(*wORIZ!8//jxe6A:J.8&|Ʒځc'ޤK(aeD?75m/(\]r[< nă;&t_k.Fr{˷O5jF"9cڮCcߵK@;DfMpPn2-&i>G&?#޳=jheau1I)V%9"KbrpWȷTd[1/wl_1kvC8 xx5j+t);6I rsY0dj9Py_jHܷd9EO6$!any&ӳsZɳdn:ȜfF˞v8hC"yimbwY} ѩ4/x&x9QLO#~)<ըgYIjVB׽r)0j[TcDfo5,, J+'KW㊎D"⸍zTɸҪ$ EW-v.%QGJo qL#4Xb'$1 'Sc'"'Ox.߽& :1|KS#G.%ڸM j1f#c8dY#WLzu&8&r4 59J_Φir57k)d*~SV'$!I/ 0*x_zF)wuVb) NܚgҪ.Z99KRW5e f^ \WzL6jDz_zI;b6?_2I{Sղy"A'R+nPwNN5*֠-,y#Mvg4f)9J0:RBJ< ;$zTJLjG:%MT &9nI 䚱!8jU|PI(9!v"9cE8RsY]8j >Z94g50njN3ɪ'B Pg(␞nL?5 ջvی9iܜUU8 aVszVw璹,SUC4\ɢUsL [MIuQyGZ;"Q"~RsQ+`Pń}ÚvsUZ\c$znĸ9LvGS֥͑XM@T@QZSwMhnYO[cZu6ؙzhrP7Lal{ם פx畏J54$@ƿx, ya.Wx "ʙM{.hJˎkqe o(|y>/ccdc* p:*],͹CkumE ΍dxY6>H?Zәݔ3ߜT7f 5Gԁڶzݝ-)O~;U{g}*ӊkc4/=1,qֺ"hޗ9|SjX)c zs[wr_ƫ3]1lo*"o%إi9~0 @ *eIZ24ӥ7&QB>G\ԟnz|z5n'mT2ƀF2`{Rz׌ ,U\CO4\O5b,dIT'7QYW~]zLRkS)^/B6vF2zUJqFnn=ؕIrr(hd5%n,9tn$al#Q޷3qq"kmaaTcɤy$}j8`,ۦNw?k3!oRH5ܬZ{U?:S4g NpфQk3[K2Wmŀ?Z5u`P`U2\"c 3w I` qImEoozIO-Jg֓Wov9[$bQ^iǥCF&(u5ݡˌ$(P?-Vdڠ`zVrD~j,6p296* YVO$tWR7 
Q1γ+{?L/*[kcDGn<ݝ٫W8#fhF>ET] `mz5~߭64I8歬ؑk4cVYFI Ci ?ɢrzU' ֨N04,5r);/~+2>cҬۜ5MS*~cMHwSS!*Т/z[XUD*׭O0c}T'oZ}M=@yc}jɪ*N޵=$Sl Ijd YSVrw)$Rتr*Xzj' U ֪ީV֥|:ۊ YN[- ԰I)[CKEd *4'qr ZIPz9r!.wflNr#J–z|(:8 *e|js@JzO]O0(dVPZrZ #*0IʿJk)lYU!q)Xtm՟YK%sS:ԩҐv&Pޜ$p2"ɱ>\ߚH>,#/ϥg-@%'p+'Z@O F{SXҧJ~L݅AIv<(ԓ,\PziGQ@|"O=j:,zR\NRcD,,rTg>$G)s15sAnj8x:4$kG!ao(=}k+N9N}j}a#QpW!~Oz?)9٣o?JYӒUsktf8 W{xݑ\nJ[Ibg|y 릓W)lI]Of#/&~Z68&LDs.^ ^Qں)Ly_(YWOVpNx\noa6_gr z'bfIɑMǦk т}IP ojC_kFen8E{S%I֛a6?J,/peӝkW0#ֻē蟾U~ZŻUzy}Yǖ&6Gm)U$^(kr N+2RWzŧ4+?33sҺUUȟ_6m*%Tk H/vt[\jZMAEHaФZ-7DXk*@zn5ᘑk䚱@:vPv"vu CT۱8,m~rK'z֖+XLFNz+ʁ#ЊdlGI4 YgiT`aNolfO4Izm:pKHxk6٧2ߎ75{U!B) k*ϨmJǭ44LSFZ-FE7A2z{ GֲiXySe7ϹBmKuN\kcFc!,HyA@1W{5 ÃF)ZLM^~7f>y3_]>'kſ$^"W/C@'켮 }y^aKk,%ET<߳?߫)]\bLUY\\W:PynPoint-0.11.0/docs/_static/favicon.png000066400000000000000000000207501450275315200200720ustar00rootroot00000000000000PNG  IHDRkdiTXtXML:com.adobe.xmp pynpoint_favicon {iCCPsRGB IEC61966-2.1(u+Q?6lLqҸB3Q&ZZ3eRVU낿[Z)"%w5q^ϻdc/N?CAr*kh,&C~QWzR֔IY;l`ۆ-:2c>e_8ѷp~Up ]jL$LK*ohY,rD嫮ao$޹ 6g[Kh pHYs+IDATxyUY2[ D" *BD@ .BUTGD;*.,QTMl0ӳ$9_5]^t>O?tW_s50 o 'La!La!La!La!La!La!La!La!La!La!z[=t`Y&\`^Q`5*y5w,N-9`;`O9 Ż2]>$;{=Tej:/;BvKVA¸%$&0 G 4<"Gh9ֵ ! .-Qu׀S-Q 0A̎ERbe${A̙BwB  5v x$Gk'8_daZ .:޿]CBwwl <;x<!LWOb,\^aAW2g5ڔ~ -ځ#aIrZV _9:M[Q}?mhMH{k%bހnfwSF0oA$B7&S/7:ش1Al0aV5y'G3&}sW'G~?IKqt99{qsq\g97朻9ws.xsn93|~˜s1N >$ ιks[m]K먼,E :Un(Gs9d[FC|}IbG014Q ߥ$/Ѧf/Ox}/ hjNhZNw ^җeztˡch: -8/E D FdOpHǑzQ/r)#3Ǣ;N=Cm)#8ڐ_<lE=` 0t3&dM ?D['J@zEPJPE 6*+kǁWh$A} E$|b(w4 oDKjaťE@$p^/B%(+cDfפ>rWLk`"mTeU^*C@j3̮BF% 9FPj4[_J/h7Ja3fIhϑ AvT5tG Ta` .  9~ ZZ_'I;܏jwSQ^{( |s;%$%± _.:9&s?p~.w:^Mk9)4r(j4wr>CSd bAy(KCK 0fQDJ\ tve (5{C48G* *Xv9w"^Fb>AyZ. ~B~UFNꝀ? &M>N a Z@uk7Ey]p]q)ݗϥMM x~k"Z>Ei$KoHC+Y- Dہ c% sh9P7cZ{JklV:g70RcV{a^Q8(/;\JdʧG S[ڃ UhUxPX]@7)vREKbu@zқ}! P_f(SdS ׮ЙԞV1ud=ڛ}D>d=sNr,8ȏuVVބ%D-OڞUGA,&3JR>WJH. }0!dX+QSx8fz9捨|',BG+I.@iU+ː0Fs!jZAQA܏iv#ƿ9Se}jfPT1ugc)J ǷbO)dN[ga,>\{. >kIΥ/(P iJ/D k>Ch\O(E4{26 qq$j&L)|s߷Q> bK`si.r;1R ϼ@c h4u&|Mu(GJN#=3QUP:r\'+U3, WbK`9͠XL秵z Ix݂gE65شm潚&;-MߖL >Lk]̳Nn"uL@z 1riѯ1M8-3bJ Jxa9޳Ɍ j@ͣ_o$SN #G](A$Q<~QƁM > "m (M8רq)6ޏdm v ͩT>fdݙF8Khc1߂Y*Ox~k Vhi8d _LqmN!>KѼQ ˁz i @I%#h!J)Hq"HJ IKIJGJJIKa8y"Ֆ͒="{MvV.g)Ǘ+;-Hז,L_~NAQI!ER¬"CV1NLMZ)VL $L`V0{"eyeg :JJU*[5JLGU殖֢HVQ?ާ>حѩ1͒fX9֘&YF3U^FuX6m]}G1Չ93 |UҪU$]nnCM/OS~~>&   .y݆i񍪍&v\mu:ƑGLMv|253Θ՘lOv19|y-Եl^Zä́UUКij}ZhlfSoV6¶vʼn伲3صs-[{'BCSGhGfhgWΣ<lqu%V>svu.֪MZ26,b&*4r**4j:*@LMLyl,7*us\m|GD\bh$jR|Robrv`NJA0"`H*hL֧uӗ? ͌]֙ՙdKd'eo޴gTcOQ{rswol m ڳMu[NO [A^i۝;OrR V (m? 
Yrˆ[EEE[?Xި%%Ga%oDDiNe̲²7Yn\^{p(㐰­Rr_bULp]u[|͞iU-x4:zccǞ77Q'z̚KZ!'lsWnk]8q/v=s}ٚvZ{aԱC) _}ABEKr.]N<{%DƞWzuW8}nX8[[Mowf[@;]wv8d3tyoy02*|`a׏2-<>+|"ߵ~o /ۏ?yx??'󟓟O)M5MMqb݋ɗ)/f 櫳/ M^̛oy=}na>|ZZ.V|R?B, pHYs&:4tEXtSoftwareGPL Ghostscript 9.54.0 IDATx1xȕ'Y׊MFMG v:R'y6ㄔ8 }߉ H(Z`4h:1uG`} Y@* fYH{}@1~R|^}?"W4MӴV뺢(f3#dkjy^U/Ko( 4@ 8u=ߏlԵ(\Tu0TU-{E@]5a.8' Ji0|@ 7\ N|7 0F naX,^rd(MڭVul|bQZ#N74D)ZVU)khV 7H\׽tbQP݈u>M j!BrJajD[\amƬa*SR(:EDVAoonA0|˫Rq03Nxw:_ G̲v쟭(aJ<].s 5M;vQg@- ۶gKEܾlJd'%Vcna7wv [f9w*j i p( !hM ..vlj!nyKmp4/^npudvM:N4g߲c'E&A<~|[p0_B[(ȸ:sc}8[^N&3˚L&kE n~0,:?Sh".?0nqkxgQn72PC E Ɏ4 C{>mi ΦSp ;>r1Idc Q] 0"c"݆w)2Nksyp4blvǘmv]z#p)g-[4];,ƭCm 蟞_S] \C0 Dt(mɄDQY?p05p8,{ <իWI)joo];IǺW߯_Ԗ2ܻb,#Qc~Ò` K t|v=eew:C{{a9C78h/9 X'SԄwݿw ^wރvZ1H 0<>9>5Ҋu&>UA&f_ezUՕX$ϿWX.--<ӍCU4MMHllFN}۷o݇F0"%F=7@m!q !i8~'UJL}Møu17M0Dm4&(7f Xoc+qq?bn`88<}B!Ȃq~{} pn&,6-WyhcyI, >M Ex %u{0 '`w珦ܔt!.%iڀ(h]8ޗO}6=ϝ{dcߒwޅQ T 4_Wv\;gݻw/=Ͻuf3xp' " q]SbnUQRfz ͹ 5m%ܺXMxlm޾MK ѶiZu}6*{%TE''aJXpjickc~ofOOY%%%gjGaZ hZ~R)a(4EUuJ5Mp鍵%I%"⠕;@%!EAy^ZEQtVREQtJJ5[MhZP1u㓓'T#`\uEQ ui7ܭVk2gAEQ._*AM n0 E)Avj[|1N*Ipo8MtaۭVKpfǤ:?O^w>nSBRmq^bHo-V\l۝ä8p]$RU5}p4 O Y.mD(CkK)<. o׿z[֢(?Ã?s'~JY7o|ֿ߿o޼YS߼yVvwujqլ̲NOONZzRZ_8>~W`VȦaڏe\m"vHRbnt]w<ԓ(^saw=a IFI[!g\xݓW :v{t$`=aVEѨ{|!7U:\.w0>;K*探hm`n[V' QIIz1 t2s^R:կ^je M~1I޽ Vo?4'oݻ^7o}Z\TjFʣV1wzm몘^ҏXų`rQoq1 }m&Ur۟=z+z޽#޽#w5’7o޴[[LVş @ܞ}&=/\Ox }ܷQWfmzJvdMX,P OOz},7jScNi]hSWJX4FQtw{\ -P *gqRkJqy=xDQt|r8N/{4-O&w;dm?rUОϑEe0 :ҋI0LɾCQ%)WuҺn'Q}>!0& ܖe%i{;>2)}b\^*G|bݺ0&?Z)HWElBHʶBarQl}02 JW;))9 6J . )JLt]}JDVuݤٍe5'FI)2: IRaANS\j%t]^S;b!p4BD-,Izh$VkC<)5)[͊8NiZuG$w91U$vB 00ߡ*G%)I78l|2n!U!v'iAN-3A1*,IIr']KjOp'%Ep'A1r,,IJrV2nwIn)nƓ 7Y;Y:NAEaw]O(5rIpLkd7;۶[@"7̎%]-c~w+5 Ip'ՓPJ9o j{Oh}ųgiaFh66Çea{a>O>R:Ea<[<c5:ryEq_@|rgS'r. Y.LjEِ wRη-BSɥn'0v;Nps.ՓW;GԄ8I6*mJ%ɝu2 8`Rh4xZ.čF#i{R Tw҈l$HoWnbkNF~u kA҈!|% a-0 w|Nc}EQ/ RI ƄEQxλ<}noEQdt _oup4*x2Aa I fM,;&a3*HE(,O;)Ah48jk, J)ΔV,;@<1 >|뺗޴Z.O4>}@po߾}3(M ZIcܥl-)ϝagpx#x<~_U*v|i P1Bf%o [`n>&X"5 D.=C npp1m|s.:BQPj^p+mn>& 1!DU/n^:FUxSb3i6<\3otL'Iقe/.~rPUl\]甆Y'𴿿p7 %e/ ېj܅XC)dMt]/{u!SAdŻkd QnnI bJn;P#2Z}|nCU3ieoPOLwXn$7B ݂VP2" u4z$ 2IsF%]A%Ԏdw f F}1Z-TdwRm$ 2ބnĎ%! P n5!! A|ggeݥ@)$ *: $hTR{ܪΦS1p0@+@Hp'Ut3 __u1p0@gIp'pj !!SXc\nmN;i !<>k@R>1w%EQf)mIpBlGʀ;RB!J)j%)]br[Jkؚ7I.p뺎1u(J4^Jր;[Ur|;($P1I}Ð2:0*U Yn]ד"bs%(66J).d Ir_8v&t4`GIyGFBRT#6Q9s%HV`w܍F#+K !f3(J-{ =nܫsU !YtL`wr̈́2$+mȋ뚦}sU !d8p~G(NCzr!wMNsp!FJ4 I /)*a;;@Sz\6t8)R6H$*}?΋vIe @pL'YϕBTUE&QST!VU5ibul61]F~_*jpBJ(s΋! EQ/땽v;ruDBݣfY**pՊXFa,4M\I(HnL0Y ̕T'N:\.] !ΦS z=@qpt]G1L\I(TFaڇJrB:)JD)Eg(Zn"d2CYEtVl &̦Slh D$76P e8`$Q;=]JO6P 4ͤ`MRɄJnt:eh8,{P# Sܫժas6ч^C!\=>9Y(/^۞¶hTj6py^EJ)zaA7!/*. IDATu>܂'DP fX,h;ffC U 8x8ыgt]繞sh ex2j_ΦS5"}՗Op^-w_2 6j0~~uuwBЋ檹iZK7\.Kl}\<{88m8 PUU&,+ CYKtiZ xr;m@q `i7Z8,DVrRJ>EQzuٳuBV̦v]Bfn*oyӧIzA9jYV!PUu46 Wu]7A\. |\C)4if]c\u]7X|ֿ(3]Y n ÐdL"p]qns{|#Zd !a<~0 Ú8/)yѨ\EA`۶XuOgAw<۶_nҸ)1N#BȦDkahYVRBv8x*KMq'b[’k8C1>;pf㈖fiZ阦PUŗ|.D4m@쳍7nax|rJ}oׯ߼yr/9*E|}}RkOzpWBq?=.QV(˥XSӠ0oOW^I}\}Oϟ_O)WO?C4ͯzWWWWF#R܄a8?s&w| uj4 V넚w;|u~.o(p^jtիW$.O?u7 %xRN駟u+F%%1uONmZz;LP}MQ~'Mja41ȫbPP.ܭV>W1뻩{tsyze !FW^}͛7{Umh4;{/\tgg07Twj! 
rwH]ߵw'ʲ/~/FC_b0|BMϟ?gѣe Cl:v3J3 c8uɳV;" @(1GvE^!GnBy''Iw9"g <O&U*FHӴ~'Z׿R% `-sj5hv6j4͹M?}-]^ңQ G0 y.)]׭34^K ØM%mXA٫ؠn6!dZYU*Dyc븢܄NҴ[/LTjͦSk6>i\ThE 'umuce@h.su\c܄~G)Mzt\dQh4ⰻ{tT4/&㱀6qHrlFe4Sy3p״ZODN9vCa7QJ {H`]m/g%j>|Ȓ/9on37iBv%T$q-~P4JiCr_0V?uEq_kUyn20 ='Bv?hcg3@!ߵJ?$l6A-!bnUU;N Y,oiVn0 ǓIQcnWӎ(\j˶g0]>Ɏxgխ`-M-7 ( ry\|kfgn[23ԾqhyU% q11C0FU`1M""?CbkjN(Ryk+oqZaXYAK*Űi#ēmD P0 ,,bJJf|vvDQt|r"KNUv}}8jAZVSREQtJU)4M۶mBTUMYcn)@MaXhX͘5<7-짭"NfcQ&UUW=ߏL)EQ*;Yf; :>;cǂ ȋR2LLnL3ú6-Hj rsDKѸ78a2F43Wnm3>Xv3ܱ @%1)|߬l:M 6}tk]t6 yX eUU;ɣ` E 6܄h> `gM)&UU[h0_ JL͐n; c=,nKQe,&Iޫ-bXQnB p_J~ w"{2 [ct+L<)A۝(OYbMmFA;q#0bpW b3m~2~QJ;F:s87ns=+BFEQ(l6Ef<`;)e"9V+9uM4M)u)Ieݜ"jA0r)-y(x2>=9{\}׫W)`w! d.iVn|?k܊Ԥ]`,k2<G۶=LRM^/؏(gE$c{aSEp4P^>w͓%;EhiV Mr)H+y=ϓCxi1ƧM&qz{Z5e>VWddqZgeN0y1wa!?QYq88ɉ훢(b)`cľdAAeМ[ve,Ib-u:cqȏ{8Tt1eE M~OYu~^غ}b~'жm RX.yvr˼Tz;&C.5͋g6EQ4Lz~>:5~ОK^ʜqcѨCj|=?[lFryFzj&RJ0BHh IјMKɏ;) {v P\3(dKEeEnLU]vߠEۄݛ܄r5O5yRJ wj\ҽ$Nu;bܿc%z/|vG""H[ ؿA] AEQg[楒_]F(gl:w>nNL{UBmQGqȩ|d9%{*|&OZS`/N'~F.j&nWқT px}I x2eY,M4s{>v"97eUh%s(hox5ŧGQ_FQ]<$nfRnSsRra[~WUu2{8N쇯O=:[;Of2, 0m{8TcOOȾ/B{>/iE|aX,yb].}U}^Oﻦiݮa[aheeya? ˥[߾A˧E'7>dyalgK4qvKNM~Y1V}(5_rׄsj /^l]R![ 2ԓd7g*;NqOUn niu14M]׏]k6c]K-8[@]&rD-7ey^q=#q+@R|e]9qU9{;̲4(Ϟm]UUnOE-CfӾz[,w6G_ .PΔB-7eŞBⶩkrDSvO^[BT"ay&4}7t6 R*b!nߴ9vY&mR(%C7ubsv7 6}_&qe$3Eɚ @gx"eIGv{OY2 cŧs7٣mBMeߦ¡S MI2\-f fX,Yqaa: =t4Ý5&-&k n>b*|J5nZ[IJoSЭ[j·ԃ[8Fc4{=˲łsr5M4-Q:/}d{tݧEҭ׸~}5ꎗ*|Jyp4b&(EzT8ɼA M C7I܂}q^nZjZw'k"~ X*m4Uݝ)Nm7`5%}i-&׊Cm{>ߢ4ziz tn6b`oȳDخ/堪q{z8Ύ&]^>{=Ǐ%& X.ElRo' e߾~c >\׽tPJGvo-Lӛ8s˼ߴxrYp~DE-]u]q.9EQ4 27ťGa%p0+{no_6?YA-V+Պ}Zͦ~Vm*| d^3żi#f:yț|rd.G `;E4О]ݢDixB|fU`[EQm؎(㳳Qop(zwwK,.x.ԃ[zFh뺮z'+sum'WA3Msձ KS()n@[S,(tT8t֦SDbe<@])7x._PdcW]/k?]AtnKrM~ eĭ= 0 ON$j4MhC>nURbga'|\E)7ppWu Q>v;XXh{8V eĭ= ImJl:1})t>\וzggz[kAU[#j Տ_sMuɱs2ުy_=Ƨ( -їjmrgr(@vwKDb3S+>t<N#f5kofYyJ.o_2ؾm<ϓS;vsy)nPmf%˺nU]<ϓ!I.E7/[kra_R: rL6g3/)*\AG &?E ߩ_2t٧,qc{-xEfW΢bĜ+fהpciŕ˾OS9\Pi{ܿ)[{ +B~,A gD^4ͥZz'H ytCODq8g^v<|1ݡ=;CwPwꚆ1yIE0 )zvܶc|o ". )t'z9󩒒}[q 7]t]7 C:n[->u5bܶcZoXt\ᯊZfїDd8sei#g] PwoOOK5M3 3]7 ~|%O Q1PјM''%܆aP44݈[{%]Snc,UUqtEòFiZ;B*O){MSQ|~ٰ MTE!4Kr)tnr?֊յmw"f8Xk엑; QU5Q2RO.swSptbM]㖫>ыg F¶쳨8tV%DE`L~7{nH*d]28[߂dF-{IqALM ]v.l[ge^q%;K/Uaq+@  xuNFM_ o]-"DLD̤Z272)+Yf'Vk2L {7(wfY<6y84@ EE=F3| ؾ&U^ZT1o.2/ߴнL!}PB0}R>m5FG="mܺ[E6e?tp7THO&ŭd ܪ-{eo1)=y٫ ۶ُtw'"DL$$d+b1)6M3V&ܶL 8\(:>9)7AFge^ ]|5%U|Yb1M33[߂p+@sE86M3]oZ.OYV.Q/mY|8從܍vUI/|\C }PRuy"$Ib\Gp|n[eSIDATĭw%8\tl1"M19":bh|>,U)K TϦ<wynoڈYxn#ZBUU4^߱sm"zA9\CjUv|n'g"f:{w+aHfb:qz/%nbאZ'ȅ(;nD%p~@ju]EKuOܶ>e[5*@]PJ/_%0sw 퉊~Vn1UCچPW>d!t}*Bs7^Vt˟-oڈY[jn\F.=q16} tǃz=k%dȆJGGE^Z>lo(bv b<(Q X#@Ywx{k<Ӑ("mbY:>;;Mgg;$Oyyb)N.P) k("NV:{tT ؘp0ˈ[.>]gө\ynEQGGϞuOqx8,i~0jуjgc(J^egiY鴸Ƨqz☦)lmY4wO@6͍\ިt]xL)q}E/huzB.MW r7"|RjWWWkm17ȥho޼Im!V gyVGvYW߿z* i>q[wn?ޱ.IӴi~.?Co`8|_F~i?FO"ъ(_ɓ'W*dRh0v,wiw5͎i*J܎(s8l !gyNg 0Jw _hf۷w7*~noTsF'O|w/"ƾujZO<_u,.Ծ/ݻw0٣.qZQA|\䓻,09|~Çeay!5n~nju]0dBQfbOi}.y5%{8qحS뺰e$kIF;WzFϴN:%ym/?x)mEy!4M8a.?N(Im M>Lgp@5>dQJ  æIg\p@> 䂀j}Ȝ@.v;p!nq8! nYJYP/;&EA @ 5)ka$7tIP#vUJ>p@]XOjF@ u1,g>vInx8FQI ?l۶1M !m{<0>YQn[zn~Rx2aoH  `\tL6!D4 wLmt]WUk!mn(8D8;=:j >x60٬U@5K 6,mJU@e! \{|rR*FQtz Bb3DP(l~9}wa7헮EQSJi6R@ wmCIENDB`PynPoint-0.11.0/docs/_static/near_limits.png000066400000000000000000000346321450275315200207570ustar00rootroot00000000000000PNG  IHDRDf-gAMA a cHRMz&u0`:pQ<PLTEUUUݻxxxdddwww888(((gggDDD222[w5Ֆ.y*~l=LjKøaVƴ;Iz&{eP'|g+~@(|W))) WWWYYY_bKGDH pHYs,,sRtIME37qIDATx݉zJbQxqԞhǝ8v=yNJ."@l,U(-RQ ww WJzC /n72RlN9,MeʯiҌ.\ ϥCQ|'Ezmj,HEc|.<ܘqPFQiE`XEkDۉOt:G?l2uo;[lDpkh3ԓ,?Wx}DG/7.`R}7r׉/+LlI`b" rh.." @\FqfQ1|^0\#ZsSn`bYGx9@TDZ} ?u`bF gU΢qZ0l#Q1|`bF^(S8'-*L-n? 
i9tJю0#z `b" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" hQ|`bF8t|FEezC? WTDR":\Q`b9Gt?з(6'EEDX-j@m3+Ll-ED-#> EDԉE Ll <o'(Lk-:Csmԛ mD>}k9rhﺹ>2踵+`byG􂆊(S:4TDZ" 2e QqDGEDX" Բ QkD:(45"L,^001@" 2hQ`bF401@" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" DhFSoG"o߾=D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDDED3""Qyь(hFD`^D4#" 0/"͈̋fDD%V1<`bGlQ&}Dw Q=FDH$" @*GtRDH"VDDD$hu z'uD\ #Z)$qD" @"Gt?+$oDc:;#NQDV=ND}%[G܌'?gND`^rhyNG$iDYEw> L#Z릳sH"FD~q+kQсFZ`b"͈̋fDD%N8$D4#" 0/"͈̋fDDED3""QyюK;r8/|0w9`bsםAoID<"" ĢE(Çz[(wvnQUxYn]!" El*Q&3K2+L,jD_*NEo" ĢEmVD/#2":,"z'(Čhaˈ(Dhٹ"z_qD`^-X_DQyb⹧ [h$xmKy|@DGQyƹ0߰%IBEEhߩ0MHDfDDED3""Qyь(hFD`^F(d-(ѢsD];WD/"/FD$/wCGQ&-pY'0yíwS`b"z< eD`^FC9˘][@DZDf%fDDRJhFGJhF~{J$L3Lԛ^K\_Dk`/jDN.&oh-N?=:ы"{`'ڹjwsvן>Pw {H!Tbޔ>l\$]Dxz4Ma)So w"]DGMNDڃAD3!zDu8ܔCQYѲ΂.u,\Ĉ"zCD^-0"P1/V\ Ћu{ Dg3c.WY`E4rw+.5Cj5I"hbF4l"zaf7E=;7;]ZDRD{zD_,Xħ.. `"[ZlJZ wq/ZVhV\@:1#ZνJ3V\H&t^,H)E"zV=,Պ+4;R{+'.3Պ Hĭ2j?+.a܌DZqiKF"ZpzID3PԊ IHkD,BԳsS?܈nzVɭ2ѻH#E)+܈Z kr#Z-gXdܫtEIhF"j?4D4#}ۮ`?D4#@.q)[#Z-g"آ#zd?D4#=@"[q :H_D,f7V\MD3ѻ+.D&CQnO~$j?+.Dt눖GqhF"Z-z#VDD32Q%hѲ5j?+.#bUDhF#Z-z;BD32"(b buDZqqX/#c"Z-g(D4#c"Z`?(D4#"ZpzSVAD32*աbьh"ьv񿟩` D4###zQhFFqhFFZϊ 7'{ QD!ѷ 1Z mhF.ݓnND3rID-p{"Z{KEw[q֢Eliʐ V+.XD4#Eъ 7&,wV\#Zn_vkE͡(mebsbj?+.NʈWF܈hTˆu7TD[lH!>׉~YDYRG4UfrƈV+.XVGK}$c>YgD-p;"ZlWsղ4O7`^'zDQNm9LFZq7D[oDp YGdѵFZqsD8o'ֈV+.ܧv%JɺלuZnr,ɖvVV\x-N:WXCbӭ4V\E8dlhEQ,Elz(%.Zs|&:Ç/P{&A^ٹw_?<<8`2"U0{M.DnŅËصXu[OD& _ _ PD/[OD&(_^m›L*~DO>2N,@L*~D׺\~":ZE`JL|n1ӕG,SBqryMDkc.9]{DD`JQ#zXatՅ+n$ZZh}Q[d1zuq]}DD`B"Z4zzu/8j){}QiE!zۈ^vVqkRQIE)VǿMps+*޽m#rDhR"zwo>_R ь\ѿJ6CD3ruDS_RьLoRhF& ]čhQk.q#jB`1#Z4 *$6 0mI^dgI쟈${0 0 nRRXo*"zxB D]x  YB`o}wB"x<¯&t&-D}&P ])D7NE+&t3e"&t dB`"N&t&3CD4jB7Fd.%.?/z֙ZԈ?);ѺjB>Vd-ZDEN4kVڹɼ0 px=wZx"zф.ubޔ*-L\'ID'e;maB:i"3 p]5׈w觘ՄSU"mo SMED[}CT"^MѵeB 0 (]\R&tBEK҈hR,@v2 &Ee۶4hjB=V'g-ܔ{ ݗ[gE?Ro@vDOo _ 7"֓ ]ŻĥlQeBr'Jń.DQL\*gͯLtNL\*ٹͯ9;wV?L'm't_So@FDψ~{LQr ] h!scB #ZĽMaB":љ1 0^{": ]G4ށTM<$hEDt4cEΡh&hEDGzNާ $N4*k;3F@DizaB` ]qDs&tFQΙEDiaB`Vm\*흊%L1- ѹ&tRoŋhѾhQw*N> yэf㳢7`ޢ.]/b_D2/&tčhމ~-*3 0 jDS ]1#$6"z1Dn&tzݪ [0[Q#ĢT/`.t>WDC<|V#FUeRVTDCm't }w& dBSeD~&t:(LtQ-E*ޥ"ʄ.@g&&?LQm't_So(?#1V̎2« ]"?M"IG"z -"Gj!ѫ85fTDcBLĈŬ^DŽ.m[/bEEJ zCf#^DseZnQѫm'tvn3EtE.]\/hF^E&ZD[>NS85ͯ"jB]- 3gS"VUF*"vwnTLr ߾xO)34N,6/{Q/q)Ͽ$YF.?-]Dk-,%.o>(nԗ,-Xq\kLJ}F Yoⲽ)h!eXwqiM4rCEtr?u V1o]l>.*fD-cޒ[DoYFόn$9"z#/lXMGDo Z(W;eʈ(N"$/ܧx"D}yȋY}X-͈ۗ!>eCQ >,,@@OV_VNeCQ ĉh#r/ոyD7Et/J%74u]+w%3),12j`ђG4̢~Kh\dX({,̢{hlV_/JDB#z-GVTDpɧs#Zs?;"~o.)׳s7O"ƻyD{.:z5Ǭf/W_xJ%SuŢF5Et4qmU7CѬX}XHm ܘ:hf,Qc-!fDT?uh-iDsa-@K9=o!&@2FDAFeѲoyzgԒ@~2#хx2=+%=2iD02 d)ӈ]7x2iDX4t/4;+5ͥs #'@frkD4njZRAߤtĒ"+98maMaFFD3!@>ED42 BDGϖfMD%+9QfJFQfKFQfޒ(fez`DQ`D;fԒ̈(2=0O"Jd#%2 ̏ݒ%uQaez`fDEQr3 氤DV_O%ʉ(9:,bz )%OF #j]tD|.}J!Z(;z %k/]~H@DɜFtDwp .QpQ׻1( Q >e)\D',ۤ(Kz *eY>& FDY׻ш(6i@$"BDY'InODY(',ۤ&,FQmҀQe(6i(+Q6D5p p":Mp"ZۤY&]~(0 e]vI{(0em^MSQVٔ.0 e}^jFOUMQV:)]*":5 \CDY+SD2 \KDYk%{u)] j?KF"wS@0eLD3 Q0 Q[2#m7+vyQ.&cJž)]R" Gtˈ(|O麱 0‰jJ=vyQM~ 0Bݣ)]`,SX" gL({S@?/SMDSUS@vt!" LCD:(tO>`Dz|wcBjS@ ~{uΉ( 0 tQdJh'0̔.JDaWS9Q~z;9Qm7+v3"0)]ADa4S@x)W*" M~(p'p]F8D.}Q¥^~3Rj" 3aD8D9Qz<}w8 +$p_>O-@l" Wz;t8 +#p=R" Sx{}> ?Gk!0 " Sr8 "0-"" S{yyX(, ܻ_M85Qx" ~ip" 7~id" 7pKDB(ݰH" : ːADȦRy%wKH!3hd[6K(sp<݆e oI&XlN (WGRYdz;+dzdk5+B {D3ʖs(*ΛBѽ϶6(tҧԛkD3\e!}RD-#sEYRQmznsE O!gQUH!" 
#L#ih"JV!&j0?GEkDۉ?(RD?lRœ-:e_!u5EGԑ( %Q0*):;hTI!L#zh"r?Ֆ6 %EEDyTRHND!gJ I]\D,FI_߭dƙEyE"ʲ<~4JpkF%7-%Ȳsā%}KYHFt)hq—_ϵ>?RR#ZT9>9(G-'­R N%L"߈+:PeͶ%m, Gp#zRr"ݿ7KjpG|j\:gG'l:4?&b]2|vϜ>="_{|him2r" +wvү%}y?qƪjcw[WOQ3#E^sQK!K5 4ÙH $kޤY:1*d^^SsSg"-_^ Ï=oO<0.?3?{jQe<|8y9\&g9QEW z&OUߙHK$@|eס Dzn" 0ovs>Y_+?-D\O7Wʋgt|Xzv@r" rvpo{{yުOw^%9J (0^sQگqiC"zqX.s˷~UOBحh0P{aBDarŐ?BT߇m_}u#ց>:t~ȊW:L_&9q{h[ֵ:,Ma(0LP:]s<3燥-&!MlsYxgf:,}v$:" [oO; i^_Ro" D?UR(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ @" D(Q$HD @ "Z/ovbˈ(yDl،(wDOS@e" Ėኊ(9Iܲ" ꌢ%'E^KDؼ#wѢ8{\D,ӈ6Q&>ND@eDg$:ѡD-!Ŀ|o0RGa#yUDzz|#u$hߚD:WDwgk3ѫmgw S0';dxDǭ=_qv8m S0'P 5\p=Cq d1"zIC )d1\fP8CqY Ŏ^Pe Y 3'`(NP@C=5pBe S0'Pls.b̞8Cq .g(NP8,b[D/m2,0`(N Kwn9eL 2{ )P,wΝ7'_=ṗK& S0'0xFNf>\2a(NP8EL0`(N`Cqˆ6~;;w7"{ )P,^]>ie;K#N>:Cq;Pۉ}9ý#ةڋE'y_Q{즮z^r1}PQߊ!N\o{|qFcwc1{4M4;bSoy{-z^iK͸rC#j(5M4;?qP~톲oȽS{fFPsAD 0V,FPlS$nZC<"=rwJG*"j(B*yأkJ?k|eYVfD04 1D }i((jlɇN+;:;~cdJ'YE7V8h85zd?\b8Ns~PwGFPl5*Ⱎ_XE_s": "z.<'S8F!8RoC":c":KةwӘP)!)\W  Q.hhCĢ͸fVo(l9GpaD VCq8E q(O~[N~%D Ŧ~m .khC񚈦;vb+-L\vb a64ǝ()DZ.nh;qܻ}m+w2.18DD4mM;rԢ6D 3eOo}D,>;nݝ}_/]-O>dxf666[eTݹ^0o"Om\R~oZu^&sVΉ{"Z{^o8}5;SyE_(E 3V5[:Ytі6xΥ{#zEw`0WDr86y Hӗha[^6i`_tlF{qi=~so'E ?eի=#\8}mH{ ߌ5?9y" B:B ,1&&#%"&({(z$%'}^h*v*u+t.l,q" )y6[&2b'~$1f)x-oC<:S )0g!3a"+s/j4^,r'|1e!#$8V-n!3`"*w %1d7X#,p9U;P#/i 8W6Z5\>I=K2cB=,}4_UUU0h!9T>H5]C:wwwWWW8v?G;Q:RAA?EsGeF1~G%uF ^DTYdGfH"sGgHo ,0dgjgbgdgfg_hhgcgEL bKGDH pHYs,,sRtIME3!V~IDATxluGwal0&7̔YC.X2ҙ܄*T+/jv8֚Fb |p@g\)' Ь|Q}E*Ce(þ 2Ra_QPn)ʰ(Ce eW2T[2+P*-Ee ᖢ 2TpKQ4G)m%szm*""_- p wTPMjQN- z x4`yb('-@Ac0A[B\)u=Mg@.b2P*mFe 6 2TpQ@*Ce(~ 2fa?PPn3ʰ(Ce e2Tی2P*mFe 6 2TpQsaȱ-\y{ ^B.= H=Hi+ Q+ pٽE#y=t~@svÊ,4uBt!OcU:ƀ}Au| =EN+Vra~&8W/Fe8w(Ce g2;2T@P*Y e ,Ps2Tp(ùC*Ce8 ܡ 2pPP΂m`B;Vx, qFQ]Ǘ›*-\YyogK#/V$@QFJvp"G dK18g_( Cp&'fKh9OgṂ)K]򺀖`<`gq ݞ[qd8+SU ; 2T[0p+NX#Je 2fvV{%8l9oP*Cel Wd!pLycP*CelOk:d828vj 2T[Np_qkP*Ce 'N\<R*Ce b?-׼}eH$ȭ+(|? D>3;UHU=FnYIH`Y( )RPQPv9ᦠN?V ̓҅™)(E^zN@)3+9#܍U쭠=&/p>;-2TP0p(Ce .kʥP*Ce xq~!Ee 2Jf(eD*Ce e82T2P*O2Dp2TPn%3Ŀ&&._2T;Ȭgfgg 2af-n)Ce  wB-仏$Gr$Jb@1v @ F|"/#Yۗ<邞ݘ&h m,°f1;! y6_BT>.)mn@Y3ÃY'4vZvMR*Ce p/ ZP*CeN7hxn 2T^BrnB2TpKI*s2TP*-fd8\eEo)oyV] 2T;6pW>O 2TpkYpr2TPn-;,ðpHMpexx CG) \A^Up*. 1-iͲ@#gxֽESZв|%E$݂Qq?2rltږɊF]gU?Y´2,y JOo.d6siHDb+7uʰ3;flOygg(Ce ֲm(Ce ᖢ e 2T2TP 2Tp2TP*2TPVP*!|t@OK`6&Y @YgɅ7邂MS;]<=,8y*<0ȐBܴE4Ć,Tz-5+EQB#kqDpP1|@n%P*Ce w+Ce 2\A*Ce ne 2T+(Ce 2ܭ 2Tpe 2T2TpeȌcჹ)=9+?cN@4bl (i0!e p/R_+xhnOaj>t e<"g*TyTp[&a'%R'b-PPCnN:Ҧm=jր^\ IQ[2TP*P*Ce WPP*Ce[*Ce P*Ce w+Ce 2\A*Ce ne 2T+(Ce 2ܭ 2Tpe 2T{#h ꀰ$s߃ l42-#h0n܍Gܖd(Ty`_L+q܁^3#l/ (h9dHp7q7y83@) H{\~ %t}ZpхY98Adkav e(Ce 2ܭ 2Tpe 2T2TP 2Tp2TP*2TPVP*a}26g2⍻eRR[@LW;>$E֘"no ι%d,*VФû/:HyN.8Br/cO=d",)4iO*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TpeZ1J㠺S 64}h2 7ẓx%=2<"hJ/rIƼHZ:怖*ڀv.#<.˙ȍ< 6`stc*_#+<=`XXr_ǝ5ra@`ƄЅN;nEe 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce }^,22Uexy?G-o6.x+O#V+0-;OΣIx'ó(H-3md1Zxz@-F<0&b\B&8蝀DYYD!mږ4ఀ;s=ɠ]L:&iUP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2T3aрj `x8 }pK#Z \"!'Ox"7'nGA<*ޛE$(#6ག˘gL|'`Y7aT-$$65],\8粻C xOGP9iݗ +zNg: 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce8[QQO4f$7k!bLwlH)?8`>KXdw,6A:f2LZs`0*d |C1'vn^y{M)D nW8pSƐm:낶<؎β:\ }€g1t؀$ D{ '|AP0bRP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2T;/CB%>X/΂ bWT pZJ!( 3>]7 S3'3/h' z0\\r(1|#vl´r\4ːR0pܝ`:LdAA) x$`%oŠF۲zY_&.c%.뤔2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 08|X|* )Q>7eؘμ }b&2B:#Z3ryx;0$TVВvk^d[MZ׭dF=/,L"w;>h{[}$rTwb) 'H&yD3r)xC𕂌(1* ㊀"{MHSc³2EnOECg-L[Zf 0t2&4#ם$?nܙ2 meS[4{ Xc>m*4U,k4*k5AhҠke 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*}! 
|8@?C[" $w SԯyavV))@O3+ŗσyw W- (%#cUbbH{s3Ns/*,9fࠑQ3WhFJQs&`\3JƵA{7ĞMz֝T%8(Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ï/ÜS0#~={ ,X= C k8#Eql+Ҥ;F   -aEI*aH6g &XKAt3^(u _J,P|PڑtͰ6FФ ?&iRP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2T!)vֈ鸫"2Xf=價e= Iv'fn,  ) bL]?[fӿ"c=[08 Q͊!Ȁ\X1_ / <$^'2+O=`%G\s\2K ?6?!@n;g`FT:2`vX XRP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*CeH2 w26}k8PP*Ŗ!*\^ 72Tpd85NJoLr5d!B}/`Aʐ;\GK ZYcPA;!9|uO_W4S`.lC~XGђF "OcE]F57-&4)W%h}bJi0 k3~fhrr㖂+m!J;'XB?Oc=eYBZH-p=S0d& 3 ƊF{\p gKkF*Ce +É;a ? 2TN De2TPUSvQP*å]k2Tpdw d /*Ce a_e8uPde p4Cfgg 2\pvvʐ_;_g(TL0YEp7ʖY* 4*VD)W < H6jlCfAYB*18pn3VgojJFſ0bx9oO'h$Ït^(`65d*\teআ<&NF8J8gʻ!yRY3T2TPvn\(Ce p7î-־2TP. 'e+6wUP*9MvUP*p0w3ص2Tʰ2@٧}oզ(Ce p4CsP*Ceh2QC޺.f Z  B#Ө)T_mG&!T., >E.u̖YtAeU OH FX04#B)yh|ARsQ,1)(tǘ0JEL,ѯ _ rAME%r5Ȳ#|˳NGY42u8o$.TP*^p# 3m2Tʰ2"2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP [0}ņ2U(破K!6h AH(KO{c"검a+v~m{3BHo[Ah,/JK|r!(Q rf@r\%4FpLM}A>8.hG}!xtc)e7O.a)CZU"<1YƂc"s!$`81,cb@Ɉ u>S2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce "z/"Ib Nte Ac Ȕ,7wSA؛3%A>Bh' o 0wBF,(9& Bԩv x fXz g>B&3ye9_x2_eQ3`Y B.ce2]\Nb6)'t(s'_ ~`ᣈS 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce2@!yae#1re{u˂bDɕ54܋W %)(^DvV?( rX._0_gYB*1õ-ǢP9M,|/ ?,  ft<u:m@v7ɚ, Npfh|` vq!nP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2\r0 -m=$TG#Zf^!L9E.E`0} 8832t4gSȓ4LE| gX=T?Ocwxa^M.{tƘ/(u R򬧜G,L:=cLԉ|Fڛ,k2#YӒ`n\gYrd4FU*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP$CĻ.]{xY# f&oy_w/ٍ_|Bf1YC;QVV0Է6| ~M]0ԹPZB˓T0J!ϑy2n\( LԦ@j09&}:;c’(3-)(lIs rGPPD 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce\2齣qP:$}(m7 \ł9eyE &Vr^/dGtQC) Ϣ% ;nQ#H狿Q,AB K &@0G$2EOƗ x4֑!΀"s%X P;EA{LA; v&|O+E{\07~OȒzA2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce2<%@AS7F!̃-)VvE rM 2%bD3pȐ"sCֹ-go$QbQ2; sS犓 D= ^a. 2.Ksg1JaF}A(}#` ␘O{6%ut&Ϣ t'ў p&=eՖ.8Ñ˘a qh2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce R''NF0]=47T%)(nL1?>#H<S?O4aׂ/ 0?ʲ>4i81|8@LUqc,;q~uh /`NJZ/Ctp7F<(Qg"ә". -Ģ&AtfNAiބ)"oB"tP:=#拘ow(15i9I',2GoYtSţ de?,~ȣn'Fn 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce2??1g;r?= ?zdeS[L}D/"m)ݢͲZ`;˻,zD##"Imd!U,;B QgJ$+ b#W Ɛ!G r*㐓OSa)(NÁRxGú ptG~%4=Ɓ*e 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Åῶw Sv'B#ArWvnH%Y{.ȂhdGR} Y=Aw(|x_ ͊g-2gGNJ42Nx`~_pwÓDf *qY9;)FŴ-W@?XC `J 3%& Yr"F?;clGȉPP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2\Xޱ`#"ϑ6CzM@b2}7K\^<ూcxy2$ShFCAwJ#(<iTjF#􍀖3/J t~.(eƱ{GS(3gWK#C:> $xdasY`vA5;\3H:y$ B Y4 \8P*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2\|{o'Gw`:2USJ* p1v9 P\ M Ly^F' j.jgW n{Y@s4SY⮀a?ݘ8iLk@ib"0U'Zݍ -7',(rn xD0Jy:ף&Ƅ.0܄i"no0eIP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2T&p*/O_̣U UN=c^ SsIG1EAؖld xXdQZdt"CAȻD#шyx gikQYv\3Yx3h'J?3<.ɉ6DA!z܍]Z@qr_ȴryaTJQXwp 7aq?)+Ce ae8#09Ln䫡2TPWé8epP*Ce[Npe 2\dNb8 2Tʰ2g65GOiP5pA^Ha9S (.>0ɩob/MxVjP@-A+y;ELwܼ2 3Sz2:̓oEnV%P 1+/c~c`t_>K )gvAD0Ԩ3 Uh0΁ŊPep -e}dp 3PB`eO& 2T+ý*SP*eaWݰKkF*Ce {-q49yisO+Ce ae!Ew)SP*ŗ`򿕡2Tpd8&fd(Ce ae80P2T}p߈2@_2T .`h*Ce G 2Tpe\w ΰ#|^!/ &8P─H72)G`.c)0@mipgr ж_._V.ْu +X; jAl0yQAO wV|K7 #FYO xeC#mpYlOAHpW,Wne/Ne8P*Ce2-־2TPVhʛ n 2TacsX2TPWc 2T=`F#O3\VP*Ce_>YυP*CegN8o0u] 2Tp0G_ ?'ARPL.pOS@ *_bz~΢.#LLddȳ(ObHf~ .{2`yAMX)6(z2azt^Ac/jq[෋QJlrQT&< NB$ njQdZw/'۞h OpIܼ3nBFg >L ]&e /(_-ca` s"n`lD23<m暓&0e\#'/*~fħ r^g)P{WP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce BpS) 9G)02<2,1TgK,&glHmvARB45Rx.ցkT_|0L`ŤAJFfiBaj5ބ/``?RP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2\`2n9=#x Cŕ]:O'A Zbं^L? 
yR.jKZUtJmmb%Oݼ0L+9_#6狚6Gzb[ ҒA s7R)'W TWR3@h,7YkXVnw2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce XȌ$E ev4'&Sȏ U]P (7aCDvt?JAy((x?xY]xHekפM/9}N *p^B`.\k7wKˡ IS'G J d-] WN >*0y-`Xot,&`aWP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 29%рthO3 ʝ iu$~ı6 ˅F%̔!w_kěud7a2iE2׭o8 [ tğlv(elQstb1y>sY~`fsxCpglO~n ]#H^9&` ;3 KHLr 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce2LixjpQV0Dm'.YBO$6wiM"{k@:\^];yR|Jr Yd NP.zf%-Z=>Igj 8kE!&i[VP OfUE0DGI| &d/\sph' GX<6F wIdIJSe 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP**rh@Ɵ <.܄o2#"s cmYM0̀g 8k଀Re,Fͺ$O8#Al0& ;E. N.(\ U?#x+:SAq֝C=a(= a ; FecHhi+ϔ2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2L^tH2Vl@Y`$&gy$EhW<g4-tӇ3л`-rA¨f"77aHU?g^0t?Z@v8[3_Գ,bb~/cRš`JQN"$/ :Kd2ѩV[`[>xY 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce2L:6OD&۞SǨ ~. }{zxc{XtavIIAHA2ƄZ}6¢na0`sS^0 P4P)` F) U("$MA[ +;:!K5+P!j5;?NZݲNDY[F\o.|0Լ³_+]e 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP. @ɠeqtlj]W1mJ#o6D>NKn#.hnIbEʖyG>c>p=ͣAeWCL EV3*Ga`c ݸ& Y; C r_0kwn@EÙR^_Y0-jr}W &{E P'saw`pr>FTP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2\d 0\FX[,u{{?sD`Yd A tGԓv{tbt BA{c-di~rxAU-+4YȕT%y%oi })%E < C`(B핀Q;]`/ 1OrXJ.^5w3 Wbi+{P*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2T&CvG (z0c&(otQCtG ɽ zsv*;<;e<]kv<΀~6JLs@۸0fx/W 1L+crZ@Kbx|A%R݂3W[;䐀aG_5˵gCd9B-M@0.#G%3LE:~1q?"C.ge2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce Ms:XG 9$2E[2E[} N@P?ϐ,Gt܄Wʀ)l9 jLȖ M G2rۿ_+?Pe;ۓz5n+DG!dqx,$>}9 }, %7 vG@8di %0qO\Ƙg  JKO֣p0`ו2TpAea 72TpAe8p^pĺ_2Tʰ2t|,?2TP. ^lOP*CewqpBxcP*Ces|Lpj 2T=q0EQ*Ce ^?wpܗ2TP[{=W_ 2Tʰ2.VP*Ŗ<2<$o希a$ I0y]kS܂tbا_/: W nJl@^l19Ѷ:6ؽf%<"ǐ3 Np vd( ,G'4Svr|wyЗ>(BFŀd~ga ׃ >Țf?(=;<*OYroAヶ#L1WM Xms+~tB(m6u~`$2(_ kGgB>9dPf)Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ö JbqC(gZZY4Gno0uY@هE_HAhD|1ZBݘ&o=ä0<[F-FV񐡈l7N@_O<́%V`"DdA{Ɵ.dKX'+3Br|`f0әh&)Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TPn<dʃATy BsSк-hhZP9T='smzM42GKkђ<2*$wVMD;00ZL+2rx츀nqHp24ϲa!|eF/^pr-5ۅxK 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce @b3|W還F4r҆7 ҇~1jv8 wI r<p@JzoY;n ("x} CV lZY)8Vրe1۱;oB(5 cŤ'CKZytS<@|Pc|e28~m2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2 ;cB{@P RGpUgG`* ⏊L҇D$ `$R08V|OA1&TLd^/?Q: ,:e x2=8I\2'b x༌%&:, QP`R֔5 2ē}P?!| 6ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*u`Qb'JB:]Ko U23Fas5i wgH x⼭ryeZ9/.8N=~--(.z!@#,v1 K|0#vA0#y"7aX F iv.cƄI?=`=2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2\iN\/Bꉌ ůsCuM?]#8} 3#ိۢ4*SvL?+ C'MJn+ܱؠI'ChG P< -dc[ut w ^aA9" /"8>`Yr=S8@~yP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2Tp A# ~T7|ᙂϟ_V*5Du+O/aC|h9~ hY4JCHS gt pO Ԋ1O, CzJڅW tXxz f"TjIPږEwcZ~.`y`xD%쀡h2ݔ2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce [? \H@K*jtAc=](RS{چ\Z$ד-aaF^x"agEvc '5`p8Î͓i0Aҹy!|y2nPQc'|0Cw,|Z '9@*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TPnP!JD8,(JQ-^F̚ |]. YYC7>H^Hn{H& hW J62*GEmzPIpP%#DŽ8E V\)Vb'J'Cy#68cvM2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce "7gS?+>f  G Q"cP""h )tqTIʜ,yuMr3rDÂ)K dco% 8H({$PRA xfqͣ/ ?DZ{R_*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TPU'T0.="(2iR5ږ%\OrַWHAxc ,|NJ$yןk,|8!eۑ9{JF -s,a?>Ka6b DrYDl L/М/JM)Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TPΛ [8™tT4zd|6`)56%@J Pk7O'3HjoP%" "e4&}Â.vAAB>nq{(q=㐱R}졁=. +ŀX+ WsQ|=HUq<%*%ûPP*Ce 2TP*Ce 2TP*Ce 2TP*Ce 2TP. U2Tpad8(v;/ί @1{#2??! g!IPyk EF֘0#mF.q+0 Pԗ3_%\' h0 Vʐ8i`,@n-!K۩Ap`~9TP*Åa*\VP*Å?O*oP*Ce2&P0o#_ 2Tʰ2G gP*CeWNx2TP. 2xkPVP*W_ 2TpAd8O&)ͅ C6o*!'E4fA[%>J?[SpH@ek#, =qlPDR/$m\Aӓ .{'`y#Vd]Ȣ$ F._0qm%_yjDgKa38o,dy{2*Ce 2{a q2TP. ɰS2TP. cxe8Yͧ 2T= 2Tp9e8Y~ 2T(f+ P*Ces4&+Ce ^[t5;3;Ce ˰sBe 2\2voݪ 2T%)XOoԐ`EO. 
PynPoint-0.11.0/docs/_static/uml.png [binary PNG image data omitted]
U@_92v02 @8/^,u}dߗҥKtp-|8޹s(Q"Vٳgp㈫K{lvnǽ{Lgغͯ]V^{5>|(z#G:?ޤIӁ=I$.9sF4?~oݏQʔ)~޼y<ܻ8@{Q'QF9dɒG{>\qCO:5y%FVlٲi&ɝ@'@0޽{MgҪU+o/|@3ednJ# `:@ DK֭[q5˗/:@vrZzބQ|g@+0tPfyS.ZHի   @L]o|7D2ā ĉwҾ}{_:@60͛%ș3;a34 `Amo_V y5@'@VI+Oiذh7]@!OПѻt"ݻwǐ ~pkg\rad@GgJr$k֬/8@@o{+ ޘ@G| M<xB@@NߠAɟ?  |UlذhҤ ><2ʪ@B"O1btQz@\ff̾e߯hgvJ@{ 8qBTbh7@@[ފ|7f@ڵC|ժU?c+VJ*Iʔ)c}uDGGK:uj}ٲeRvmgǒko횯a~@vYZekJ֭M9͚5!CX4@I&ɰaä]vҧOY@@ |ڙ_ Q9 _:$5jԐ|4J   nm&n="]'O|K>L2rΝeܸqshw_]m/KD+a] j*i۶9E2hР@2eaޓ~y{9# ivfߺud͚U4̞3gN?P *g PB8@@o{+ ޘ@Gh߶mKx];O:U$IB׫Wڏ;&QQQεԬYԝ4iR^  `?|rСUVҿ?0  |榩-[ʀA@ ̬ܿalٲ_|A=D{ v) 4%J… X"5! [o[oOl `@ aÆ0;<( p9Ūׯ]zžK IDATYcW1KD@ڭGTtR'ѦM۷'q > hWqJ<ا1@'avZO|0~}Α#Gj@~m)[̙3'b@@ 8ڃ,ڽ\@Ov ϝ;W6mT0ao>NE}|h D;<h5\5n8gzÇ'Js>޽T\Y .,]s x$@#&KW_InL}d޽-]/!YS裏A@0׿e:k={3;a0DE aѧYk  vo>3 w1]O>mھ}*U+y կ_yMBs=޽{1n0ժUKf$IkڎCt>ziBksaŋ\xb[ndȐA{ڴi3NF@+dңGN:9=WE VЛƍlr@0vfoM];?9@pXjmVW.'O@Z@d@{xT߁vaǎ&`]΋-u8ۀΙP㋿0ҠAiiwر3F&M*ЮA| GGG]ܴTR9?~ Ѻu1On#@^@fV&z}@`,XpߠA)@#f8m0;/ @w{ڵkPv-Rtigk7)RȾ}dΝ8Ν[:$ɓ'Or=  hϗĬYo߾x v9<B a)h"s#՛o)|IX@7 {۶m3av̞-[p["Q`…ҽ{wQFqfB@H n$n="Cvst @{"Eȑ#Zjɒ%K\:߼ySʔ)<'g̘!ɒ% *7ݏKzc swJb\ֶtRI$IKvTR%.]TdI1{9Y"  |Ψaze@@ sLUNO95s! `K=ٷo.9s4 @̙#}Mʰaü@@ nv ! h߰a|84LԼyxwýC{&MD;jѵflGJ^uqϴyǎE;tmʕݻwOŵ=|PVX!SN˗i 5 3` \@{:~7x`3| m۶  'P7  8]Zh %f @ b# u@Y{:vO8~\^CӧO?S|Iѿ?>}8qhP\?"E`9nݺ%ڵ];߿ӦM+>Y; ^@{5ɓeСf?PZn@VZenQ|g^]  เ~@;رC^|Eә0~]~b6mo߾v`  v| 3< ]@@` hsM8QnnJU@P ]XUZ56@6џav#H @ ƍ'#GN:I="n},@/@=@[1G @  $ @//!CHfͬ_4"D wߕ*U#z,@Ph];ܹSre:fN0' FcJݥK.G@h {hޞP@@ 6%" @| -`̘12zhsΰaäiӦ.@l!yfi޼TXQf̘a5H@%pәfYd ̃ A~IO>Ү]ZKA@` hh܊3@ h%! q hAI+1b4jȺR n*o+WNfϞmX@)avwIܹMgvgl@  $ 0@Zlًeu  aMԠ o}s*@@h܏?X&L`&`{793! mۤqRti7o^  ~tfwٵ3{̙02C Uo߾2k,:t;ve`   Х˰ @|y}   xcog裏>3;V֭ @@v) 4%Kʂ :# ` kg]vIݜ>i$Yr AسgԩSG .,K. L @$ h];b/h^%@ kNV\{!F@ [o [oOl @@@ C?Pf̘a}ʔ)[0@@ KZ`|r@@[n}@Zl)ׯ?\V5  nv ! `Y" \@{ȷ@#s1˥ $,p!Qϟ_VZ .fDEE3fD @ ;e5k/_ޯc3 C@@@m,@B.@=[гgO/%I$2}tTRh bv@<8zTV֮] _tf0{޼y/ @  6;vTRA@@ [o [oOl @@@ Cݻw JM\r+@BĉRJɝ;lذ+9@{ h];&̮ٟy{z@ ԭ[|Yd)R$`00 Dv-v ! `Y" \@{hk׮xbI2^t¬  p)X̙S6o\ ~ә]376f#YE C|(gϞ$IDRY  h];ς 2 bAŋTR ;vJYԁ a&@zFz{BE `6d ! О-_ i7cǎI…MCL@ <r)ߗSNIãhD@OW\1? :ڙ=mڴ~a@H3g|={vٲeKd@vm;v ! `Y" \@[p9әbŊ0{4i|+@#ܹs˽{ĉ"E0R@Hٵ3Ç͍f7(W# gǏKժU%O<~z?p  `i*Bh&D@v߶&̮,YyO>`\ @ DEEɝ;wѣ| d@._l:kH"gAYr @`<(5k֔_~YV\@"V@@@m,@B.@-Ўfeʔ1L#W @֭[r!I:ux/@@@ڙȑ#&̮yJp ~A֭kf-Y$$50) v!v ! [o޼)֭ɓb{1ŋͿ߿R^dT =~ Y ׯ˞={$}8V @ P;=zԄٵ3;O'% `u-[;cr̚5R XT@6@@ڭj}<]C/Y<|Pׯ/-r~z@vߵkh>s.,@"TXbrٽ{KigvvU~@ <֯_/x2c@ hhޞP@@57y޽]b[ne:?o>)T5k֔hI45EU X@{›7ig7n믿n>Á v ܹS^xs~@pkgTREJY @$ \Rڵk'{I&EY A@{@`C@?;ɓ'%W\..[Ljժ1^n]bw FC@D hPj_?~1 @$ .]ZΟ?/۶m,YDX  rEәRxqә0;/ @pXdtvرT:" [h3[ v ! [ko޼)EӧO; Ok׮a;ѫW/>|E5  ~hY oRvm7n@@rٳOm˖-. D E0vfꩧ"f},@ϗ=zHÆ eȑX4D@ h;i$ОhB@Dˢo>'O\ҥK'͛7w>Wq]]SLI q-)T39sf9pM>p@0 &iY~zɘ1c`7)@ TXQN:%7o9sobfB@ f'N%J 5kWy:tha`@l_*B"~12z5k֔8ى!{$IDRHaz!L<^=w\iڴO{?yvm+[Wss2 V@=vԯ__FM`t@PJ*&Oh˝;w@p8Yf8O>$L . -[ʀr   =p@@FÁ  ԨQC} =0! ?MG];{Ӱ]<#Dرc͓'6g@@| Z`!X_FGbj]u֕hSwժUXbG=SJVܹ#ŋ#GgΜvKG 8yK*ӼysӅ]… >C^YZÇ%uԏ(kΑ#߿_z)$@3^f{{og@X@|?/_\ ,+t@; h];ks}];f+##0rHӠGҩSY+A@ h*Gh@ X5Ҏj>~xڵk{ʕ_4w+_.YD&M'܃^z5|%K5w5z ͛1cHŊM`]cvΝ+7vC;vt:ox1㍾w(QB8`Q ħM18@+@]dʕѻz.7V@_:uȞ={dҥz@Mܹs3#̮SLnˠ^@b:tL;gΜ1w?4h SL4iҸ|=$Ժ۷orc|Sb>kmѡCg(@;# `=ڗ-[fnӣu?z;IE W7ߔݻwF{@I@ڙ]~WMgvᴃԊ 7}Ł "@^C=*@{FdΜ9$Iv`Æ RjUk44w94ܕ5kGB&Mrf!mƹx_ w̧'5,::Zގccճo>ɘ1ccϝ; @vq\|1֬Y#ժUz {M U Jݍ~իW3 .иqcٶm̛7ϼ @8h];SQv%;G  {Q\y;# [o [oOl K]O>o:vrylvlC&޽{RH9r䈩NÇ{qlٲEʕ+{lL2EZnҥKnݺs܃/_v k{e5otf^M  (;ڿKٳqҥ3؞(H.F@folݺUfϞ^X. @ 9s40{2eLP*  tIy>Ivڞ_ș  @ {9hޞP@@vל8qtv-Eu%JȁӺQ#G55k4o $M4Z5 ^~}Yh9'UT~ɞ={9<ϒ%K]WX!ZXzԨQ&MPz$I{! Wp2 $Jnv}̮nݺI׮] U@o`߼yy]Ŋ@D@ߣOai (6mڈ>sob@vm9v ! 
| wec߾}R`Ap8Ҙr k=eʔN+Tn@'жm[YjO#W#PԩS35ȧ7:'O<324 _|w`)Yu @+.*B:[X} k@Dr3#Fvwk֬qxk= ޽+ŊsK.-7o~ޚe@@I9}q߿ѐ@/СCY|L0A}@$av~9VjA@ڵe޽ ڼL %@zIz{BE `} 9sFr1}M$Ydq̛۶mss1ɓ'3fyUs2n8\΁ @>c=x`f@@ON:Q@ǭ?˛)@ T^]>l?~GA  ڭOڭ'T6Z… %K't=R@utРAףdΜtLO*cԣ]3fpod͛wڵ{RuP ^,@ Dj]uߍСCwޱ^P  $7/ZH>y7étjE`ѼN=7!x$Preѯ6lܹs{t '! ڭ n="v%J8{%ÇOp'Ν+M6u7eiݺuicǎ2qD9%KHҤIsǓ̋+fzhP^Svhm܃ڥСC2eJO.@@$5Xך4i$MA@>_e g@ϟ *1Sl @9۹sd֭-[  `e*BX->{lΞToܸq;W_IM:UZjuW5j|>>}|̗zΜ9#,w5k֔h7&L'NF!w z=  @`"->rH7nAoذa`@ 1?F&p әf?!I$Zԃ @QO?$;w^x!h2 Dv'v ! h?vDEE=+}whܹ3۷O ,NwhҥK%Yd\TT鑏/[LSz×zrUVu3d۷˸N:رcvKp9^{n)Z's Q ڍѣG[oDIB@^|Cui`/V ` kg .HŊMgvVj@@B={HCY s# @ hhޞP@ [nɮ]L'3ݓRti#C ra~?x(PeK׮]%mڴzwv6lի:tHz&Mm}ѢER^=_dLkv;wnQ{nA@ DJ]t'O6r~ԩS'L  ~;x`"@@ Ǐ7avMsP̜ C@|m{ԩScJ@@ nM%n="@ ]|tcנnc>Gl+VLΝ;W?^pa_4 kgyG]?K=>4oϘ1Y ۻ5j$sͽ{̍0N]7n+?! @"!ЮAiӦ &x)^9rԛ#GٷoJ* J zʔ)=@@ hϤFd6@l*0d2eO{=*l@P hmkgv@YfmxvY  hk@@V ZJj֬T7Ǜ7o.h.]G- ݻiM4YfK=7oޔE:;z.]Hy}cCf~ >|Xdɒz -[F8@ s]t3g4xSNjժ@adҤIҧOi׮Vr@B)av~%\&P XI3gN{`ST  @h^hޞP@JvrOreI6<"c >Bqlܸ<5C;;**Jx 8@@_*Ζ@8"5[eP^z;m&eʔq9ĉ+WpX25" `[p Cϟ/I&5a+vX8 5j;VwnHǁ @4̮ٵiԬ?r  T O?-q@@W:큳ed@ N{qܺuK *|]̙Ò:uXҎիW7o4j(t`f@Hvr_er5yc˖-:9 @+0f=ztUu @7P;f祁 @W\bŊI O@@ nv ! v $tfV#EݻwOʕ+'wv~N~ȅv%̎  :Ю7i0A .?ϟ7К58G7X^ N`ܸq2rHԩ J@fXjժj*E@ "ҥK6-۾}{DE  @h=Y [oOl @=o͛75kJtt$MJe֬YΏ5h@̙#ɒ% @<FݽΝ;Mm;vHƬYJlL'N{Sz]r E`ĉ2|pi߾/c2 vf0k&ӦM @8uTXQr!_5^  } ؅F @CРXeѢEB {ѢEz2GeŒ"E  ీ?tYY`駟W^yO ={JҦM+7o4;t萤Nur" W`2tPiӦ׿3 0vfr av^ x!/[΋+9@p nWv ! ~+ vRlYٴi޿2dsڙ}ƌCmT W o}Իؿ;I$W]c5^Bk[,]T2d ݻw77ԕ,YR,X൜ VZ8@Ĭ5̮<}J x&~U(P@VXE "@z/ *Bh&c?cg1ӧO+Wtn0`|&΁ %ٳG;fZx\pτVPo,_\2fh9k,;v N/}?@Æ ]!̌ qfW^%q˂@!O~7͓  } ug @ڭ׿%.]rT޽+q='O.2eFT ^ ۷O *5Yflٲ/]= I zjɒ%,[̫jϗRJy;,# fΜ)f͚<͏S0 r@3gG6m*Æ @D 8p<0{)@7o͛K DoF@@| \#8[FF ΋@#^reٸqk׮'~rܹc6m_|ф_z%s?lxdڴiA @@U@S5j$#F@D h]o~T^]&O@; ]VZn{v~v@$@O~@1 T@R .f͚2i$;sv@*0sLׯ4kL ױ @%@zMz{BE `6d !/Юim۷M]CnOqә]C ŋ7a 2:l:uD5Jׯة@ zjiӦT^]&O@.{5 eY' @M&VZI==! @ hfhޞP@@ 6%" @ kn2]vzcz.]|ٳ&~!yWeٲe.]8K6/}vɜ9r! O`ݺu&k%8@D@ڙ͛<0A@'Sz\ v[oOl @@@ E~駢Ю_|qIy7ѣRlYә=M4q_KŊ%o޼fo\@ƍMwʕ+ 8@HH`Ϟ=ka< І$G]Jn7W# [o [oOl @@@ ڵKϞ=ے-[6Yd+3?hBڙځʠA_>L$I\ ЛΚ5k&*T3gv2FG{ kgv}\8a„_ @@#FǛq;vh2 @@@6@@m,@B.m] pႴktYCj0!æ3ӧJ*3{)\lr| (P 9@#7HӦMW̙I@ "~ә0{Dl'@!CȔ)S_~{AŔtE-iґ}h^D!4i E:D^Az R(}gDH2ܙ=g L~w_@ hwhwޙP8d ~OUteҤI_5W_Idݻw|鴮ٟ|G]ɓ'h1 ظqԯ_}<0; v v !@hCf  woO=̞=[z%׮]|nkc5~iy7Mݵ6`Q+VLf͚%ϟ xyFLN?RH%  O-[H:uhѢ @ؾ}̮ajժɈ#@B@tټ@4hY@U@N@΄@ !E@𻀷v޽{Co>ӡ];k[|gϞ5 e˰a?6#Gp{׮]u?p'1@@-dҥ2n8\`Y@;E;*B @@1V7md 7o4{K,DFF C\ɓMG+ʄ r_{En߰aL6큯iР /L @0 go̙% lݺ|?H5dذa  իWZ\9?T  ,ڝwڝw&T[fFl n +"sΕO?T&M*W\1׮]l<\Ӻ;w m}\/k@̣({ @ wƍRV-ꫯB# @ʕ+˾}dҥ1 @wvlE@-L@ h'OJddK?.-"}ɓ%C &Ԯ]c;\]nMgNvׯz~I,޲eKS hr^z ʐ!CN:/5u>hhpg|]"! `Sx=4 @h ~N=Ν"@rСCzj钁qfT S;d;Lxv>A@*W'_~taÆfݭZy[o% ϟ?߲4GEE=r k(JhWwȋyO~裏̚aaaҳgXl2i߾\vT1}p/TPx! N8s)RD^z%ٺuK6@4̮ݼy1OֲxL @@ Ӵ)>M+F-D  qتڝw&T4~ʕ,;wn,}JlNG?5kf}C;YZlـA|>bĈ>W5kVg.  KsI%eʔcǎX^@]@$@ c Yo>֛  @|Wξg -maeRBN.dɒ:sr.l8l" 64z?{mJ=zTPev @hժt+bҤIf 'M< iӦ>«cqիWۧL+֭X@G\x]C ;^`zѣGͯ&ԬdOrXpJΝ-={ \cRR%Kw$ڙ^kd,M;׻:?N*Z@p My17ٳ@ c3;a ?h 9s?<3QDA'6  JygBE C  ByGAN\F ӝ]3 Ǐj疝;wuKǎ3x_c3gJnk۶kNz) ܻwtEGtt$IIJ:[jժYX1w]o`   GhHB?Ru X+avm>a/vfC@Kd"CYE2 vv ! 'x#pU5k??v$\vKΛG[o%N2A刈UlY{ vѣ=M6柭 |y5tRM6۶m+VtܳgX @O7oJl$a„rx@Tয়~2 -@p~B@ hulW"[)^8D@C2PÇ~.]ڄj44h;eӭX@ oB:ϟ/իW'N,֭ >GSN޽{zV3XhwMPη QGFF>kvӤI) X&h?vc =0_^=/o@ { ;P  _|>1cƌr1n/6yd۷yANE/+; }cܹiӦ{D BwG  X~iׯ__  6ș3{$L  vo칖@=̊ 8N૯xXwvWr Kq{ +о}{3g k]?}1ze:MV6m%Y!ƍsٻw0;xЧ*+VGAEqۋ+@ x})Bٵ3۷ 9 @ \xѼ"E ٵkWUO  4N;;*B@,%JUCeʔy)SHϞ=%,,z u-y]/_6>י3gJ^"0zh0`yh]زe;|(_;] x+Nڡ];3@_45;ȠAS@8sh^zInB;g  `v;T@w~\ @\,X 5jԐ\rɒ%Kb@lkB׮]3v 9\7hYfٹs?B`ȑG{aׯ_w۵vsԬYpI#xȔ8@ K,_ɡC䩧r@E ;wfk@o&h짟~c%,  @yHygBE  qZv !  
ķ;`QʕK,YbYMLo}z2|KKsJ8lڴЖ~ȑ#_~ԭ[7ʠضm;ிI;Gܹbl@{.3fcǎɐ!CN:NrHաaѣG=Zj?n޼m{NjW=Yp"$.^(H"E"/fiҤ a?Jv [`֭Rvm)R̙3@@+^r1v[X@7]ko…&MQ6hzꙙeʔz [2e${}ʸq#FjժVtٳg3g,Yr !P`A9wرCRLf @ڙFIdgl@駟AkC# ڽ&|햓2!  o.&OlYʕK,Y⌍Q6~hXuΝ!Cx]//_6s $I$sraf'F%of&!+믿s@{,d8 ̙3_z`.CF`,Asl@97+W x#@={%n+" ~;n@ɓ'7{ٸqKίbq[l޼YgB)SDhZΰWGf;Vx {dЮ~lذ{':hvxWڂ!&Pxq9u|ҦMjg @@ BnZ+ }P4 5k<7"h>/^ג%J%K?Y@ ʕC'dɒ%WR@_ \R6mj$KyB@ӧOΝ;K d/@@ ;8 ;L@]T^].\(C :uĻ>.D Pˋv{57~ϟ_;'gϞD|wHÆ Qu s57hg^{QQL ܹp_~? J*ƙd *T +VHl|_+"}F@!@Qv ! ;W+ʄ ]"(zsHݺue߾}qѫW/սXbSxlƶmL~2//T\裏dsϙ0{cw!Bov7n(QQQHn2eq, p@ʕ,Y"r@azQXX0r@I`ܸqҷo_iѢtݦU@BE@N@΄@@x ή]aݧ'Onꊎ$IīF.B kB׮]3s ?hhWvήOfΜPv.QF 2d$.ںukYhza"ED .\=zc*v(Y<]@)PjUٳgW_}5p 1thC9@pȑ#eҦMԩ3*@;*;*B@%ή]գzpB2dԩSG ̞=[:t`jE4]k׮-C>"""\\` o6#G;v(m۶ ?ӄׯ_/ٳf{gFsy\O2pc3-AB^aÆӦM?! /KJ-aaa&s @NMǃ  x#@={%n+" >ewvݐw'OnmKGj`~Fbd֬Y-nif]7f̘tk''qㆹq@o&j=s]%{M!1Gp{-YI@`ntWL`"{B/@GD T <<\͛'RfMb  |ڝwڝw&T Z]E˗OlJk^5k&˗/7.YDr ֬Ycް߹sYt&^@abW^5ٷn*Yd1a 2ؽ,#TG}ǎK$GW^y%f@oUVIddLŵ [li{Y@I?_[oL[c/  ~@Ēڝw&T J]EiVó+V &ĪV^@0 hкRJRxq:t߶Bc L h)S&t k6[l&H.]:gBVʕ+_Bi%K_SJVlpĨ> @  Ν@pǛ  7ڽѳZ2+  `?릎;&3f4@ +ӧM}ܸq}Yjw?&̾k.^;N:B8^ĉgzԜ3gN{„ ' DЮxرoX5- 0a _vͬAq 'MGm'NH&MRP!f놜Uƫ@%_Obvo?pN4i<Shp7oy &  }2933 .Oڽ{,ZH@F@7z\KWfE@l@?u\zt3v ڡfO1&հk׀Ǐ/*U\6 \tɄ'OlV05qQSÇhѢ)8uG(8{{:@>:ٳeȐ!RN|@lfoٲtͦ@]bŊ/Ȳe$GG@? h<`y;*B@,?~zo$E5kVz׀Zlvh׀؊:|/k4ؾj*x6zƍcӦM{g<ʕ+6n@1c=cNB@ biI G SLr9|<)U@@ h;( ;L@]C2{l/nݺ Æ 3ݻg 裏Lj[l1avoȑ#@ tn޼ѽ}޽=ӧv 8PFehK @,X`ѺukO?W" @H {2d CҀM# ڭb6V(2  `,]rL8Q*T`S.)pIjכ^tMVå^zZV;,}l5DC @ gϞ8qc,Yxܓ&MߥB\/#FHǎm۶!@ fW" ܺuKf*O?  ^ h [Nʄ  4Tc?,XS!\@l:+f/^<ֵ_^4i"Ԯ][ky! ~r}\r2{N%ŋF@!۷771@.;k\ئMԩS' @@zΝ['N,? @@ >f5ev@@KGڵk%SL@3g4ӧO֭kڹac͚53ݻwMwwr@bۼeSO=ѽ=ONb@@AڝyAygBE  .\K)d׮] k׮ l?{|5iDz BK.yK3O` @7ܘעE ޽{l"^̝;|oC'x1" 'pA)_+j*h@@{Mh-'eB@@rʉv!]f:pԩSM{٬Y3ѣk8@~;wz޽~Oy)8A`ҹsgywdРAN(@ KW#! @p?2mڴ@@ h+'3"@@VѣGˀ>]ں#@ B^C G1qDlzM41I,Y3Y(w}Æ r?nGc3@f͒O>t/_0">p}e @A+WJӦM_`  `vE@̀ @e̘1ҥKiժU@N SJnL1裏X˗M$IP{XX36B w޾sNU'OpϜ9 U0%x+0w\Zj0@O fA78c X-?FMo;@BP@@΄@@[:v(3gΔKm]@ &O,=z0Ou֏={LLȝ; UP@ .]p?|ǖ^~e{ʔ)}ԏ@P,XܴWzu>|xPM V{JޒV2 F`…Ҷm[V1"6@@ڝ BygBE  ͚5˗˄ bŊ \uO>fS]v5q2/^l4UR<>{q"-pqw}Æ ryɓ#O~)@Xhqﭷޒ:PA 1۷71@@_̞=[:t uԑ!CbI@@  &3"@@V5keܹRpa[br}k~̆>3i޼y77zh7o9Zli:= @@ЧXlܸ|hro%A 6p=^o3fLO -Μh IDAT3f̐N:i [|  (iӦI.]aÆ2`G 'gA@@e˖Çի%K,-@ F%4[4iuLo5s &z=7 *p߱cV&MpWuԍ-[fnT?R  `@0vFm׮17  HٳyoZߣf   hV [oʌ  '/^;w?Z)/0b/L!}ƍ5+ͼ 0/S0 W\~!mO#*U@&5#H+WJӦM|2i$GHQ /;OR||GX5@@DiݺuC@Z@ׄO@rR&D@-/vծ1|lׯM:Uf:gʔɮ%8'Nxi1G\< & =R0NXf{O2eSʢ@}b  W㕶mJǎB@ 5h @@ \tI+ɒ%ݻw;P*C 2D f޽kPh',޽{=n(x{…ikԊ15RJɷ~z(ӥsfY f @@_;t ڵW A$@yIygBE  mG2eHƌeݺu  lBPPNnԩS&>k,n4iL^z>@@شi oذAoQ~ĉ=g˖-жGT@;RdI @ iW@C@ :zhҥj*0J@pvv !  رCjԨ! ۶#@ _ƌc6!5kf4͛7-j%J[M,k<ܼ3v9NAE`7nxO'b3P知zR@Y`Am"@ |ҵkWO?TZn l@*бcG'_|ԫWϩeR vv !  ;thѢtݶuK>)SǍ'+Wv=j`Y $<<\jؚ֬) -[[n(Yvsg QQQ[oI޼yeѢE]!.N!@p66? 6LjԨ*) @I@N@΄@@(F⇓ 31'ХK8qTP! 6n:;vz  )p=/N#6mZ'o@ttTREr-?C"@ b;w,~a M  bZŋ.bGv@;J;*B@l.[}hA :u3fc=&&MrF1oMsΙڰaCln/8U>}ڣlٲy'NԭPؿTT<`ٲe"$7߸G=NZ@=0YbE@5e Sb vv !  hB.]*ƍʕ+۶#:t gϖ'|R"##TR/_*y 3Ƽgp: R88]?ã… ŋwv K2@@ Oԧ˔.]کeR vv !  Ԯ][njBEm&Fg h{޼y0aBәdɒ.8i2"",e˖̙3˚5k|> "v L2E>3av;@WlڴIfΜ)&UTm6+f.@}`Ϟ=ͺ}ς  `@ҥѣn:M @@[ Z=vM@pcǤTR/Y#E!ݓMի%uԦKCy&خ|t.9re8B?ޘ{]ϟ7OykElf޽ӻ  @ /^\N:%6miZԋ  C!3"@@;wJj$pB[`Rp۷M];֤K΄ٳg\ɘ1c$""Bnܸa*Pvl\#Ν;pW^fI쾔g-@Z`w  vo@̈ 8R`ժUҤI)WL<ّ5RX#p-f駟S4+X3y͢?PԩSͮyjoܸq풭 #pa˗=67nى^*s$IO y1,C f>͛c.A@!'OrٳG&Mꌢ@Z@@΄@@[̙#۷ZjW_}eL~ye͒)S&f_.?^"ålٲ!8X@~oܸq=wO>G];m3*G-˥ĉwަ=zHf^  d˖Mn޼) z3" F@>;L@E`矛b3 |ڡF;o۶td0vhgN`&~Qsom:gΜ9v*@ ܾ}ۣ{ݻ=jѧpޞ1cF!Oɚ5<3r(*@W0;  }ݻw{<-O=! 
{7Ԋ%G@D`2rHرm6Hv6@%pEfnٳgHI6-@1b ߹s\ݦMln @pGرcc5@ $7 &H>}L={"@@ h]  b  ڽz֛2#  H.]ȴidҰaCGHQ ?sΙ0{TTΝtf饗7WӧOP̙3ͿNڄׯ @ 9rD6nbyzܹ)ݻ'2d -@[oʌ  7oJl$a„rgE  ڝwڝw&T "вeKYd3FTbL8s GGG4̞2eJ+n޼7mdvXHl/Ydm!!k.{{{C]C_[c= Xnի +W$O<4iRٳgOl}  ,t*0n8۷)0SO@@}BiM#;vx3" nd 3"@@ *G1._\gnL8~믿J…M];0w>l0H L=<<ܾE@/^~kw<Gf*nݒ3@ xvp믿ܛէw #+;wnzDGGK$Iw#TA'3ާOynl@p Ӈ)ٲe+V X"@FK'!n)'! vʕK%J$sfT_Lɓ#ٹAl`ڴic>CG LaN ݻn=jJ4GW^{{yʥK$**J'O y% `1cf6B35 8F`Ϟ=RjU'-^1uQ vv !  os~|~&Dk+W΄{1fX ܺu˄Gm^OKxxn:V"@|GСCK>G} ϋި /8XCf }I@m;vH5D6`  `v(-@eL 8W`׮]ov\hs 2xݻI&r9hՉ'"P`SP,YLzˡS zcƍyԧ((Q0aB;a8 .\XΞ=+۶m+q `<;`31av}@+i&W/^\fΜB @J@@΄@@֬Ycv)SFgBO@;jg/Jʕeܸq-̖ ZfU_zc@ t=C8 k_bŊӧe͒&M 1}ƍG@XnRti:ujhm" mmMDž  @̛7O# 8S)!.@5~ZhX&~eSoH"6B ޾}v5'Nѽ=[l.PdI9qlذAҧO QFڄr, ~XbR?# lڝwڝw&T X.0qDݻ N{?jժɈ#_}"pUjׯ:%Jd7o' @`1(eРAfM>g1@p϶x r@p;@;*B@,֭L:Z.˄X/@4i"oߖ:uȐ!C_*dӱ_~1uT\YM ۷#~ -Zpfx/P|yN+WYz?!3 f߿4j(W2@@ fΜ);vz_||dG  ~aڝw&T X.bG2꣪|MgBF`զ3{~2x`k&fG ;tl~yc{DY/E!8K`֭[=n%Jph7wF)*e˖ q ȑ# 0@6l @mؤ}]Ӹ X!@ Ek n'! `ƍモ%K:FB VX!aaaA \`Mo1M" {yv `ѽSMֲy*UHtt;w`*{CfwAP 8J`ĉһwoyϞ=U  @ hwhwޙP  `ҥK%gΜτ lѢD̟wru ܹS+Yv=_|&^lـ #_w#[lA|foDEEɢE$o޼?X*@1b|f ~  [ѣG04.]2^ A;L@\H"rٲehwF8G`xM6^z98*… Mȑ#fm i=K,>@G@;\=(\;^xٸ;^ϲ`)Pq9 w IDAT @@]`ذa2di׮t*@@0Z: vK9 @p+"O;HB {iӦy> A #G`۷nݺ ?S! ֭[͛7{̗0aBZjɶmdܹ `/4S4Hy;aN@@ `)6z`ǎm۶ G@g hwyh5ڝw&T X*puɑ#<쳢 yIxx)>]:0pnB3f05K&N1GD! @Pܸqã{}<*U*{tb߱DݺuEf͒bŊ^I0{x1 @~dرҭ[7iٲe*m@Z@բG{Cf@@-pI@ƍ]+!*gv?U;o駡u-[`kx"EZ G@aNm*e(QBey$2d̙R "K2y D$E2!dJ$}k9g^{}]*^zߏqޯ@=d2h iҤ@@޻NڽwM@pU/04@ 滎]. "puӭ]0P>RHx, ~ l޼pOr7;,XЯsx^xA֭['SNʕ+{,@0ه & 6 Q>  \W^yEهnS~@@ j{Rh5"@@UEINVZ:7!@M&z2tM:vy$1l_`ᢡza @ \pnpUEzj裏'H6WN+?{Y +/^|ƥu1@@7d6@@s'O}Jfͤϡ{Ҿ}hX6kLtkߴi9[E;.]:)@p _ȓ' W;ӄZ+Wĉjժ@0;# 4~T x@M6|r7nT^Q Dv]E޻&T *0|pvڵ@[oeNo@m3fNǎ3S7j۳f@D ٳp+Vp馛=wr|L%`kzEE@\Ulݺp\xqfe@v{h5"@@UzJ~Ϥ`d [@Oc70)SsӦ^xAt" W:uQj"E|&_Rr})7*6h"W@uf@@ y+y9  &@{W@ !  lذA>Wf2p 6L> 6O={ք'MdI.޺ukOG $$?:W\2eJGvv#ye̙ܤ '  ?.wիWK|@@'@{O޻&T *?~d׮]6mZWf21coЁgWtǎ#Fʕ+M 0jժydB@ ٿ[ߗ;tsbϞ=O>~[y .Mswݺu " AxG?Jr30% Dv]u޻&T &Av k] Əo&mk׮1+. Xtl߹sjժҥKy衇\:  |Nr8y9|&_wuWӧL2E/͚5K1<W0{^{V (S9rD;{{2fG@ KM{ׄ@@>,ʕ3o} p_@N&M2=Zj֬I L0_s-[v[ȴ _wO88iB4iYT~?}ʋ/9СCeԨQ_ԩkp@@kŊ'Oʖ-[}@=  =޻&ڽwM@pMի˗/wm^&Bpɓ7yc naЈ#dԩ;ӄDnWz@駟K."E ӵ^xq  zW^u#q f牁 W@?:{g^~{pO  5ڽw {P  _-M4 *Ȍ3\@@LiڴibĉRjUXktk_nYG"EL uQ< vk׮9>2d kXb֬YңGi׮ @,!Ct9Rj׮  \xQ~7[\@@ Z{h5"@@5%KHf͚X]bݻn裏'b iokVF l Dٳ' .,-[43gi '! Iȑ#\~]~w馛x4G@ gv]*B@\:u[6m* pm^&B u&gϖ)S+W=F%#FK.Uoo^5KCvVA[ݗ/_ބa*0x`3fY| jՊV ֍ U˒'OI*۷/br@.޻ڽwM@pM@Ç.]H׮]]Vs4o}Z)"wqO/ @@ b{Rh5"@@5 ^,^XF)kvm^&B :t K, )iL2Fz0Cv=puݺuM9r  @ 3F,۷ݻZ?od˖i}e͚5X$0p@7nY2ah@B)p1\#sPŹ@@ {h5"@@5W_,*Urm^&B ڶm+}a%KFY+_nBڱڭ])R@ @?~ҦMy7\סCG:yw!`=Zj֬S%  4;a ú[@pK@[C=KfB@<'OeٲeRpaGAx]UVrJɔ) +V%S&]5>|sٳׯl5p"@)0i$߿lRSرp?ceKN#7 .:'9gA@,{Jʕ%w_ &@5J&"%! Э.ڽhѢ^le:yd)TPDMެZneʔ" 3`߾}yү_wֿ֤ӧwto> @01cFO   عsTZU5x   ړߜ3" &oi@}/]tv^N@8 \zU4̾n:{Mg|0D$̙3MG=s&~=$9  n L6Mz%M6:sWs$f} ~„ 椄ٓמ! mۤf͚fbݍ %@-I!%3! xr7ݻ=U UK.0W_}%ٳg7a{\BsgHCG6Nڄ;tZ)@O.=z&MȠAzx@?w}vtg @acO?13  @6mg}VJ,) ,@ @pM@kMD5J&B@%p)Sd͚UbngJoҼys$ʙ3 ͛Qa&gl_tZ99,`7NWˡv@@ ^x8mڴ_ @@hε*!kBE  +tpI@-Z͛%_|2ydɞ={%"|SN5SN6mTt"wuW@Hng^խ[WFģ;tiB+[l+ ˤI̊GЅe)  ڰUVRJja G@h̥B{ׄ@@W-[&۷~l@'O4a[v{/T  AC'N4K΄۴i1%  ,^X:v(jՒ> 4ѣGC9Τ7m~{*aZWf?~}af.]Y\ڵkf?C4;)RDҥK{rɚ5*UJRNٙ3gd˖-rII&\|Yr- TR9ΧML;f~/GKϞ=+}'Ǐ7;8p@5kfW^5sH5fuGn:tWٳ{~|OM6ŋ%K,>}DSoNDwVc}9  h-Gh.s# !n_|L:U*WJ85ػw ߿_ʕ+gBIz7 0۶m3׮]k~]pal !@SG5]a荟~oD?ŋ;7tS4Н쬛T4 00@@)0fhnRHh͓]ƻf}\bL7"Q$@{@ ! 
ԬYӼ oVi7,+4̮<yәof֎ 0ACu:~ilϗ/8@믥I&ӧGg5?x]@zZfբ>@@@̮Ç.]0iV}mֱ, /_^4iHzĉo׮][dZPCuGŋMf2uס!^7Z7>32rH;v 2g߬u IDATıD]vWVWzy\oذҾzjߴ^ cOʪUnN{_XR%)X ¯[wW?$Dv]~޻&T "PB]~~wen&A viզv0;* Rk4 =|h[ЮDlg [@= ^?[ۻv9sF;=H  :t5J^{5Сu+W:v ?y4h@v;t9rDׯo>#i|Ω uιsJH.%hOD@F˦3 wްBpS45hojgML]wQdIհթ'|WF ^֭kY`WL]3oӎڊ-kg.]bM  .;f͚G.Ti:RR%Ǭ?Fv=z vm7nl>|ԫWӁSQFfzCEÆ ^:VZ91o6l K΅gS v={P  v-Ub! &?0hz)?~|^"V@1bX]`ԩc:̙3b@B#uVU+VL/^"z=~Uj4nޮawNӦM3Sf0s" ޽ٹqtp5;.\dUt^bPsnڵKΝ;%nV]˛_jiӦ/ZH/\]:uڵ0^؇SB)R+g@ L{ׄ@@-/V x>&@ {ff͚2zp*Z@ j׎Ν;`}DN@xN:enI"iӦE+~'PB駟F_+WL o۶q msvf:{}G F@]k׮2o<_p_yb-t2{lP3eˤpŽdž*Юnq/C:FeB թ۷ zl:ǵ~7ɖ-[/qz[Zj@( O{ׄ@@7 75@o̟yݡAGrLv"@=~ǏÇEڑ8v%O>W!=~I =&Ma-`O2E{챰^# ,кuks ZjCq%Γ֢^*Z'UTP:}9xݺu!C4Li_;O8QRLk>{ޒ^x!fvҡ=fcb:DŽѣGeܹfӧիW˽u@@ {h5"@@ `ӧK=I&f9.])yL;DY!0k,l?rVv ;wN;fϑ#Pcƌ1z%0ıg͛7]65O7i]._Uۋ/uPX S>sb@@5kf^Ev 3~Sl۶M4 7|gʔ)֥={t̙3|@ޘ;ٳgEsiGn;h7}ןuLL̙3ސ}5 ˗?̇Dv]^޻&T ,ȰaDb| 5kyFɐ!C\.!@4vAO5ޱc @jn~ҟ"q߿_*U$r2z3B#p5G[: ;w0ԩSr @@hܸ3f̐ *DnhJHӦM 8 }ܹӄ59sfG>2u MC~#,?[hứ]{8ןu?:vױl2ѝbEINou@a  aڃ  @/&M .΋jٲ 8S0nq%KhQkQ6![n-6m2k˒%ʺM6vpnyF)Rpzw~b3!;o}wp#t:ݭNaȿެ@G^z˼ytQh?~(Q?tPkK/d:D3o_ s7g|vQU?B瞀vϳ&v ˻+&L05Mw5Bv믿lٲ9r z=}G8q|_;g̘1jP@ n(;vw= @ϟ/Ç7o(1D˗  R @ v1bX\C wx`!3VvqӦM (Zn[>X%[lp…ҹsg3ҥKM^NV?Xb FojΧ6zN\.zCgk\޽{5ɓ}k> WۃM17ll.]vwW(Q\97|cBA(X#~-xs!T巀=̮v<~Ł  xg4Fh _͐!Ct_4i,YҼoqi]wuKkj֦zC}Mi.*fFc5YB1u3F2bx Aڽw1 {P  {׮]k)?x1^7h۷ooz]6`uoNǎstܵ #0'|1"Q  Vn8 LWkg=F;{e}ڭsٲeRpa,:@B @= @ !  xs6o.߿?8h= -ҥt+QDILٳf5-Z0 2DXРM… ʘ1^bh>e}ߍ7%T@Ex <(˗7o5hRޚ qM6u\w@λ`3}h^;)R$lvҭ v]JvQlY_=Yw1}t  +`~G H n V=nO=ӮUV5Av ?7tM.f{`=%hד,Y* 25zyh9D^RmÆ Koȱ;?+av  @ XA3Tg'ي[MyT]wn&k?7jd@ kE{ׄ@@~7Xe͚5Z`ԩһwoS ENΏ#O?`ohBL]me @`/^իݻcMTvm1b hP_ػwT\|v[zx55ʕJݵ"fÇȑ#i~E7I/vQ3g0 45Y8^=jF@ڽw}{P  +uRZ50aB@sq0{e @ cƌ1]/\`N߶m[ӱ=mڴ(s"  ٽ&vQFnuԑm۶WZ%2eҎX̙3|Ϝ9#2d9'HϞ=4О/_>> Oxx;w:F -[s{ {;ʫR  '! S@ڽwM@H@u]5j$C h.F cǎASk-Z Ή/ӭ}nj,@=nP{X]dOak׮֮]k&X~qYiӦ?ބ5w96sI k}au֙.:t… ZԢESN>$>ؿ;3EK>nVGv/uyW̮:Jpn@@ 4Ν^C{hgE@ {h5"@@ ѣG {M@8 h7ѡCz-_*@ 7o6,DҥKTR.!s!ݙ Y$UTrqw5ziॗ^2u4o\g?`~ʕe{G& GiB|lٲE:v('N0!zО1cd~%tև{4i$@M@[vDbYfuܳe~0  %p))ZY4@ ОI;yh@@l0aҦMKX cPyn W,X #F0Ok6sՒ H(H۵kgWo+Ai!CnϗPz^zɴiLh;Zbo(}l}ћp4irC%Kʔ)So7Kh}/^իݻ.\nj#N>fw5v%iӦ A29>|ر|9֟HkM]vyof:@@Gi[v!  vE@{̀ xJb;H  {g:V7hr+̗{aٳgKr"y @@bPyv*  vE@{̀ xJE&ʓO>(*F2F # n{OΝkJK.Ұap[ "e9s\rT%KI" W˿kL2IT^p93v/cͥ/8 Ks&և۷o䮝M|7r-p}G]U7Ο??apԏ .gy$o޼73@@7\qd@@3uԑ~A.\=G!!Ncǎ5@>:8) vUտ6nh*[ eU4PYG+ЎLXj㏒1c9)gA&9/̙3;f3gY  SVj̮h1wnV nhwC9l  @*U$/Brz(  &L0VZ`!a/D54h۵s;B#]\Wezv2wkRʕ+m۶e˖I…*y>Лnc:j?;y ,X<0{BZ|@gEʒ%Kg@E@{0'$ړŃ@@ X?to۶MNLQ)Я_?nj#5jԈJ)pjכuꫯJN"s  ;'{G֬Y#ӧOꡱhhS/G&^_}5k$˃Hݻw;gΜqx⎀M7ݔe%0ܹsL2>" DW.]Zً͛eu  ړ<hO  #p5ə3)S8F@>}dʔ)fǏz*b@ zg/6[{:u"b JΝ⮻uI :~PuM1ݻw7S\%J&&HƆ m#qd<*~~| dF4iP?i:@@ _^7n,*T3fxBJB@p G{ׄ@@N<)Ŋ3h(гgOO&M$UT ٘_|!#F-[*VhRq}"._,.\k)Rp;<:^*)O,YR,X<̙3LJ*} CB{}CڵkS$ mև0o޼Ѷ|{uO>XqѢE}Kʔ)ϟ/J xN&@@L%KHgQFE"Y L@{=1v]*B@ϥe˖RJ4ip n ˢA3f̐ *9=s!a)믿ʈ#>3˗O˧~:,C @ XjZJd][:W^ɤJѽ]I;vŋ&|  @|O׮]^z2|p@@Wd]ad@@f͒^{M6l(Æ FQT{gϖrEGb h@R;[O< *T,@Xj,_\z!TD $˗m۶9N1cFG=w7,0{\7΂ D6޽4nXIKc-  1J kBE  cǎAI۶mgϞ~Á!K/ҥKTItnLs:uJI"6mڈ_3 D?Ph޼ q P@wؾ},[L .J858y#~GA9s˗//2e}Cd J%BΎ  SL>}HfͤaQ3E" @h޵"kBE  qI=]v~Á*ЦM.:,$MS:w]x4!n'Oo߾/FX jՒ[ŋXbjF (ho}_xQ;^*0)  &LH֭W^XV .@=ݺuٳgСC ©4Zl)Yv+ZhD/؋!âo.d̘1ؗv`իM  4ڟ|ɐĉ@h[l޼Y,X %KV֍@۶ms/_8H"RjUr/^x@@Fe>^{5@@@Wd]ad@@VxҤIRJoE ?H4iLGȔ)S4_0~hB֬YcNg&#iΝcǎr!RJ1cM7@ >SlRW.]tx җ@ԯ__6n(sΕ2ex. 
Npy}xUiԨ3{̫uw`~/_^U# P`2bڵyO )@MMw"#  Ο?_J*剚(?O &zsK֭['2do uf/̙V\) Y>VZC 1"0vX4homڴ1oh!` @toVf͚e· B!n۷o/˖-3T/9s:ݻw}w(9'  69޽Ϙ @@Mnj3vw@@ʕE?^v͛5QDn*/8qw/^4aԻzjɗ/_#.]$4gϕ+\vMv!hZ)"ҥ3o߾]>,Ϛ5$uԮ@*ޡ=f7t۷Ov-z ևzȵ?:rm\ve\t:-*ٳgwgΜ-[ɓ'./_PB:ק9}lڴɬ-K,>}zYf&ܮ(E.\`Y9rĜC]x:p믿LGO>ķ~[p]u#a!ФIsG )2avDwѡavi,zuqCt/{oj@@%пO>;3@@7\qd@@wLhn294=kugѺcǎ_4&@ ~񫯾2g-^rGM*8 =M65ω]=++ ׇ0>`3a{]oj{D  D@^k1}[_1@@7\qd@@ܿhfF Xoj@qر2k,\btf +u+p}mQ˗/o3k4d=qD߷k׮-#폅e˖I…4qթn믿 [6n؟|׎ډj׎:4̮7=ϟthn~[):'UV9֣@kTo+U$QvÇKz` `EUM@=9f͚5fܽ DC}M!t$%A_Oz3}~z8@@ 2^{5СC瞋E*@{b޻&T %Nk v..eʔbh?TMnt'Xr/2dys\;j%'z [o߾i7y ظq 4 3<#5$ݭ[7<ٯ_?RA\Fu.]tV5+ȼys}1]έ|ZE(QB(ƍ3ZVש|.VH,й}1کF&7o]vI٥\r!iCwЛ91l0M.^@` Ϣ;IF+Yڍs |z#婧rcJ@ 3r;cǎ9;iiӦM7D@H@_o^tS{Uȣ@@{A{ׄ@@t:u@K!@J*Րp̙㏥RJ؃Vhs[7$&>j(}^GYf7HFk\;W^=Nׯƍ.uݻk>F!y j=]taܤx*;5h3)StkP4M:Y,3f@(yߍu4*\&i_~p׿lٲlNƒ@@z1kĉRjU  vW9]@+L ^`ժU/O_u,ZѕZvG,_hנΝ;M>fkڴ:uu^zV WPA)"9s ӌy}g -~&؞-[6JA ?sѝb1ر,^X>U?r  ~EnZuPZߚ/}-hӧwtoϟ?  D@r :uT\9Y0 W@{p}@?j xP`Μ9ꫯڻ )e]O?$wq?KJcΟ?/UT:oҥEC+b=ϗ֭^ 񩧞2v5^XvF;tMcq/@dɒ%ҡC8cr4h|O ' իWMvt9eqn@ `Q7psӧeݺu!Ca@oF!uuun&C nsoRP@ ڱgϞutoߵk\{#~=&F@"A _Y!3gD#k@@ohuWA{ׄ@@Ə/o˽zk J*pv!.ŋz{X֮]4N=LL}޽8g}hQ5nؕ@Bݲe|2nܸX ݻj*Ԉ+wuktI@ DZgڴifcʬYsЮ7͝;473^+W4 ^u>9rxpO.](/7F@<'ܯǬTR @o޻~ڽwM@K@äcƌ_]^z% :gy|0.mڴރAE.]KiҤTR%N(m}߭U|K.KB욮!5jCĉ%eʔ=[o /uHL'|QB>|ތzjn I@o ?`#/,%K&֊ н{wɖ-ka*Y,&Zt :T{d ,8z`LcvᅫtW˛{2e8;m{O,%&./?SGXڵkҢE YvA_~y=,;`iڴ㢜?ބ޵- &t2d4j7uLw˖-… z",ZH:ud~_&D'7η"վ}Y!@hRX1Yxq4ѲV ߒ!sڭo}Ț5n>>c( @@0>=tyK;:2; *@{W@ ! ~ ԫWO{;w.[a%̓p|+޾}<ځN< Rh_R%рz9DðFGҥe֬Y*U*9~(Q:tOK/d:h޼B}b{G/)[GC#G4[oU._,[l;ʉ'C{ƌ#iH1bsÊ5ƌ#5jH<@0^(t?~^X65D@0f%|c#GÇ;t(+ܮn-J,@I ?j3#ӣ" @2hO$@{x8  Um j4h;քv It IDAT6mPuVU9GժUeĉA=&O(HAvڙRm5(3dв 'TW^2m4{ -H[`AþkiҤ +YL2 Kh}/^իݻ.\5;xJ< `\^~8w;a&\ eׯfBIP>ПtDs=hup_re(PrEзY~7 G%fQ $(c3&Bn0Nr@H$% '2@@Ca~;9N9\ЮN29Eꫯ:ɝw)ׯ_7]K*%2er͛7Kݺu|7nks{y"{Zܸq3<#FrcZc:gΜ+Weɒxq˿kX #sΙtǂo9C!t҅ՠtO@ TnҲe+3!IV_~z*V%ڑ9[lI"@(_fQzv_f~)ѿO#sR|y_={0DG:[jenf]N zk׮MVJ*2i$@@ R{Jh5"@@ >+T9I>!p!vbǧ~=&i(3ߪSLDZ"W=B2@ WZ{~h2@/ +@;Vo/~1@^V~K.1rk׮ݞ뮻H"N۷oÇgfjv#K:uΛ{'[l'OJ4igs6ct7;vL#G8o믿ٳr-}'Ǐ6mژft/ }<=Z#\׹d+WJ۶mM)˖-… U @ onѲ[q.]@a JEHFBʕ+g1vg@*H4̮&zuvLp}ڴiңGٶm[,/Ҽf[8q}=vɒ%토!3gJlzɓ'5tGΝ;_.]=1۷7J.-gϖ;_0T߽{{D/_%JuoכrO2xk޽:ף~'L`5?i&MBD PQ7*$.钡TDHRPn̳E{s}y>5|ki}zjܸuI![?UUU.Y>/x!} ( 6mf:tPJ @A1"@ ȇGiC_Xp۷={bYq3H׷#R]v >*!h2@yW\'lE(UVyv^A%C Ŋ+ <^IU I(<+BˮS s7֩$@NGuzX.Q$N7gKhn{7j#8Œ?tM檫Jw}iҥ Emlf֬Y)o/(O>d3WӠC=4}g_tAVU a']z @I cǎD<9O W727&X@Vڵ"u oeի+s#,ȦȀ:w{b>h]q9$".E $AM믿6ƍ3ӦMK 'P`= PT׿,.>ӦFy3j(3ydzQ䍘 bD L1l1w%LkzaN7p9í_ݻ[ܮ_u*N;h.)㎖0\Ã93i$rY'/ q"EIq"r)Vҹ3Lq L͚5MժUʕ+/:9uyYQ|ƌfwNԥ/~#Fzw7@%M`͚5esZ8S̅^X}x@/Œ;6Eܩ ww\ @YH$i |%B`ƌsIW̮?9̧~֝-ZX%VzG{쑒7P<*A{i|  {6;UGn\ekfEdWf>䓴qR[nez袣[҅au!.@@ (? 
Z:*2A M, @ H,7l=M(bnOn֭[gAi׮2e @XdѣGJsO"i$R) ,^($1f'F@gΜiEh' sOv/֞{wV樣2ƍqgmZ}+M^JRtG[tH'A;KR]j/W^1k,;xN?իW7ܵ޻\pg.aOE @(P^ tYCA @ =~cE @ 0{K/z.O& *H]-M!@ $xL1ll*.80py>02 DH.gQ8 de]fN8?%~뭷,ѻ_S"D)Y%hꪫʈ'յ裏ufV{?Q ^}Ulٲ4tbo?G<@+ .+EwgϞm <GJ3,!@, @HަM @J*PXAD!AB6cǎ57tS¤Ç[QRI M 'm;;03iҤBM{L !+j#Y-E5ON ,0'x+Ee]\?WD<A裏̿/3o޼j׮mE[пJ*)^z;H}.]25ʴk#/Y#mܸqγ6YЮoSӧv)OG7x + i\6˗/OӵkW3`D9ä  @!\߿#A \ hυZeG˗!@@^}Uh`J#]>42 }]s!Bȏ@O'*&/"7N@Bf~VX=KdT }m[nBe֭׬Yz-[Z~25j0ni"?OۑՆl߬ճ˔rd^x饗̲e /`.]Vm۶FQ7onZjŃ ;ZrM 4^!A \ hυZeG˗!@@w߾}vG` ( ʞO":{>( @"SO~4f͚ځ*@&M,Af>Iњm(7Tp]x'E=(=$\ԩS]ٳo I T]ԩS;wOG @A hJ,ڣgL  @ Rf2g}HQ|wHt @@#QN]ߟ]$@ʟ@/eRͨQ= @ tCGwFH @ѣٴ+0Һ^zY|y(=T@&f3vX5.P A%@ ,AJ2Qu|]L֭KS#5~FԮ];hۏÇ' SXg<͛7}, x϶Aڂ@@XGϘ @@3~x3l0sYgEGK@OnKF":{I@:wbmf:tPLh(3a(ne@@$J9:"zױV[ 7< yՑo?9s[6}'/{TX -Zȼiig wu8- A@Q"Q׺ GA{~(= @ H /4ӦM3_~ӧO[0 (BYNB졡"@(2sSN9%Ŋyƍ2ʁM6ͻ+#G4 Ȼ*(:.J5:~z?b6Et0ᅴ>̨O;^zNL^hڵwqݻwO@@iKDok$@ ĢϏ=zƴ@"%`/oFӵkHۢ :̜93 F* [ |v8pڵkŔJ@kq*QNR:Ɣ.;saPbg͚j.kժU,Si tѼ9PrFGA@M, @؋{ʒ9Š iW |7fܸq;LT~饗}5BC ג=XsuU /:-ŋ={+CCIE1 [o%O>dE-[L$@M ߗx)+u @B1"@ Ȟ7oiܸqdc U@bG`ٲe#Lkԩm۶ УGdɒL֭s*K!8 b]'yz IDAT+_N^H_|Ŵz ^8 @쇗jKn1 ;c7$A{ @@ "[xT W\1D #@@3g͐!CLj o -B%K@Q?@} \dBo7G-BH%%L/LgΜړ~%J @/^lzaB# @7%oL &Sɟ~rdmj6Dt0(R *ZlbUW]e?RvC&0vX iN:A|L4xw< Ov]p"A P8A/sR%@@9@E1"@ ʕ+6%&+LNB$"@(a/޽{JOnڴiS½t@֬Yc6m)DQ盁ș= ٳ3"^z%h"Kdɒ4 ۷Oܛ4i0@"$r}gP5 @B h@#hߘ` @M_7]v#h-6jj$b@N7Æ K?o6(r d"$pI'ѹ 2d G`ݺu&Mr-ԭ[}W1 @E 9xSZ23fȖ!@@Vڳ"*xGN @ <<ӧU!Dtl*ikܸqKwg6ڨg@ B "`3$wis63/6u< ? `o߾ oG@$^2_MS %=~=~cE @7ٳg3<ʏ7Xd f&!@1O>p)^wuF 8 ,Y$##G5j [lA@(!Zk}얆 bC @a@AKj @'`/'Nh8∂O \: y!@{})X[cHn#5kXBasdrdD'IV_=h 4j"'$=k{~zB^Jm@@ tѼi&f 6@@ @AC1"@ G`Сf̙Y {VDd J`OL2{gP P:#Gb aV,~H@ixWyy:sA%͚5+b= @ 6lXJIr" @, @ lExjڴrd,(8cC )S_dٲenݺ@*J˗/7uԩ0t7 3mСhQuB֯_~7t' 6,X@:yܹI&L Tq, @ l=yޤx0aӌ${ @]69+=@PD^x@T !wkIq+sy"dTO>޽{'\b L %-Zz6(@@@G1"@ j֭[gySF X"v"{qƁV!@~ Xt)[o5;v[|@7o^2}gZn] q Qě~h[?<򈫱 JܫVa @'qٳ[nIٛY @@Vڳ"*xGN @ \#fi͟? 8xEwׯM H:k,#A; <]  `.O? ,u@ ꫖+-^{U4 ;ra@(>7&X d @+O?4a?  @@1,Zx㍮ӥKDvډ@A?3@ @1  PZooLA 1@J%0Hh? @@o箻r?>!pzPDB9T @@qZ0@ h/,M"hߘ` ʟZ_c$@ #FYXbWTm@ (?Nz!qX` @ :A h !hߘ` b>  Ę1LK# @l'NtN:%7jԈoϾQ :=dT gA^`4C p8R  @(6Evs! 5@ @< ?'wy3!pW^y, s( :=@X#h/d'XL M[ȗ !@ P>ݟOٳg5jғ ?獐 @ȑ@sB1@ 7&X Hd @1&b0) @ @˗/]v$6l%po޼9 ?W "uzQ4ʆ % A>!Y @@ 4L @ @&P?~=z%pu]8FBbD1 L@@B1"@6|@"  @16c<8FI@ g7֬Yc3eIҽ{wKܮ3ʜs0݃ ĘA o`oLA 1@bL`R@ @6Yﹰrʄ}֬Y תUUfl@ @N/TĄ D7&X Hd @1&b0) @ oZK.ijА!C,{˖-WL` @ N/!Б]!R P l:mB KjGi @B6Zd%p7ng%#G{ Qh🋆!@OuzO@ 0큑E^A{ibQPN@ P8l,5-O#@ r!?H]?'OvcniZ"%)^* @ L@ (AEA{ib@J@ P@l,6MM#oT@ !?P~ ̙3]ׯ%nMڵ1Z"> C y(* < A>!Y @@ ~`+U  |Q @E0o[QFy6xYgY֭[(ZJ9+"2@ Z1\,|`C$@ `c1ƃii? @M8s_]w]tq1V 3 b`^, %=~c=~cElD@ cl,xp0- @ .O]?&M4ꫯ 4(bs 4݄ Đ &A ooLA 1"ٳobIժUM.]~j'W(WXoyLj _zf-,1˂&b ^QN!#4;慏xVJg̘j`߾}~g ԩNEe2t#cb;E1)8eԄu:sJA{PbG=cZ" XP~6K&d7*_J:j֬2(M\un馾ի.b꫔;Y|Qj)T;Y )`%7yw~ֲ0-R_Q5u盫*/w} fښ5kLF" A l,F*##Z*E˿£w$ܕM1usw )5Bg A96}]K~{V4x`K.;)<,2ed}{L : JA NJ,|`C$ĚG}d$v'O?=^zfj5k#-O^]weCMo}Ӫ0`BiC|yU߷^d-_mT&SP6}5TLdo<)e6/`FGR1G)lk\>lfN3GuiѢEh@3l NI真6/0!Jl)R/%pk< -{ӦMK19FQdSG{N:Y? 
8 ֙ggqƶN%5AR hH#hߘ` b> %6;8swgb|wfvKlw8[$j=vE1zꩧLvBa7sLӽ{$f?餓}X/)/!9d%\ݓz΋Wj̛&8;0Ng@A hJ,ڣgL @*UlҼ)f_8סWv &G*EXOXsժUlo1M4I>ךCf+h;vVS&ZUT==;=F%?¡/،=:[U+Ȕ)SL߾} fƩ[nF&y߾`L89 0' ";;j;-B g8*[󟞕~ o^ePs b]p;ZJc桞R!:TF ;!3%7&X Hd5\H=S$hݠAo3:t(Yi~2͛7JIvZ%E'N4:TɔV\V'ףv*=;.6 2rFbv#㏦E)Ǝ;h $@$AqДrTк??P"r#vRm{챇ku:-Xl@1(;,(.2_ֵԾ1c<1bok'J\&S<~?4lذ`e7??qS<ӋǞ!PooLA 1k^y}-[D X$ޝ%9ћni]裏RXx 5ʲEkx}ꫯf͚YBU4W[f3^Ew֩:CO?R\|뭷HۨQz2ӦMs۞;[D>đql"܀@ty:6[lEg֭3/642#G&ֻAljb lۭUҿ5k %ϼQ5evWZ5ծ\ \I.︁cل% q믿Zv|͞(p~gvaY<1iyaϳ'9f0K 'ׯ\Q.Ah7&X Hd5}k[`,X*wnbdQ{Eׯ0g1?xJ$i j .]j.r&L4)!f㣏>R^"c=V1͗_~iyCnĺ IDAT[>}nfիgN:DT_?$Wh"cyK;L϶nҤ("w_ZcIDĿ뮻vՀ82$EOZm;y 3]PYgsY)JO/vd;1^v+ZԩS8Jݻww}gwߴ ]vżk)x7|׭[u|I{W%4hP5ǺtVCT[3fHcٮ];ӯ_?sGE_d@l,2=JG)׶?odM6i/?ZSIXXm©|z,Y$mXvݻXkJtl2Oxڪ>hӹsgwt _&y ,s&7X38##ooeW==7/٨(կϷ^}8B>i 3__Ud*CpP"&=b9T=hO O34&LH(XQ]tit뭷Z IXzԙ.bh˶w}<Q޵pqYblg&f_K`>O(Q… ?6"_~esgk:׾%D%mz cJDH˸ xn馴HK/g1H"]d8u5,7]RȔ֬Yc=\-͛7H|I,mݖ,ݺu3sIVy睔"ʸ1Qxݑ]\I?ꨣsĖ s!@tp&7?͊ӧ[gԩ%_ZDZr 6H%QCcǚ3{TkYfZ@ϵ^]Qԋh~Wd'?Q>/Oz)U{Uи2. Yϳ@{vx_&'3ـyYah-XcDA{F A{ ,'D}ZTSLԢ6YI"F%~E8ڀرcB~^ܫ- )aÆVQ/pk(J~۱/u׮]~%Zn2v7i %DѣG9ۿSfMv[!g%?d}/I1cƸvGl?XdckX uDAݙ$.%']x={e /0o94~.(6 A6"I@*L~JLܼN,cX6󲱟9"i٣Xks9ǺK8eAF1o@RVHOj/YjosA 1"gwa̙3Eٳg'^NQ^ĺ̫=zؙ<lmΌ,{8?)-Kkq =j A>!YbM@JN<9N%xMN\su]6 Zeѫ;m5owD@I۫Sd=w\ I"{=UN/1[DxF>L--$a$ENNe˖ikCTOh& 3EUW]W)S.(~߷oߴx=ׯ7m۶M|-~_}"=řd.K,܎*4^z%WI(5l,ڈU=>:^m/fk%*:jԨ#\~zo&j9c ӳgOeN8!峨&^uaX7c^@Y-a,x'~7Kܮoѳ}5ʂC9@+"^g_?F@}I#|FߓYPF}a8;Y}ҝL@ (AEA{ib@ʂpȺ &'EG{7AYgG7![t٥h^ZEVXaE ,0j2{+Ef~W}fmǮoDÇvHp(nK.V'a*/aT1JlCBKD r9@Q<i[ 6m~dHr[:uXQ'pkZhQE+}g4^^I4/~D{hK|^P^Eyv BeԮE%^zf-.-{F/%8SNC=TO{.6b+3tZiV[eo6 J˗D^l{]5wiڴicV^mݒDZ38eȑEm֊B(&Mr} | 31?)doQ1oko6S>~qQ( ^gA %64h` 7$Ӌ/hENN([p5kVZv+([md<("k֬6%T>y1/ w̙3' ZweW;<9=煄_K0b'΋b^{Y:ygNJj8 W{ܢ/Z85(u^lp;7x|樣J|r['~(Bb([ICڈr-S>nQp0FQWRvq\v#F?3)ʹJyj*S~}9]}vM7rXyNt>@?""r"Fׅާ~:=5kf]u&]}8Q.0?Z[n{5ZS^r5n62gW~.CTʞ ktw4|ǜ)!?[1C]Ўjm$J ~!q~UVP6T:Й΁T%zԎ|#?FAjtFL 7|y]@6gg/s_Ad(aKx0E"H34=~cElDFYkLћ%MNƬ"޹sϜd^WΦ&(:>w*՞W /Vv2EPWO?=ΧI~m*rX*"yLU;0OQBfE闀Ǟ3vzʴk.-:{^̴ḭa̸qRꖈH&V}&&A'GIOn(M.zu†5nΨ^Of{*)sK Z6{Mn];"j7nlfGBz%ꉪLkt/1e/| S1< Pgvhc1wJ஋^IT$nz.xva}zg(}疈\__oGmuO> h$/체F^)39;LxPkX2m@ hx"hߘ` b> %Onzfgs4`HZwy:\Sx %n߿[iQ\*.XѼ3E~T^zkZ62X(a39Eɟ{Γi߾Aҙ&Md 9ΤﭷޚV$b Kȏ:풣.4lкp+;`]xVԿ֚}IO.%h ktٌ~qÌw M~/۸L{G^IB[ ܝgs!idz-,ٖ֦ 6=v{iLD[3 C!h뻷a^Jd+oLA 1{˖-s,L3f>|xJ_nњ%~g{KtLQz+j?h;%W;n dqדtI Lܒ67)~[P-+EnE>~iٲe)"x~msjdzsŊVF/֪_y"QM9'G.nPԘ>}}䜣A楗H^PJb$gm(A""hW>`ciQJ?JiTdZEP^t%;l>N-Sl|\kzbl|X]hՅfEc#p^;|T|ǏDB 4}9*lu\{[SO=5 X"ȅJυ"v=zX7|sQ=N%bV󑿣V{sj q"gg?ŪX3C %}~3@ lE* NM.#la6=;9S$VGrq7AuOڼr+\WdѣGQFekW;^v)"{ ,t(<Δ,ʣ˗/7UV[^a:7 W\*?i޼y=[;Fi6.^شj*nɜvii2E3gbo{t%~g uA-IhOZg5~f 2J3bĈоw H¦ 䍐 Fk*-Z'^DΔg'굦eŬL`U6z3Y/; o.@ϖxU~%p?2vwʔ)za mITunsG`߳n:3mӦM Z!@6 gg^큿ʠ2D@^`>CY `C,~cE dϞ=t-bx+BixUixg#ZV/qW\*ޕ裏6sIi2>h;^G*$ફqEhwi>Tݍ}O>񌰷⯺ׯH^mb{W՟)"שS'aք hΔI ~zR$'9d^*(>6?XV䄀_#9.ɞs9VZ)q^{Z(?cKr▢7=O/< 1ΥKZ:.ᄀv9T%WY~^9^~r!{ϟo:vYPg ?/]svן6cNuz C7(7&X Hd=u@{<0M쌚IЮ Ν;pp*'gP~dU2LnQ0 ,\%XL:Փ(ɓݍ-ʼݻwQ󝛷6ipnļ=jԨhksx޼yC7|cZlڀ ҰaCb}uЅ^h]L\i.]>5ܺlKΔ͗Pk͵kǎh =Yv8/;~2g g F@"W?K.|s>JYz%Mtg_-ι|^Y~KZAG'R D󔳳 s\uz!=@ h/4!hΈ@ !AIH,R6x4sL@s5cƌK|۫S&MҊyEJ%'%pfm2riK8:[vm8EH$jJiWm۶? 
Uf]Q gҋzX)ac:R9;K.r`Δ@XGϘ !T,8C3gwy'E+^ӳg;݄z^g5JyFe3\+rGV60K% IN[ÝI|1QWo$BQ38U~zW6g;7rb'mߏ>hJ/xEȗ NK˴GlsM7gW>\Bz3yX}GFzg!R+s49);k֬Y#@1X 괙+\QָGk5ΨŠXkM|?nxk :0SowwwgI&@2 ʉʕ+59眓[zTl{4;%|}8fHWR+J{T~ۋSNfƌAe3] gìl΂}|N/'("Y6b43ͥO<+ ^ngdW;^vM:՜p mxgTkT "9r"H$/ܒ[y=cٳgϔnlvgtvUD^K{:+B>2Zr}*Ԏή|DW^ye.E7oӦ+G%?3_o6xD]nꩧzo.\hV(t{=]%={֭[ZO>Y>8`c1M^?)nD^O9{7dn?ѺޙH[`ZӭN7$6W\qEEAFq޽{cPJl J~^/^Ӄ:Ȫ^{"v!~<tL^gFa=P* /Xn)8b3ggUwruz<@.ڣ"K@D HM h⯼JŠL}n`QJ͛g:t萱gAY8+ Z^y?Lk֬YVDB %]v"Ovm@ڍxm.EKDnIcq7]wռˮ]veV)W^=5g=mK .5߼qyGOr/ǏO399bTFw0T{ co`B ϖ; K>tPӰaC#A;~K`9]s=K,lBPZl{Yz{3 $#71a@XGϘ !T,  dc!ܺukWUsڨSim&'mҭX"%ʸ!ʿf-H+ }iӦed/]we)3JC}Yb=nboE!w)g|(h-b^!n-^JOe& &EsK^$(2H5jH3PFr ϴfqr>qt^= - !Jn0Q EB0A: W=FH8 Q3ߋb)Q}-k=Kd.8_ޠkt| Åh}u /)Aϖxқp ?˳Tj yׁvN%{E[5jW ۟33+eKy!83=~cElD M!׊RڧNjN8ᄌ}jժySVZ]p+xb裏.,cEEg匶mf5[cq2Ra#L6@]v5zV b#^ ] !l,`07ߨָa_IHk`[oc=6:olṿXk( &z}sήEî7|MM|Ǡ@1 ?gUN93yZk$m\-PQ~N:)wyu rTg ?3c8;,Wg`^L @ͳJy8 Pl;O`̙F X"-³lƎk>쬆?FÝiʔ)o߾9&W+WN+VӫW)u$LN~"F*:_n~ᔲ~;5kEwFҥ={vJ4|Ctmjs~v$.]j5jWmu-\VL/$8p$ k?~kÇ7gu\Ib /[Qoft˺&ng.Qgg?:=T 2'=~=~cElDm}YB 7|wtIׯ_o[zu馛&LgEкիͪUoVjW͚56l[o(Z6U>YFP N]Pvnmх_~SVƋ.ԫW/J 6CH#Q04 &OjXk(֚61e~%Zj4Q՛1wwuzE-@%Q~ ?%ENήt6!/ʫ|u^HT|'u6P4N/qJĉ8[oLA 16~kcJ*fwvm["eEPwj.X0i@ X ?"/ xwdn@(1"pV"R5l@i`^(ŠM,|`C$@PdC9$ w}5EL6mR_݊M P)X.~8 @ X @ Z&\ȗO)Cuz>( $=~㎠=~cElD@ RVtK6#FH|(3f̰ &38#R{@L? +|\Q@| wl?Xb)&}چX3 %}~3@ lE*!@Oӽ{wxc=,Q=fDf@%F 7'݇ |? 6"J@N^xӪUe? 6"J@֭[gx 3i${֣^x֭~snʁ0kz @ JQҥn@#?[8ִTzKo̰Q,B^X37 %}~3@ lE*!PO3?|o6d!Al,2JG)B( 1NX @, P A6 P^o<oLA 1@bL`R@ @6Y ?3; @("O(]7v7&X Hd @1&b0) @ ,s b].;,|`C$@ `c1ƃii? @Mo@Euz.J A>!Y @@ 4L @ @&?\7gf @":XiKA{A{ , @ X`Z& @ `e.@3 b`^, %=~c=~cElD@ cl,xp0- @ 2 M@@N/yڅ@@C1"@6|@"  @16c<8FI@ g & @XXr @ UV-p @ У, @ʓly+ʏs~( @@Xϐ P)ooLA 1@J%0Hh? @,s bCt hX!hߘ` OD@ c<4L @ @&?\7gf @":XiKA{A{ , @ X`Z& @ `e.@3 b`^, %=~c=~cElD@ cl,xp0- @ 2 M@@N/yڅ@@C1"@6|@"  @16c<8FI@ g & @XX  Ę1LK# @lxfv@ P,ӋEv!PooLA 1@bL`R@ @6Y ?3; @("O(]7v7&X Hd @w@WY?E P |@(aEDHE]I , ɢ""P22, $ww{;=wf]>;=s}BL!vLK!:IYp&3z@;=[Q/D ,P 1H   !&;@@@@@$,8@@@ [-K($dbR`N     `x} `   -Ny %A{|A{|@`BL@ `b1΁i)0@0 `<0~F|g<=|=|>E  0! Y@@@@ 0bS0E_g?wdӳE@t @>A>"S,    bX s`Z ?)@@@@@LϢ/3;@@@E"zA  h hO`L)@BȀ={?38?N 4??sVg-bŊ yX.)D]0@@@@xV+^q?@+2/ey4NO cWIEJbFe9H`ܹԣGO< :4IdI&R@@ 0Ql!,8ٿށuXQ/OBi#A,* vH@ `B,Pܨ, ԹsgSL ,㏩~R@@ 0Ql!,8ٿށuXQ/OBi#A,* vH@ `B,PܡA%K٘Aah&t'DD#y[GA@@@@_ŋ ,ݬר.' _9@R ;@-ӟvQ5BE~7Tre͛ի)jAzS>Lɻ?>|kSވqS Ę&c6 :M0s3YYQ/]Ny'rw:z[%?? 0!j\n5k̰k׮Ļ75 ě;Or+@K ,K%zIYp&3zG6ajrχOr+@ evڳ=]'@@& 8˴ihF ۷oO ,U0o( ~[  ^X o`Y* -$8 E[5 5XW0In%rD֝=!A{v(ۄN!.\f~XP!*Pld8y$N2ƶf;>}:wL7L͙֭3;F\.'yCS;1_&o 3fss﹟ N|fQѢEU dL#D=@ب @@@@"B8 fJwVkj豮;b]M"D9F9(rBl֭K/Q"ES4w\=z48pN:4p@馛x^{yZ9j(;dɒ כ/e˖ѪUMLܞ޽{Sv)}Gh"#ѣG nw^ZI`FUuNfyybb~K駟wH IDAT69s&wf2صkWjܸP>,Noر/^lJ|nG7|3]qVO9L|n.իխ[׶(pƍL灟jժ9OO6Xf̘AK,I^z4|pرcN> \&grJ      g:J sd úհ] '|Ie@n=|~=|>E  {{U۶m3"p=]ba͛J*ēJ*YDްaCgh,b}ꩧ -_6mj_SӖgQ^^yzjCT+T5\cH} ثpҥ =sQ>%gyȤ:w1MS ^XlY/)ܹnbJS&L@+VLW?tz)*T.vޝo(Yf UTIkXt 7d@,6U(xV, q_CǺjXWúgw'MA(rB,qEӧO#FHŠ Jk׮m Zl* GYp]GY8zE  5!&6sLС-Z8t߾}i˖--׿E={%K=k֬o]y riݻ<1r-tӹk"}Q _$ 9lt}w}G6lnͺ7GgxюzN曍(>^z綾^o1䰴ϟ^ZlMWaK D&MJ2{~"~z-ӧORҥK[?y$l2EOnr| M2%%"<@>U_P♮3!C؏;Fq[W\uԤI]] 0L,# A@@@@ `"xo~a9rլtgg뮳G͝;za]kѢENнy+CQЮ]D]*"aKg}n;_~~*S ժU+-AGz7(>gœ#ض]o O7D&#鶜5:GBFA@@@@ "0`fV9~~g7/UǺGjXW3{ռ=C  t?( -'@@@6'8zQŠƿ;5jԈlb]D.Kl4G8>| ͛oM H&L@L>o;X.G[# L뫓'OJZ}[]O }Ҍ3Ҟo>⋭.A yAOa. 
G.qE[A@@@@ 0`sP?4qZߗ Ǻ%]:m+o w%tĐ@X @ bY4mt0 'f&OLpEvBl쾰Oqě_WVͲW#sTCQWXAC>"n'>SzgTRf͚t뭷&E/N'+LDmӧ9_|R/\`%Ĕ M=V6mX#퇥}RGy̐7b{OM1ncKyɒ%-hnMD @ hM\^rF Bn'S(^n*G\g}Fyywwĉhڲ66SɓX׉7}}5&DAPXTHorǍGC ɨ[n:uؖ /ʕ+g`Tpazi=a}1Qw >Ģ@QVnZA     Y%lVp;~CeP^ֿT$˭ګԹsgˡXWԹ3]M\ӵN{*U_0bCq%=0AЮ AD;C:tիmFf'L:<\1Űah̘1h;5kP˖-,bt{D]*T$7nzꅾ}ttNq۶Tzjڿ#GW_M%Kdx>᩸t j׮uYpƳ Lw,el^ֿT$˭]:|~to#P~@ Z-Zd hO`b E:fq;qVp2yWs & EӖMٕN%Ob٢ӧOK+t7ZCoO{]ƭoeMk׮%޸. 4&Mde-hœd$)ċ&ϸ&G\YΝ;za5oԤIVab͛ӊ+r@.>{ҷ~k~̗/#SNQNI]O_ׄn@@ &!D GU,7yU+FͫZ*m޼+)/r8z(\ƎKoƦeiСcl6 '?Cl2%o]y^]@@ +0 vTACЮ6/rźÇu8a]MDtޅ\ J @L/@>"S^9YܣGÇ[]lYC$^L>{Q.V~_pt뒝]].og(K)K7]ƮGyĈC# 7Ytݺ&'O-[D쳛x˴&'7>AG7x EpK9Z#5W{5NaOYwR7q!*zj8&&ƍGC cS&*χB1s}uM6TT)boct}vEժULJ30`<.?]mjޞp j{{pdN3D k h!hO`L)@R̲fCě֮]K͚5-ŮW\qE;蠟z*)UĢwXL,$ٳg']s/,6/Pvo,KQFY׸,X[Xڧfd],,h۶mJXޫW~b΃Ս7Rz;w38öロƏr.{PSGA7x$Ep[V9JsuYن(E}iӦ@6Dl&ӷ%'N$kx 9rr/"ALL0l5~Fy  `<0ʏ2 n!h.hǺG]@XW7~ou5 MN&} $A{A{|@`BLbϧ;`3˗/O*5kLvI;wL?8{V^w͈-&;A({fΜI={4"KTbE(;A;_d=k}%Xy=J>(=I >E7&oep"E=?s뮻hʕImKהi˛̍\߆ n|wA؞3~( uѮc@q  `b#8ܖ~?xcÚ߯7|3s9Yi*%6m 0@c .B;9:uru ]bIEOqXǎV^Fw&\xHyׯOw6;7 @@(x 6ftp; AwA;j*YԵuuIt}[G^ul^H%t  hwKL~3F `B_{!^ONq^{$q7viРA4iҤKPye8`]9r$Dhl'h?qj*)B|bb$x'AyO֭F|oO4 ]FTNl#G\Om=zh>|xҵ{D!&.,xg1S^^^-VZb3>y ~hwܢ*~`MSrADXDG.}GԠA(!ɊHG~-Z4c;Xoyo3gNތ+ A 9ITL ucr፪\,9_BOc^纏;fm2~0aߵhߧƍ+F%~?ȗ/RTne ~iۭ<'!@`</l{n!hLnjR?XbkLַU(e@*|W%A[bCЮ1j@ yQ.]VL2Ŋg4 tx+Dɓ(W\q%Qu;f xbT3Æ 3.4ibEvN.1i6Q- 6c M/x5'46bT?=!T~GNjiC;A;7O/'.ҤwRz|4p0#Lq@{7o哑zMժU,غu+KOիW7+V >gɒ%IC'K%ɩ^.Z#c~WEQOf\f}7:6K? ?=|>E  [jL>甾+Ruz)i1rp÷,.,JxĈFT+Jf OGy*clbh'NeNj-[L,8PS4o?L],x`l;:c_X7OXvն4+bG?(l۷/f?8%wm5 w߾}ϮӼq̙uI? U̴aC4nls)[%/ݼysZz/Ϛ73#˞f~n'gҥM+Ax)䐽C{챤dA$d6:EB IDAT٠H({鶴ܶYc]ۓu5XW .2@ @>C>"AN),#Ǿs4DV\9Oe7 Q~7#vF/~.o}I""p;;:~a YMVD^i3~菜?(Xb2}Ģ3<ӈ Z|y*]cp(T*PH!.^}  'L,Ie&#rYd·zXjԨaD`=zVZ "x8+]3QlJ'J>}[xCsڵNm}1"lIܾ{8 '-3a Fhd O駕+WR۶mf[n_?3˳w. p;'l 2O0 $%ϱFa]-XZºc}/`{jȔ3%A @>C>"S,0d&z#v2Ď׷"~'zC 16i6,fA6 -I&Is946SIeRta R6zɾ ~-G}ԖL.ZxsR; j"cCslD!XW`*@|C%A[bCЮ1j@bƴHLԱh@ `b1.̩=]]$@=q5m4)Y&c̙c IH,et2e6Ix?hl|@5}r39EiN:IeċofJ4jG{[Y߳g!JOL'O/5k$]v ɞ]RX]B駟誫#G;ԪU 40٢E s sOޠsSm̖ݻ'ٴvZϊiݺu ڹƌc[oQ֭eq@J٬G!'9͍yXW[( N 7 +}KaMbAn}x`9@ `b1[Q7mD_|qܰ%Nj tQ6lh]khѢEIPY=12J׼ysz(钝̝;zaUͼgohƅ͛7]x1qvN;n8$u֤:?c4|p?cƌ"m^q=>ʞ]ee؝N ÛxsG|b}>2}tʗ/ch|߿ӦM36o$&wؾGyF  Ax6h/J?Gqjq$ Gs2%A{vDʄѣGZ3Rv EzlJ^lp;8y$ݻX(&hm۶2sD&MwRRDhoud:EN4HrTkL' Ce$E3E/X/?`DȮUVZ I&Yyx#oIL%#)RĶl͝UT/Fsc vig'WϏ.${>t?lW)G6$,1u%wa'hϩFiɆ۩rʎ3p%ݻ<*vI*CƳAF]Q#vyjze  Nw Q&A{A{|@ʄG5kY8իv_b8E8pGx!wv L'hС 2U"[۷ψliNvIYnYr!> $F=W9FXwFY*5wi}„ 4x`SNruS?p0!31~xz\@;ݾ?D,dgg@0vX{-*vq3$WCfƳ@F%vՆy82̾@4q'A{|A{|@ʄiԩ DgQYTt_jE,pOp;Z>e#jp[3_~i"2V5,\:vhSƍYӡC O:ȟ??XXXl&AөTxJ_۵kUVͺԩSIF*TyۭKA=#~ >d~~z gV.qܸqt]wC#Ј#vdO7nzYy4qD2ߜ^f lҺ2$>_*U26Ddc@ 0`fnϚV|iۨ|+A@wWrr= ,P +b>Ez TeY/_>w7F,ܹsGՓ'O&hrB`;I Q"vᷠ=߰n*^^([U%]Tabw}82%K\aU[_'GG\jr;F~-m۶{=#zyڃ]ys<% #liG6 /P-*Z8CE#>_e˖%>v/=E٠H(p;~b8`]]̕VwT@CZA  h hO`\S@,G}} Ŋ7,b]2ӧi}_&Mݻko5mԈHm8s\d2Yna*Gh'+C>n6*hmWb~>dȐLPW|Jᆪu}g?9;ᅧ{KM^d[Yg1=N@#l`QQ x?G6jv/ &X- hׂ5B!hnLe<`b(%~?D!8%[YYnz+uРA4i$վE*Ȳ[n:u6K.tRr( … ?O˗/OJ5-WI,7#͞=zTMnhΜ9Vv O6 ~jƍ^z7O?gm݃:{7Ƴ~D9q$9&=𝮇+J8=|ޅ=|>E  0! Y@@@@ 0b~? h'^ē*Yԭ 5JrGfͲΦO hӦ ;I.]J["E]h]iJ_[x1]wu} &'M|*u'+۫Jw*={Z*mڴ/nm?“Ɇ:ONݵkUVʧ"h R φ 0!?0 @@@ t: ="X @b @@@BL!vLK!vڃzX޷o_ 6Х^*ދV&dſlڵkYfiQ2^BdY۸n˔:! 
J,ߪorVy*U-[P%~jV ?4FMÇM3 6A.E %lD!9Q @,;=nD#@ P[2ڕ0!@`B,l=    &B{A;AhQ[oeɋV&^n]x#Ϙ1hG7ԩqv3Ewk[!mlg}Fyyy\sawev=Biݍ=~8}嗴}v ?#=iM!ޱc%"ψjfַ~ԋ9o۶} gU(!O{n\'A{z ,P 1H   !&;{A;A=fͣ.]XM2  ޭ@ _yYޯ_lݺԩ'Rڵ,rYw2QоsNUxyfRk̢lٲƅ\.nnS|pAO^xtڕΝik'HYp[N{H_;(rs7 ,P 1H   !&;{A{xh3nBUVQ6m>}wĉ|;=:/uݪR&?y$lْ8{b m4hM42{{LV$ :.rWGB2Q.FUg ڵŋStw{(n^+<~(;AmWoܸf{X!#@`<)w؀ =~|T   aN`:dYZX @b @@@BL!vLK!uzF]ׯ&MĚ|6 TV}/"רQoJfq*T`۶m,X0^hjժѴ8[f&n#oDؾaÆIyרcǎ{T"adχ[l*+omsϥmҴ &ܹ3+V(ȑ#4n89rdRSN}xRFE>^~f7l`EO#F7ɞ-7gٳUlԁ x6Ȩ"?G G A  h !hO`    V,*tďn@~pC@ǂo?9 umnl {l geN8sLC\~W%lX8q6mJ,TxqC_r&~$9bW,HMBv}o߾ԭ[7UuY/_>g޽oSfX O6$[~ںu*1UT^~ec#ЙgI܏9}޽m%3ӯ_?5ku) ^] EƁE@@MN @$m hW\ !# 9GXJ@=l 2eUVn:[JAFƌcoЕW^:/[!=C?xJգRJʕ+}wςoڴi"u H͛ [p´i&R8FȞ]ڵ!r7晶1ܖ"nhQ\[ݾ?c% rNϬ˗M42A;X§ر(߹tn݄  (ō"FwsS &&kAgd%cA@@@AeEI˄ͣ.]Xɜ^z%d F+ږ8z-+fߟy2vÇSf,Y{9#"v˖-m7L<Hn}'6ݠApBGo0w2oƍ)3Ksu=_~G}k88G#KnnY_6ħ& 4M&El2cIݺu>5jd  `<n ?焛H%|-0 BM($dbR`yXx1]wuVAfZj!4ӳ>KzJ[a[n4{$^}jժF4ŋg!K%t%Xù- N;%];F,DmoݺqDm;'3sL۷/*UXeqO=ȟZ-v}Mlӧiҥ4c Zd#kcA{M:p˕mTy.9X7G8qy:_Ѷm[g_ᅧޟ}nڵ4k,wsbSqo@)Q,?{SN~@@ 0 `_6 `M@@rsh=x!Ajz]/_ &4E    L,B_0~΂ 7:Co,?ae=z4 >\Vmy=j#5|Tti::D, … kςySGmH'N0D,-Sco?GY`////q'q~$fo!'-ԽDA;o֘>}ivjժY͛7SbeA@ 0 `` `E@@rs iv̀Q<@P0i?2ITvm:paFٲei۶m @"~zjڴUPЯ_?>3fu瞳=)/PIY?i9nE{@@@ :_R ≿쀠=|>E  0! Y@@@@ 0bSā hVS=\+;w믿Nu^cR^Q;wLϷL2 rڋ./g[,s?g%x#top2}X @b @@@BL!vLK!:E>|ׯOw6Sxq#J{„m6D`޼yԥKJ*ƍtҶ5,[ZcoN+WN޽{Gd7ŋ}>Yb@@@?g3F %st}A@@ u~3A&B   0 C@ ,㏣G}'өShѢ]@:3}/7}…ԱcG _nhٔ/_ .WR!dH￧e&;w.5mڔ)b\|O?492)Ahĉ)VYZli]vĂvH  FS32~FQ'@v ;=Q;D ,P 1H   !&;o߾4k֬Hyyzjʟ?N46N92rΝiVw):]˸=l'WNC駟w>L2Iē"_|AիW[a%,:8>=N= )3%%@b@F    &5EѾc4uTۧGrq?x ծ]8`x'@PAP~R?OzR#ol IDAT/+W.Pfzk׮e##Ƴa#6%t|Q:đ* ,P 1H   !&;.<K,r袋|~ݺuiƌǑΝ;VZYS |(]s*J`߾}/ѣ0bY,d.fРA4i$5|p&>*g@X!@@@4w(bN9($dbR`NG_5s9 (ǦM >8t>|+fCJ.M›;fs=\BQ  ,gڢEh ւ@;=ND[@ M- ! и'X 7eYjA@@@@ 0 S`Rh`W9N9 13F{Ab)9r-[F'NPٲeUVFW*ٳ>C:3ԢE Xʭ zjڿYꫩdɒFK .åKsN[ծ];:묳ڲJYŏ]%0dx6E2&sztq@ N*3bй ;= hwL kG @tXz}Y7oN<f{&IW*6͝;zae]~=5iDV7ou%]b.\8GÇ޽{o5gy|嗩k׮ҦMҪUF5(@@@@bAXD`u*WS7A fK`0{u;={D Us'@@&CCիW2`SEW*FԹsg+LWy@ 6nHճlZ*m޼+jlѣGiʕ4vXZn)ŋ#FPǎF3}8m40`lذa4f̘Pm \. ."r@^ԵΦ8IN{3wuB|ǷBy ( … }U A>"":}4}Pɒ%\nd~ Ҏ;*oNjJ1!zܴ'y,X@:u̡Cc=F -#?}x)С-[oЕW^G1:0_R"? @@@@@LϢ/3Qx:r׃&/l΄7 & gF .%X @b7!f?~C}GMFJ%7Pʕ ͛7իWsCC ,>|eظqhȐ!fRGYfQ߾} oӦ *U}o:ӧ͘1vP~p׮]TZ5wJh˖-TDu2X&]B,#9blNo'NPٲeUV{MW*ٳ$zgǩETbE["Alj78?kW_}! Dqy.]xsߪvYg'z†rKxܸv .&[3u`u`y:!U#IX @bS&9eݺuԬY3]v5˗/gynWjժi&Ǎ^uVSN'N$Û@dqS{KL1a3ҥKo3wRLL2.×_~9i ״iSZj}:lx6NɁ9=j]s]q(qo_EZWX'5 }{a~;wנ@=bX @ 1> l̙ӯJ7|3s9 3+]冓bt6m 0h u4DhO/swԩS-ϟO:ur4K=ZYț"1/_^&;vXyy¶mۨH"IORqiu>yKm[#nq_*uW_Ν;[Y@ 6nHKݼy3+V,M G+Wرc#F;R5dٵ_0q Fcƌ E x($?cN`N+qo;o7 E3wS_NwCyA'@@b|E]d >#jР%jY|e*0f=Gr(jL\*W\'O$`hѢTx1˿َh7,ԭ[73gN(gfԩSsQP!_˅3QEz-FuG Z#@k*k-Z0fzq5BF~)]S6:MF]Ll{ox̤HrX `^qÛ# ]KDbf>CcUdɤhS d~ &mܾ};ժU+L"m\:t(I%>:'q7ޠ+R \$ϢC3Xutx}g;^6OG2'ݽ~s߰NS. 
S9uZh=3tVu]ԫW/[B_G29z*ٳV\)d(rvzMժUc^z%?LիH|t3hɒ%I5qTÇQϜ6|駆8\,w 9b5Q)Y,tL8 a~=ly睗iҤIT`A#Y,ѣ5j!Q9.nA_~DŽ'.Hs>ڵ)r?wX`kFf޳gϦ 6{РA/}oL_u?.^^{$\z뭆hL2&'|x9qm~Fb|5kL3y7)uYŲ/C5\C\p]{h˫;90\߷eҦMB ~Bhb=O*W~]ڻw1N`o|_"orƅ͛ի3 2s <6ӸqhȐ!eR'YfQ߾}qiӆJ*e|曎ciČA)n]v%'K(  0UF9H3 ~ۄeMzWXڧ/&Ws^uB"׉Ni}7匐@ '@@@'vI7pCRijtMh&L+mjT/fؼ<… m?uFWY^x޽u@΂OA^ve٩BYڊI,'4XPQdK.sϥDd+[(s,Yuq3e2 1z))u-_6mj_׉}o4% (8{߱N _u.]jW^yzc)&wb[C,ɭ`<CP};sJ>4ۥf٢jʔ)4`YWbDf=F]s]ƍ|5kf{96vƥ~E/yF ąsd:uR8q1M QycEި /ϗ'oy[7 wh_&?)h5_kMW7ݽmTXGӵ%c}.n ta֧N(c;]9@ @A{|@uBL\(4"Wriܥ8J){UM>3WyA=m_%ըQÈ,-Jxnu߮fY*q=NmS$.n\'G+}h'GbiĈ)!YIdCڵxlx(v)z\ڶm+mǏ>ۈ\e#}Y#Rɫ;s#JއJM?Mȿ)P?2O,P'~xa_o9uBS\W!ysMfm@3uo< 7s:uU3DƧ*9%/dЬʢD޼˗#&yހm۶')N ̸~֘瑘]o5.x6F2!/Ar]lķսnugkMzlߎ7 AtaPN(wr$=|=(ɓ'e˖ѱӓO>I:tr@s-EDv9^k,frSnlvP]*SĎx=>QcǎO܉6i$oʽ; Q*U|!,~IUnͺ"nNN%KR=MwFt-O{g>?_|E t򆓻ロ~G*T{m'$wfۼ5DbŊѷ~KG2NByeO1s 1,ILNuOn9 #{z:n9/PhǼو#X"ŏ&8o袋,!3ݠArg… g|BJĔ\W̠ &-Aicӿ/|E!wI̙c7lonyA+[7 :Mc~#8*Gzx|ڸqc*wQFI ?9;ed>Ķ~nvt3ēƳ+Z~ڃ/ϵm2:{X'tćun}o|!@2# hO`0N 8 fxgϞVkė(O^z)XlȢz>*1 Hu>OY 8'NM&E[tpZJio 1;$EP諯2fʖD7}0_cذa4vؤN;Et[=G+SLm=Gqlڵkb=vw8PB駟誫#G&geke~k?}l>3fzZn-Ï$:-MCt-ZD<&׻|Yn]eO~Ŀ+=XRሣxORNMTW Sߝvs1oVu'W:G㍔qE+V3fВ%Kj1Í/?ScX.ؿ?q5mڵ!u7wyiI<9잳&_Ҳeˈ%&&ڵ):;,ݽ{yϞ=ظ{РA/dOxq\xa;=?WzmYāYě'>}լY3T8ϝ(ޤyYg)%.r:to考&uϟ~g>z&\a6m2ƤH  #20u͗ZgU~ԺMv~c0}:F: ڱN3%[:~Q.ė- ,P aS X_|%e;w.6yIP`'޼y3թS*.0 h޽{S TWe`6ydChxaÆe^$f%K&\Xʢd79r3cWPH%K& ]9ĉӊE?m3>R\D4oޜxҳ@i9L! |X 唦MI.x;Y4<*_ȵk?}h#d„ TbŴ]`!%^` itʕK\8> /2KT'׬Ycm$T>+qT.v'EmǮLq\߮W6KvʰcJ~]̺y7#'9˗h/Ew|X.^Sͧ%v7`>'z-],76̝l}ؿeȮxrS1~ge]C:os;l)݆yp@ `<[Fk{;266g.΋rn㗠z.<TlWrTVc0U:^.f:!u<=(M(p;!Xm˞X I9p۶m/"5i҄x.*UJ(tE_~~#2G+L(id%9sjJvG]*sĎx]eXᨆ,N0MڵˈNHjL⢨=qʔG[ rK/4mt&s=nѢBe[!?@< `<OUp;~]=jwA{ IDATd:5{xk#X'|@3-uB2I ׉Er.넩ld :!u?=(G(p;!5R)G6?t!8Ѣyb b!ā![_^=j0 PZ5t:Y._~iD*#GeIWOU&Tl㺬NuOr#r˝<9X5#HqR&k;+&~]1l0M~^By'Q[bp~yS0(.GO>޽{k׮f͚ϛ{4 &qB Ʃ}W]uݯϠ3۠Ac/e~=nNf>Dw3[oE[u'lw4?-g% ڣ~MXX'T{ b0-(a𯮁uBI&NW{G#_ h_o=|>E  =!&aMfK~ɒ% +8#.s=IlN˄uX.l:"h#;5cժUԦM2G_h%LǮ\_<.{.nbQ ;`P-oYtϴ}:G*"iKSpGO,yvgͪ}L^CP$ Pd00pTYe,nG|ӦMAnܹsG-ߛNbL;A͛N:VvQgΜiPKw^eqaJWnxO7N/PO(ǎ̯v5i)ګX֎ 8")^ԩS-\*^}J{Aw8# ߇:9;v{zNd/YK^A1_9Gnܶm[kԋ/h.*U*夊G/?~kժ1 I&9~~/nlJK)ػwoc3{]==[ X݉CvP XjUƜ:u.r#*o5Y{ ۓ)7~&mrܰa]zi;>SN}v~ω4OT|xXݩ^}xў={=oN+WvDħR]r%IqF}mdA>ov[.u.[u?އA_m6Rgsh;n\::U.΋깃\QYOQ3I7R]:anA7ƠX' ͇uB'g:q&NW GN? @A{|@xKWQ2)"u֤膉׻tb,+WΈX`A*\0=|r+LGJH6#*WjX. dN6/'Gc9̯|J ܠ|nLTe1W{Ȝ'3zDrg # QxD^js̱k~d/އ^y}kALUh E PQ6nv31'CfRq, [ƛ|W^m_%pesr3]3"-r6oޜ~m Չu a$F'n?1o* Ev=Ym0`!;6'Vz@l2}tرħI&&b{86$w'=dC x?oMe;v0bH_ [0-?J?2^nv򍬒'׹qqLu))'jt܂ok`n %h7( k&gNtr= ,P  1ئMx܌KR֭믿jL>n82dl,*WjX.L4l׉*qp&F̯|J ܠ|nQfÆ 1cXbtƭYZlieC͓Yf9>NmpנG-rdjժYM D# Y޳gO,jڴi9э2y^?P,S3շ/;0;)sEAeCxMY***AD5"QEWA@k.( Ԩ, DCX(L}==]35=uNNZ5=]UOl5󏸯L}7(Q'%Kt:hx?_{5@M# 7Uo4o {s{PQd̘1F׀λN@cTM+nQǎ+#G $7tիwӦMrގ_q?Cɩ:l[fulr˅Q]#$ynjKڵ++W"#(8-Ng ?U=[q]6mM}hqx] 3/>?6Ӄ~ϹOCb>!Îj'ݾ1@A}>E$@l[_^ڵk7A".Ya~ ڹO}L|Or$P| Pn)h'H@m b6lHlt8qBu&&u1F]h(ւ!WRSj.r\lبZ/DHB=]B$mzYa7#;$ njp=:6K.؍3fS,T-A)_<ˍ*l ڏ;&}l߾]pয়~ؤ#DRt!G^E_"nP-[6a(Ç "/ cҦMٹs1 1_:}x^=at(*>whթ7leuUǏ|skH5΂u=?t'JlH ?p>~!eL\//wܷ>.yfsAîp0vyz==7lyv|BH4ض ׉"Geu#zꎨZj))A4ӷo_'MMSj.r\l-(+|$ =2jԨ~aN";$8ar Z#:tpnB7u<, J=vu5#Ow ,;vFyՖ/_^n*gyfT㙜 x:r<0~A=\/BeϞ=e!v2 deH7p k*ȯaS3pG`Q2;d)ab^hA¥IJx2R#pjd@')7F@ܑU~}/OP_~OQ (R'@D_WTQiYH!gem͟^^ﲹo}4]~\O2 nosаgr0uyz==ÚIv hmAL 9hȋ]tQV%j4`Y|#M&ꦦz5\9K.&f~ J[P9眓t]8C'j.XYN&s%U7l0@pƍ'#FH;tpD]0{ҩS'^Uؒ#a((¼8YD~H(ܹ, "J{G&Nf$F)'Kop.~M_9Xd}E pa"6|k׮M/;[]wW&HOw誠;+z C.~o3y?P? 
+ Eϧ㦾W6LƏn:g 72=[# W>AQLXnR&YH|RmgrƖ}4]?S3L aO}A; О/|O7Tg$P(hϷZD$A 17iĻY9$u |I')'t3EvWr׮]~EF_,zXE;쳝&u"]0_~N ޽[;o~DgMG7|O7Tg$P(hϷZD$A 14i&:-[h2믿N,^+Wbns6 e`n_yi֬S/"|=xժUEx7U"e `DYhЙ-#Gȣ>*8,*ݻW7n\ uVoUyovPT@Xܵkפḣ@ $u1Aay?~\ڶm,ʗ/8SNZa/sSO}9ۧTBK޽2۷5k t*mڴAnPP'tAJ^}U[`E vGwzn׏D c.,Z@L6lH:ЇÏx,QDʎ8q9xUPőCt8y.&T&m~ovq.l>9ZoЭShʯ{XA)r̘1Cnvcu0{`Xj;A7i'ϣۑtه9jW~V"n J궓8 s?Rݓ/hmlG3eKL SLB'L-h>|'{:%%@A}>"  &4M FVSN_=#m( 6L;CXy-HN;SLRnDvŊIh \Sf1Uy[%}\rs<6'NX|lFt C| "mٳǫbp㏝ !_|qRDDEw62_ 6ՄA_,^8wܹ_MQ>|xRLNێLUw@Ʈ+O%(Q}гgOUᇟ-zAΝ;3qCRF)Ф]hkR݈3YAN pa1xHq?2QP]pP5xCWZ0A9sxy{StcUV H@=uT4hPZ&^~qaϡ2 njtlw8V5G:t4'{oF} ;aݠ~COHmAzS[n3<3.z`Dχ:r<0~A=\o !hhdz @^|6/D#sD s0^njTqLU}e>.gj\fbE/ovqh'_Ўkj}eOtSOuKKv|KA}>E$@l\ۿ%F="B fjJ`S/"\kj\v5$ H,YR$ұz5\:Kjz׭' UNj ,D~HƦ)3f &GN2y?[$ʕ=Nn`Ū;'u](ǭ(,oDEn[gI.gdvL>ӧO 4m4A T/l4Mo- _Ԅ[4 DOQ]),}]iݺb#5?qa1?D+I Gvco&=AƦ^ 4W*~UzMϠ(Qo ws9'@'ᠴ¾w' VE{WΔ1ƍ'q,2~FX8X?yGv< 07-h{uyv80p _yܚ<\A=xq4@p 79f7u O!92&M7{ذ}h$7Xx nH%qPO}߽yz񬣛E==3H8>SnOh `ѣGBtl:%DgW\qE%y095woiD0a'[lEiGjDmtl^ 7ʂk+Iݨ={'mW^2o޼$No bڵk}ۀ+Tֆ#G8A~Mձc:c BO7~:1(jR(Mssoh߹~:|/FDe?3R I, Hf<1>lġJa1<ׯYf k 7D AD~1︮&^ q.+()0quCiӦyM(|&EC =P!7nt.&U|w餓T't3EvWr;B`e6_PArTA;;祿_njuYtt]W!T] IDATNWg;8{A!~r=S@>RnOh [W N_(X8S Pn߾}'|)S&%]vɏ?(ǎsWREjժe-zڵ _|;D?a#37,'ԨQ#2DbŊRR)-nM"u`ߣe:ߋL/ AK!H=aAwlz^[p8;SЩvԙ9sȏ11!CTV-e4tS}Eq_ԫWK[nrkl3 l"6&6m_Aè~T믽nf!sA~vmށH>$dɒG~#OLx/}WCyHx'aUV1O@j؜T+g^<<޽{qI!}B^7A!z!gfGSRnaq׮]xPN v 4_J#,o߶mֻ 3:uEl?0~Xo{<6rq/ϟ?_zi߾YvQ&8=|1ܠ7|Wꫯ O\k'#w#.~A$` 'l=N95EK^/7f8٪uŽo}hlqs[{En>! 1ضOtG m" Pn(h'H@ b]`:urݔ)S ;+VH On HMkjJj~!bO?$'N,}DH߲esSMC +D^x ={b {p_!⋓"'.|3 ט㠝7D//^{ܹs"?uD/E᭾cp n J¶cyS՝~(؅(9ոUS r.XEt̃ܛ8ᖁauyh.[nժ>VXH|8]Lsΰ xް:Ы=sqp@;="}BkHPnp>"  \Ӏ,K4M>Lرc\Ru &5DDK7q׌oQI=}\X̽h>vy"tѣ"B aÆ"٧N* r_}_P[N$%KD:6U's 8.\(oѢE@\夶`Ax x^!`~ @=]Æ ѣG'Eϯ jرc(q2X]wux v7z3#Daӧ~ps6m *Rj|6&/j¼q)o 㧨.WC03nsOH'>#`9w4r!~W4/[073b>!3yv|BH4pALB .0[07>ZlbQ^+rU"y^AW_Y>""2S}:BK7-_܉TX-:8z@~="G4o|vWxUD r}F:xq#j5netl޸~ZZDGMWvn={vڃ~A;.^zɼy8z~7kv#q B iQvQ";3_"ɓ} 1d2 w9rH'*gQObeax8p@5k40|ˬYߵĄ"pT\9|[~}/K*O78}|/RPv'>\veZ]?Ewz&N6k $@GYHM3}FQ p+9r }`Fa|'{]c֐@>>/QnOh .i@b";fED|=ߋL/SO%bs;Tv`sqv)$S83sL6aǑccc}C_ZHk aG_rQG[rQIK-7Oyv z5A[4_c6E!r3:Mȇ}B>%%@A}>"  \Ӏ, F4}f)Y,lkɒ%ҽ{wO?DcJ&8-Zݭ[7kQ EtD1>"C h0/F O?4)fҢADDQض##+7n[(w7:m݄  a6, Ç{V(&,M :&YCgk\\et!71.O7BOlj!`>!+yv|BH4pALRj+l׃tz%VB\ʆ/^LA(S%˗jAܹsO>=AYXm3&Pq4o<φ HϞ=CI j5w!?A܂)O4I./c(ɱcYr :&LH;pdVW#3:{6<6Hp>[Xdo%s<3-Aڿ˔nBofc\pPwpPsA}B1Fh   h,f=zL8шiӦjZ?ۣ9uTBO>l BfبoԨD|ԬYdս{nyB={!< ^xaFm}!Cdʔ)^ATe'".,7箿Q LYsܠ`\IsQj׮-7oUZb ꫓U۷o:udr6S} "i傀 L$@$KY]RW pl׳l>a8. q>a8^̝{==c@>QnOh .i@bX>|X0!Â܉'W+W. r ]D`8xSZjIRWdpIŊs` 6ߵk=zuͶl/cP8x#GȱcnjSO=H:0;*Dn:%l~ֵkWX^rl<1mVʖ-||3f̐QF%ɓ'0uE]!hϗ%$P p>[!3G~W4[xn' ό% `>!yv|BH4pAL hshZ5(/f2bTe͚5RdI#UZlkɒ%ҽ{wO?- ]>GdjpZhwwnݬeFN4I&>(b?@_Z5kF X駟q0wA$8gv86M "t  (hK|~ 3f $@pATVI$@$@$@Y$,fS#cV0h 6mZuuE:W QО -^vo"5nX(_V zΝ+}"n5jhaF{ 8­nZ`2 k p>kkh8 4HH)Sdz$ 3g(Z  f/k'   h0돓q̼ѣGĉp6m*>lϗ޽{{N*sʆ J-4n&}ciԨD|ԬYdս{nyeܸq_Bx@.Œba!CȔ)Sa,1¾" l^F9fIHH$@a PgH HHHH Yͦ2&GY&Ç  ^+WNtFn}Ht 7VZRTľ}dwCժUbŊ9Mf@TڵK=uftG$P@8- g+9vHHH@5A1 Gv h$f!    
pabд"8     p p>˱@ pA$@$@$+|OyKKv|GA}>E$@ )Y-^ҥK;tINoǏKNJ*9͓ sAmF%GTr,G$@$@$@$@G){Οci&ߙ"zIHrM'#@A}>>"  \Ӏ,}kNY/ˡCnֿ9s(Q"ֲi   \b.ͨ8JHHHHHp>[x>e#s|,M;TY' -n'h   heEIoڴI.?!ݻwEyoܸQڴi#k, @pa1WnD2$@$@$@$@$P8-LW9&j HH&|Ov|BH4pALRl޼Y7o[o%۷ei֬﫮J-[&%ḰlHHH W+l7 ?Pc    (L_٫xpǸk]DY nWh MvCA}>E$@ )Y>s_~R˗/.]$ QڻvݴuViҤI-fs$@$@$@$\ga pÇeŊ!ǏK/H~Sfl+ &f夓N|JRvyf,>Kywr1Сqy_vGobΝB yL$!$ZD$@$`pe$P(hϻZD$A beҲeKٹsO>)CbҩS'O{,Z̦HHHrM CHÆ L "Loa2Uo3/ @t .=z$=^{5)SLJYRi+oq?޿V*kvV3͟?_zqFiӦV_={zmVq?kH$l.m plg?h Y|O7˗@!>RnOh  bwٳgKҥN>}X%'Y"OfΜRri֬'Fmۤr9%  >.,f9[N#5TAab%22T$@Z6o,͛7uYʕ+U8r䈼2aٰaC`˗#G:}فMg(ݻwO4}t8pgad\gGgfHAY HMwFqc)  %-'\=WSKA}>E$@L,%FꪫdٲeG ؆æLG,l8qBN9 + / 7t]qÝ<7[HL$@$@$@Ń &P]tK.hBby_J*IQ υ?| CIV?}viԨQnfҎmriϒ%K[nMoܜWT):BX_kdŊW^yE.>bA p>k?h]L̟;F9! m "{ Pno(h'H@DF&B DZr-[V Ǧ`ŊWy睲gzyy"|駟:њP7ԪU+-oF&M$*Ur׿;&Q:t~޽h"dž]n}JӶ5kLo%\E07M:U XHHH  Ïť&Ďv;a:ucpXx͚5Fi窷޿\qMn&!1bWO{)~}_}Qk/^z 򵿴g0L̟Op.Hd^  H3 dF(MA H8Ć "SLq@e;vL6- 6gΜl"UM}4n8-˅ J=<~Ī=dqF~zi׮]|juUUl@$@$@$`=.,Z"@Sn7qPٝSBI?*=묳uT+W|ҤI"O}8Qؑ7ojyvZ *ɟxţ!֠A>c{֭R\9D$@$@$+n={s='r-d3s e1.Y"zE" o<tu떲{QڱU.(=1q?駟h" ֟܄X~ [l);wt>:xOL$@M7K 3C;ߙHH|O v|BH4غ hSN؎d^zw$H|p gScS sy}aGPͲF%mR"?"E]|Ŏ`믿v"'FG/xI^ۤ6lL0A{N֭e۶mοʕ+k$@$@$@K hy'p{w;ʔ)[dIA})AzbW?,dq'lNn/܀ϋժDtQo>R_&eL/{ҕg[=Jڎ=Sc$zС#`wӦM .n?XILoR~B;Ɲi2^w)QmFu̘1Cnvz()8EF0 @p>3l8=FW% IDATswy$@$@{zY! s: huA,jm_~O$vj*+##Ғ8p4km嗂G2sLXbGHn7ʟu_[:Iv,$@$@$P@X@,]{\3Isg! :ԹiӦrz_*E{)SHҥ|wAևVoÍ^+e>9 ;0D}A`Æ ]n{eS m_k1Mm<klݨO͇OC qƪ 7PmqVd⹇eIuxN`jժ9Z|I$D3Vn$ívݻwoA6e/U]XB}UW]%s\}RJϗ}Nm޼qx֫W3=U4nB|饗 a} RPhUpu@5k 2 XLYCrN 3:wE DHH"{zh,BŜ -" `zA 65Lr@ЩSRJfy+%5nF~ٳg; 78 /qvQ|$@$@$PX~,.{}ǎr'Ɣ/xMMgqFZ(7ѣjÆ ,]w>e ;~(ĐO=V~r۶m}nL0:k߾Yg2BU]AZBQ[jOq̙{`ۖv"bS\sL7ܐ_'A~g!@W?C'on;֭s_\ sH~Lw߈+wqIsиsױC~z3R1Au9R%w~km'.g˛}w:nZ  n1[ B#@A}>"  Q +!>=VV^֭[D>}hiGNT C|(Ӹq$ FNDILL~ER717zڵk^y5D.ر@t6!3"I=@#hO{h*;Ȍn !š- ?禸 r6r"_|gq( lz!uܢ.SϽ_].2`8q|HS;eT#@Hq~Jθs=?z~N}g "0~|ʹ/! 0GYslYs2-.ܿ{@$@$`e$P(hϫZD$A ʂϚ5Kv,"2 yV1K-<HĭLHS:tHk6q|QBB޽;Zd؋ U\MV\kP03 hkh(t rEɆ z'k5j87(!j2SM!$8sꫯe:7n(%O'4U7C[!xiӦyMѦM_WD~ى ʕ+'~<裾}J%T_pS; ucѢEIդ:P~J=w;a#]ۉ~QT@eߧ8ﶍ}ٹ ZZ:uºI,!%V28Og否Q$@$@yDy,JG$AA}>E$@,!zyF3iTQ DZ/W)5Չƨۺu#8pjfW.&7ZJJ@^U$FA.9#  cqEG:6qhh֭[67}uUK͋  D۶mKOi^ScpEw?HT?l00aBAM7i>y|>kzS C1/.b_pnJvlKL Sܲ5\.`F*Qu7msOt ֯_/ڵ+2pT@͚5s3QR~7Î]|p}9"tO'A.TǏ;70&޲wˤ>\Ə7ސK.DC$`!g-t M@3qΚaNCHH,%tKCHb -" eAlȐ!2elٲE;2`=h T 5.,Q AdײeKO-tϟ?_ze~>mҤW_{N-/UꫯWUǪT:QZ>y"T&1Os'Hg]'Ob~P_qX'&4hQTl7gDLw(=xR?z A1U!J42/:Z'8SÆ OlX?}Ot]ï^VAcsY "fcǎ#G3 CY{|AK#e;7i @>{z>z6@n PО[~SnOh ( b@D Zh|ܹsgЩ^O9唤Uaz-    panкda?@ Qz/2ٳg M6ѹ_:ӤrI9r)M~G'oFҺN Nb1;z‚^Ǿ}ʜ9sTazMSD#?s={ϛ/jЀ: lխ3ɓX0/H=Lĉr饗:QݤFƆ no'gQI> ێzkvf͚Ν;S>LAz7nu_'tnkڴiRpϠθ z~FPXϞ=#z(HF2 @np>l5??Wܿ >HH [lyv|BH4DY 6qK *g͚PGMGD7GY7Dg\j\y^'xB$3 A;D#lbÜyHHHwH֙p/oINYD1bW*hW۶m+k֬ҥKm{ʕ0MTIQ?۷*V'M$wuGNd$HC4E}GGdz,gL* ÈUԩSeРA^SAM?tĤ:yV5j3&iP9[ؑt9tnnCŽKJ׮]oڴI.@a0w!'ON+6iw`ƍ@ E/wOxwAs$"]R5ɤU[p+"Xƌ$@9%lNq ?t'}Dwuwy \ǧC$@$@&=UIMvKA}>E$@l[^m ׼wA6lx GG ㌪>d2eg B&   A υ˰(:+o]^ \ rM7yu{2~:Ď 4,bU4ٯ_?' p%kVdA`ӯ ('n5z^Z:u}˖-l7]?OAQSێ[瞎Hv8b/vQo壏>?ONv7Z_cxßQ)$& AdQbH Ƃ(g ڣsw:cyHH"tAkH Pn(h'H@m b|M:tt?&Mx=։FoCD'\W\|y( JE߾}U5KЎHfٍLTQ0`   <&E$@l[Æj֭=>'UPR#AEK fIHL6U{tܜ8 By+TuoS[m;\]<묳hae˖)<$@$@$@@ bԅMW\}NGrJ5(K2eܹseժU^ A#<"į`0z !*r;d^ A|QF'Obq8Dw/+;tiasO͛yޘ yd'uyuX?E=?kJ_RJի;ʕ+lH SfJ @guw%C 7߅$@$@{e9$(hIZD$A1l|͞#RbP q ɕW^TqdĈIbz}zuhn&L 顇ѣG$@$@$@D bЕ ɽSNv$/\r%).[L:L';AbGuWE̠<6l?wvݺurEyYyU`ӯ ~s Ga(QE/;s.,  fJlI6L;zp_Hz}W3kH:e&0~B7~OQ +h7Fh g$`ʂ!Y}_NxMW0C! c|Oct s>MZD$AĂXb$˚5kdIj42lwy=Q7!Xt*UHYq/_.XKLQYz#qرceȑIj j$xэަS? .,懟h? 
=TЮKvҺ,(ʕ+s7@TƧ.`"H$،e|֫ڡ+ 7pWݽdKL ܲ9LbvDw{'-ZtCqk깇5kֳ/h_GѪU%sJ(o2|7RN,طm6/rx?(srAts=ݶO{H 3fƏ @g [ޑ Eqd=$P|Pn)h'H@SN֭[kX"rqi۶#F•۷oU.{ԨQ2tPop׮]2eyG#M4q6>#iܸVT{O.Yht-~K,2up8餓y1 @b8$G 6$'NHy Qݤtng׮]?pS{`Ut&g̘!~O\BϷzKp:]ڸq3t>S)cƌ PRhK;1ڿ8es&oWi'=<'0_"bǎ>Y~I`SACqQ">m)[lCu dY@gtwܿ˫/%  =&%@A}>"  &4Ͳ{n';H]7ʺZ_r7/_}W/޼ysgrΝN6r^Q}9sxlݺ'&ugϞ2| Suu#J7?ˑ ).,㊩q?2Q>ӪĨw|bjժ\BvpTAiġ:nR; zԩ2hР>ᴛLu~Ɔ8es/\Pza7N6{::6-h߱c4j3|u3<3WzԵ0x>Eh}2~A=\o/ZhDz Q@LpD/s2{,K$@$|O wJLvGA}>E$@lZۼy#4ww!'Oe9;W'_*?/ЍH~ofx>=œPvE ꫺ɭT'?'  O\XOWd*hHn$墋.JZ*hW7,A*qUazMG  f7n1"m:9眓tO8$TXPx?[:QTkKL LI]] N6{AA~Ν;;RJi08qWs$F)'Tuܩߨ(:O=Ȍ$@9!lN׭[W.21.I䮻Jҽ{w86'H-Z$ݺuʇ Ew^Ot?/7|hd |6ٸ?g(ej{ܿ,K$@$@%fk$P(hϋZD$A1 sڵ8\pDB{챤ȄA\z!~YJ4o%@ IDATdVI$@$@$`!.,ZǦM"hq믿E6^2o6_q9ѿ*TB@J^}U['n 'uJ]ĠF:XH 78 wL̟m9lm   B$aOAylHB]3**qw@.mذH$O?Tׯ}HHH ?pa1?D+Iǽ눠Լys9_OJÆ }z M=h6&7/jm pgꧨ.H^h"O~OSwYҺukmH|6:;,|6Ο ?{iHH sl %@A{XbSn1[ 0@ br̘1M"aC!@6w2Yf%  ( \X, g:8z@~=zQW\eѢEIe{9߿7yL &>a۷ie˖^vD/g}#0g-m ڽOP`@0|ˬYna\|IΥ/_\Zt NACK/݉]vlHWHOQǝ !FwSz.ի'UqC ~wM}Řa"_hyΟ܎gh @n==7* 3  h$Yv!5ZꪫdٲeIarl Z!HHH7.,o[m@<~'.UVMz߾}'|)S&e]vɏ?(ǎsWREjժ!US_v-[QFHͺ;töt҂hQ;+JJK%v3}s!L;U"9CP%!"YtƓ[詧J:#Fj6L;sS|/=SNI*hǁ3gJkA1Q;.?~OU2{:3).e8/իُF[nreOlH ^˓υEhoWfHH{: @X%f>?  fj*۽{rQ9"9>^;wŋʄ0YIHH܁|?ËAwA.[zl{ƍ{:pG}䈧 -mܸQڶmu 7. 0к Çwn*tӜ9s|o l "l~f孶g$@$@%g$(hkZD$A b gټy4ok}K,)Fcz!2;6E}4l.'  -sIKB#@av4[0I&]we =v39rʕ8?aƹTO$OZ05?u%+'|6X9]}@ HHrCVI Pn(h'H@4 e!ej֬YbŊ=<НHHH\\XX'䓷h u(Cn޶sN's( d9… Tvmjժ`ꫯ>Co.uɲl.}u,;RҥVZYM .,NJ#Gxs-+UzqWk]}(Ƹ(Dn&ҥKk׮^սzyI%L4q?T^={Ҷm[)[͘1ù.1 2D&O 7LݺurEyܹ@Ў& 'l=0Ggsl0HHP=P<~@PО=ֺ-QЮKH"1AcHHHH 4.,F9$G4ݿP&Re͚5T! 1.,Y"ݻwO?-41NDEyw~Q mҤ#B;"WVھѰ`q_|R~`cH /p>n9"sY   {: @X%f>?  f*$   ,ba p1X+4hL6-:ʺt".mk$:]lŋSЮ9+7={8%ʗ//Ղ޹sJ>}4{(ۿFe^q:0p+,X ={H 4gC#cbDblvHH,#tBsH Pn(h'H@4 1 XL ;!]b2qD#ئMʳ>[K޽=N:U 甍qQjq3;vHF& jOf͚&ͨݻwˋ/(ƍj?rf G 2DLuciĈu dDٌp nsh XJvCA}>E$@ YHHHHb\X94?8( ÇMD?qT^]ʕ+Wh݌?)"l"o p$@$@$.dH (h:)hD $@6^M$@$@$@$O 38Ƚh BY[˱@ pA$@$@$+|OyKKv|GA}>E$@ YHHHHb\X94?8(HHHHH\r,@j?st sE@>QnOh B$@$@$@$`+?Lڵk'{ }СCr 7x߿̜9SJ(awh (h=4.cy    . uIe/c͖H,!p1K,$@$@$@$@Q0B{j,  3g| -ZHjdӦMr?޽,Z|ƍҦMFv    &@A{A!>&  bNN|Ŗv|BH aHHH ¢a 7o͛'[oImٲE5ke˖Iɒ%"   ֻ)h6 ;@ 0`VO$`.ZD$@$@$@apa1 -捃qP\[~J/_.]tIwi֭ҤIx bm$@$@$@$@ņh p. GN $P< Pn)h'H0.IHHH0.,ݮA~iٲܹ3'Czjԩ}'=]5$@$@$@$@yCq 3ܿ3\  ( w* g+$!`e)+YfH!1{|AKHHHH .,F20!h߽{̞=[J.-vG\aL&zY3gJɒ%}i֬']l۶M*W\?    (h7U';   \Sȳ]- s߯u -"0L bz   0L "LHÆ *YlYJAv&.9|(QBʔ)I5=z@~ 9SB /M7>\Ə{뭷}ff     9H ߙZIHH p)s@!>RnOh a\3 Փ a\X4 !`BKt"K.uaӆ dʕRlYPTXQ^|E;eϞ=NzyymO?o~UV4iTT)_Z:vTFgСN{ʢEM˗#GJ߾}ziGT5k&y7K. F88঩SʠA1 (h 3g+k%  &}`FAHvJA}>E$@ pA0`VO$@$@$@ pa0`V_͂!CȔ)S!߲e;VMԏ̙3A;w飏>ƍ… G^'xB2=DqFG]v)QU~ _3 @"@A{r7;Eܿ"l6E$@$@$DN$P< Pn)h'H0.IHHH0.,Uaz׮]eٲe͝:u?_ƍW> 0CUV;]߈.3FFePM6޿aO>}(H͛7w{Fؾ}TPH~6lwAZu}>묳d֭R\9D$@$@$@$@. 9H ߙZIHH p)s@!>RnOh a\3 Փ a\X4 !`]i'hSN؎^zԫWɢF+ҥ,YDJ,'Nnݺyy!>SWÎFyyd@I&_ֿk'|bx{oLB'E6lL0A{֭[˶mۜ2A|ʕ`F     9H ߙZIHH p)s@!>RnOh a\3 Փ a\X4 !`/DhٳDZ_~OK2e|j*+}Ĉi=iܹ֬ɇ}-[+_:WS=d̙RbŤT<>L{¢VN 4S/: @TG%r$8BHHHrEN"vI (h-)h'H0.IHHH0.,0-hٳ̟?_J(իSNIe G]JJYɓeС˗/DiOͼaÆ^^zɼylbG޽W}ҷo_/Og"cB2dL2NA.5#   P P1Afp WJ$@$@$LN yv|BH aHHH ¢a(ˏ?KtW\|޶m[3gT [nӓCժUKEq2Lƍz~ٲeruyyN*xٲeԬY3e/tM窠Q/RYvW^/NT03   7$عP?҄I—n4m3DqcDd!(rX"m3FMHF Ұ%aMyfᜮDDw !F^~sv^u]q;~^O? @=߂vb" @ @ bxΞرceݺu}MaÆrĉw SN5kִg6o\o__lYgmb[N^rsϕ~gyM^sw]if9rHٴiS[ou_pe{p!y @, AX$@ @ -XОo'&"@ @ ,&h߾}{ٷo_gOqvo&Ro+V=oVkڵ޽g|rW1x^3wrѲ|݄ͷ7AK/T.s|rw`Lo @_$@ @% hϷdA{ @H$oi"34߆|S>ɟo裏`/a;޽{U~V/kun:繗_~y6= IDATVde˖rСYߒ>w={ WZ5q7雟3}3_p(!B: @# h_: @ 0^A{ D @ @@"?3߶m[9p@|=Pؾl!azͷ7g~ȑ#eӦM~o~^y 因Ƚ|Z|;w @! h_{ @ 0nA{ D @ @@"A\0 7lPN81~}Woy' 7{ʟ>}_ گSO /?sA|  @}` @ -hϷ @ @ O:U֬YNMMG˗Uw)7n,O?t_||oy~o|=sC.U}e߾}{=#v? 
@W@~b^O @ DОo'&"@ @ d իWB ӛ?+lܹs=n/}ͪU&ߘrwVo߿ɓ+hU|?7' @ =G @ h_A  @ @a d ڟy晲aÆqǎe޽W*?/_җzοկx}͖-[c=V-[n< |8iB&hK{gzy^07o^(+VXq!@ 0K@A @/ hϷCA{ @H$-h/Y/BM~-+lݺ}݃>X㎳;rHٴiӬ{+wu׬-fSN&,ټys9|P~n|+_9+M?k׮{ @ @ _@Оo|;1 @ HF//|SSS婧so^׾eڵ&"?tPY|{6\wu?^oJy|r׷ٳ}ݳyoy^03ǎ+֭[q!@ A @=vb" @ @ @7(<$bˆ 5\3}Cɓ=ϗ+rvYK/tW_}uߟOΜ8qb^x|hX8\tEzy @3 }. @ @v(hϷ @ @j}^{mmMh4q3~8ӷOf{{o.eM|tn1o/oG| ڷmV8P.[P`:o5R-, @8 @ _@Оo|;1 @ H SNB;Se:',_|/>০ѣGggnXP7,W_}u_2w͚5eʕ߼G-7|s| +VXy/"@ p&A @=vb" @ @ @Cټys_vm|c׾7iG6wyg;vl[o<ó!}1_*֭kyC[oroÇO޿ Ow[?뮛]v~{ @# h @ @ۡ=NLD @$kО.^{m;v&r?O?]>zK/T.!<  @ ړ/x @ @` /ގ @}哟dyW&CZuz @X|ۉ @ @D~[n-li}ݺu7߱cG~o,o>~ @n @=vb" @ @ e<媫j*G|KΝ;˞={ڿ5̾~1{k3 @`QC @ @[=NLD @$XƷrÚ @`Ѯփ @ @[=NLD @$'ZQ @, hyޞ @t h+ @}\4 @" @ @ =fN!hϷ @ @퉖a @= {^'@ @;@ A{Ǡ#@ @% h>=  @=, @! hϱSD @ @@"A{e @@  @ @@;B1 @ @q ǵOOC @ " h9K @rsa|;1 @ H@ОhF!@ гx{ @ ЁĎw : @ @`\q @#z @ @{9=NLD @$'ZQ @, hyޞ @t h+ @}\4 @" @ @ =fN!hϷ @ @퉖a @= {^'@ @;@ A{Ǡ#@ @% h>=  @=, @! hϱSD @ @@"A{e @@  @ @@;B1 @ @q ǵOOC @ " h9K @rsa|;1 @ H@ОhF!@ гx{ @ ЁĎw : @ @`\q @#z @ @{9=NLD @$'ZQ @, hyޞ @t h+ @}\4 @" @ @ =fN!hϷ @ @퉖a @= {^'@ @;@ A{Ǡ#@ @% h>=  @=, @! hϱSD @ @@"A{e @@  @ @@;B1 @ @q ǵOOC @ " h9K @rsa|;1 @ H@ОhF!@ гx{ @ ЁĎw : @ @`\q @#z @ @{9=NLD @$'ZQ @, hyޞ @t h+ @Ck\i @X/|g/> @ н{荂 @ @^#@ (A"@ @))0kA{ @o=' @D|C{Ty @ Ѝ.ow. @ @ A @v @ C@Оc3ۉ @ @GHA @A{ @ @;@ A{Ǡ#@ @L } @, @ @{9=NLD @D@>Ez  @;@t @@@bW;u @ @`Z@@ @g @)vb" @ @`$,c @@@+ @ @@;B1 @ @v @>  @ @ =fN!hϷ @ @#d @@]A @:wA]G @, @ 0- hY @ @990s A{ @}$ @:w  @ ЁĎw : @ @g @iA @! hϱSD @ @H#Y @ ЁDW @ @ v|cP @ @ @ @ @ S@Оo/|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N[ IDAT @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ @ @ @TW@ ^)h:N @ @ @ @ @0$hϷ @ @ @ @ @ PA@^5x=8 @ @ @ @ @ۓ=NLD @ @ @ @ @@A{  @ @ @ @ @ C@ОoO|;1 @ @ @ @ @PW ڃ @ @ @ @ @ 0 A{= D @ @ 
PynPoint-0.11.0/docs/about.rst000066400000000000000000000027311450275315200161540ustar00rootroot00000000000000.. _about: About ===== .. _contact: Contact ------- Questions can be directed to `Tomas Stolker `_ (stolker@strw.leidenuniv.nl) and `Markus Bonse `_ (mbonse@phys.ethz.ch), who have been the main developers during the recent years. .. _attribution: Attribution ----------- If you use PynPoint in your publication then please cite `Stolker et al. (2019) `_. Please also cite `Amara & Quanz (2012) `_ as the origin of PynPoint, which focused initially on the use of principal component analysis (PCA) as a PSF subtraction method. In case you use specifically the PCA-based background subtraction module or the wavelet based speckle suppression module, please give credit to `Hunziker et al. (2018) `_ or `Bonse et al. (preprint) `_, respectively. .. _acknowledgements: Acknowledgements ---------------- We would like to thank those who have provided `contributions `_ to PynPoint. The PynPoint logo was designed by `Atlas Interactive `_ and is `available `_ for use in presentations. PynPoint-0.11.0/docs/architecture.rst000066400000000000000000000130441450275315200175230ustar00rootroot00000000000000.. _architecture: Architecture ============ ..
_architecture_intro: Introduction ------------ PynPoint has evolved from a PSF subtraction toolkit to an end-to-end pipeline for processing and analysis of high-contrast imaging data. The architecture of PynPoint was redesigned in v0.3.0 with the goal to create a generic, modular, and open-source data reduction pipeline, which is extendable to new data processing techniques and data types. The actual pipeline and the processing modules have been separated in a different subpackages. Therefore, it is possible to extend the processing functionalities without intervening with the core of the pipeline. The UML class diagram below illustrates the pipeline architecture: .. image:: _static/uml.png :width: 100% The diagram shows that the architecture is subdivided in three components: * Data management: :class:`pynpoint.core.dataio` * Pipeline modules for reading, writing, and processing of data: :class:`pynpoint.core.processing` * The actual pipeline: :class:`pynpoint.core.pypeline` .. _central_database: Central database ---------------- The data management has been separated from the data processing for the following reasons: 1. Raw datasets can be very large (in particular in the 3--5 μm regime) which challenges the processing on a computer with a small amount of memory (RAM). A central database is used to store the data on a computer's hard drive. 2. Some data is used in different steps of the pipeline. A central database makes it easy to access that data without making a copy. 3. The central data storage on the hard drive will remain updated after each step. Therefore, processing steps that already finished remain unaffected if an error occurs or the data reduction is interrupted by the user. Understanding the central data storage classes can be helpful with the development of new pipeline modules (see :ref:`coding`). When running the pipeline, it is sufficient to understand the concept of database tags. Each pipeline module has input and/or output tags which point to specific dataset in the central database. A module with ``image_in_tag='im_arr'`` will read the input images from the central database under the tag name `im_arr`. Similarly, a module with ``image_out_tag='im_arr_processed'`` will store a the processed images in the central database under the tag `im_arr_processed`. Accessing the data storage occurs through instances of :class:`~pynpoint.core.dataio.Port` which allows pipeline modules to read data from and write data to database. .. _modules: Pipeline modules ---------------- A pipeline module has a specific task that is appended to the internal queue of a :class:`~pynpoint.core.pypeline.Pypeline` instance. Pipeline modules can read and write data tags from and to the central database through dedicated input and output connections. There are three types of pipeline modules: 1. :class:`pynpoint.core.processing.ReadingModule` - A module with only output tags/ports, used to read data to the central database. 2. :class:`pynpoint.core.processing.WritingModule` - A module with only input tags/ports, used to export data from the central database. 3. :class:`pynpoint.core.processing.ProcessingModule` - A module with both input and output tags/ports, used for processing of the data. Typically, a :class:`~pynpoint.core.processing.ProcessingModule` reads one or multiple datasets from the database, applies a specific processing task with user-defined parameter values, and stores the results as a new dataset in the database. 
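As an illustration of how database tags chain pipeline modules together, the sketch below connects a reading module and a processing module through a shared tag. This is for illustration only: the tag names (``science``, ``science_clean``), the ``name_in`` values, and the folder paths are placeholders, and all remaining parameters of :class:`~pynpoint.processing.badpixel.BadPixelSigmaFilterModule` are assumed to keep their default values.

.. code-block:: python

    from pynpoint import Pypeline, FitsReadingModule, BadPixelSigmaFilterModule

    pipeline = Pypeline(working_place_in='/path/to/working_place',
                        input_place_in='/path/to/input_place',
                        output_place_in='/path/to/output_place')

    # The reading module stores the imported images under the 'science' tag
    module = FitsReadingModule(name_in='read', image_tag='science')
    pipeline.add_module(module)

    # The processing module reads the 'science' tag that was created by the
    # reading module and stores its result under the new 'science_clean' tag
    module = BadPixelSigmaFilterModule(name_in='bad_pixel',
                                       image_in_tag='science',
                                       image_out_tag='science_clean')
    pipeline.add_module(module)

    pipeline.run()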
In order to create a valid data reduction cascade, one should check that the required input tags are linked to data which were previously created by another pipeline module. In other words, there needs to be a previous module which has stored output under that same tag name. .. _pypeline: Pypeline -------- The :class:`~pynpoint.core.pypeline` module is the central component which manages the order and execution of the different pipeline modules. Each :class:`~pynpoint.core.pypeline.Pypeline` instance has an ``working_place_in`` path which is where the central database and configuration file are stored, an ``input_place_in`` path which is the default data location for reading modules, and an ``output_place_in`` path which is the default output path where the data will be saved by the writing modules: .. code-block:: python from pynpoint import Pypeline, FitsReadingModule pipeline = Pypeline(working_place_in='/path/to/working_place', input_place_in='/path/to/input_place', output_place_in='/path/to/output_place') A pipeline module is created from any of the classes listed in the :ref:`pipeline_modules` section, for example: .. code-block:: python module = FitsReadingModule(name_in='read', image_tag='input') The module is appended to the pipeline queue as: .. code-block:: python pipeline.add_module(module) And can be removed from the queue with the following method: .. code-block:: python pipeline.remove_module('read') The names and order of the pipeline modules can be listed with: .. code-block:: python pipeline.get_module_names() Running all modules attached to the pipeline is achieved with: .. code-block:: python pipeline.run() Or a single module is executed as: .. code-block:: python pipeline.run_module('read') Both run methods will check if the pipeline has valid input and output tags. An instance of :class:`~pynpoint.core.pypeline.Pypeline` can be used to directly access data from the central database. See the :ref:`hdf5_files` section for more information. PynPoint-0.11.0/docs/coding.rst000066400000000000000000000201551450275315200163050ustar00rootroot00000000000000.. _coding: Coding a new module =================== .. _constructor: There are three different types of pipeline modules: :class:`~pynpoint.core.processing.ReadingModule`, :class:`~pynpoint.core.processing.WritingModule`, and :class:`~pynpoint.core.processing.ProcessingModule`. The concept is similar for these three modules so here we will explain only how to code a processing module. Class constructor ----------------- First, we need to import the interface (i.e. abstract class) :class:`~pynpoint.core.processing.ProcessingModule`: : .. code-block:: python from pynpoint.core.processing import ProcessingModule All pipeline modules are classes which contain the parameters of the pipeline step, input ports and/or output ports. So let’s create a simple ``ExampleModule`` class using the ProcessingModule interface (inheritance): .. code-block:: python class ExampleModule(ProcessingModule): When an IDE like *PyCharm* is used, a warning will appear that all abstract methods must be implemented in the ``ExampleModule`` class. The abstract class :class:`~pynpoint.core.processing.ProcessingModule` has some abstract methods which have to be implemented by its children classes (e.g., ``__init__`` and ``run``). We start by implementing the ``__init__`` method (i.e., the constructor of our module): .. 
code-block:: python def __init__(self, name_in='example', in_tag_1='in_tag_1', in_tag_2='in_tag_2', out_tag_1='out_tag_1', out_tag_2='out_tag_2', parameter_1=0, parameter_2='value'): Each ``__init__`` method of :class:`~pynpoint.core.processing.PypelineModule` requires a ``name_in`` argument which is used by the pipeline to run individual modules by name. Furthermore, the input and output tags have to be defined which are used to access data from the central database. The constructor starts with a call of the :class:`~pynpoint.core.processing.ProcessingModule` interface: .. code-block:: python super().__init__(name_in) Next, the input and output ports behind the database tags need to be defined: .. code-block:: python self.m_in_port_1 = self.add_input_port(in_tag_1) self.m_in_port_2 = self.add_input_port(in_tag_2) self.m_out_port_1 = self.add_output_port(out_tag_1) self.m_out_port_2 = self.add_output_port(out_tag_2) Reading from and writing to the central database should always be done with the ``add_input_port`` and ``add_output_port`` functionalities and not by manually creating an instance of :class:`~pynpoint.core.dataio.InputPort` or :class:`~pynpoint.core.dataio.OutputPort`. Finally, the module parameters should be saved as attributes of the ``ExampleModule`` instance: .. code-block:: python self.m_parameter_1 = parameter_1 self.m_parameter_2 = parameter_2 That's it! The constructor of the ``ExampleModule`` is ready. .. _run_method: Run method ---------- We can now add the functionalities of the module in the ``run`` method which will be called by the pipeline: .. code-block:: python def run(self): The input ports of the module are used to load data from the central database into the memory with slicing or with the ``get_all`` method: .. code-block:: python data1 = self.m_in_port_1.get_all() data2 = self.m_in_port_2[0:4] We want to avoid using the ``get_all`` method because data sets obtained in the $L'$ and $M'$ bands typically consist of thousands of images so loading all images at once in the computer memory might not be possible. Instead, it is recommended to use the ``MEMORY`` attribute that is specified in the configuration file (see :ref:`configuration`). Attributes of a dataset can be read as follows: .. code-block:: python parang = self.m_in_port_1.get_attribute('PARANG') pixscale = self.m_in_port_2.get_attribute('PIXSCALE') And attributes of the central configuration are accessed through the :class:`~pynpoint.core.dataio.ConfigPort`: .. code-block:: python memory = self._m_config_port.get_attribute('MEMORY') cpu = self._m_config_port.get_attribute('CPU') More information on importing of data can be found in the API documentation of :class:`~pynpoint.core.dataio.InputPort`. Next, the processing steps are implemented: .. code-block:: python result1 = 10.*self.m_parameter_1 result2 = 20.*self.m_parameter_1 result3 = [1, 2, 3] attribute = self.m_parameter_2 The output ports are used to write the results to the central database: .. code-block:: python self.m_out_port_1.set_all(result1) self.m_out_port_1.append(result2) self.m_out_port_2[0:2] = result2 self.m_out_port_2.add_attribute(name='new_attribute', value=attribute) More information on storing of data can be found in the API documentation of :class:`~pynpoint.core.dataio.OutputPort`. The data attributes of the input port need to be copied and history information should be added. These steps should be repeated for all the output ports: ..
code-block:: python self.m_out_port_1.copy_attributes(self.m_in_port_1) self.m_out_port_1.add_history('ExampleModule', 'history text') self.m_out_port_2.copy_attributes(self.m_in_port_1) self.m_out_port_2.add_history('ExampleModule', 'history text') Finally, the central database and all the open ports are closed: .. code-block:: python self.m_out_port_1.close_port() .. important:: It is enough to close only one port because all other ports will be closed automatically. .. _apply_function: Apply function to images ------------------------ A processing module often applies a specific method to each image of an input port. Therefore, the :func:`~pynpoint.core.processing.ProcessingModule.apply_function_to_images` function has been implemented to apply a function to all images of an input port. This function uses the ``CPU`` and ``MEMORY`` parameters from the configuration file to automatically process subsets of images in parallel. An example of the implementation can be found in the code of the bad pixel cleaning with a sigma filter: :class:`~pynpoint.processing.badpixel.BadPixelSigmaFilterModule`. .. _example_module: Example module -------------- The full code for the ``ExampleModule`` from above is: .. code-block:: python from pynpoint.core.processing import ProcessingModule class ExampleModule(ProcessingModule): def __init__(self, name_in='example', in_tag_1='in_tag_1', in_tag_2='in_tag_2', out_tag_1='out_tag_1', out_tag_2='out_tag_2', parameter_1=0, parameter_2='value'): super().__init__(name_in) self.m_in_port_1 = self.add_input_port(in_tag_1) self.m_in_port_2 = self.add_input_port(in_tag_2) self.m_out_port_1 = self.add_output_port(out_tag_1) self.m_out_port_2 = self.add_output_port(out_tag_2) self.m_parameter_1 = parameter_1 self.m_parameter_2 = parameter_2 def run(self): data1 = self.m_in_port_1.get_all() data2 = self.m_in_port_2[0:4] parang = self.m_in_port_1.get_attribute('PARANG') pixscale = self.m_in_port_2.get_attribute('PIXSCALE') memory = self._m_config_port.get_attribute('MEMORY') cpu = self._m_config_port.get_attribute('CPU') result1 = 10.*self.m_parameter_1 result2 = 20.*self.m_parameter_1 result3 = [1, 2, 3] attribute = self.m_parameter_2 self.m_out_port_1.set_all(result1) self.m_out_port_1.append(result2) self.m_out_port_2[0:2] = result2 self.m_out_port_2.add_attribute(name='new_attribute', value=attribute) self.m_out_port_1.copy_attributes(self.m_in_port_1) self.m_out_port_1.add_history('ExampleModule', 'history text') self.m_out_port_2.copy_attributes(self.m_in_port_1) self.m_out_port_2.add_history('ExampleModule', 'history text') self.m_out_port_1.close_port() PynPoint-0.11.0/docs/conf.py000066400000000000000000000066141450275315200156110ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here.
import os import sys sys.path.insert(0, os.path.abspath('../')) # -- Project information ----------------------------------------------------- project = 'PynPoint' copyright = '2014-2023, Tomas Stolker, Markus Bonse, Sascha Quanz, and Adam Amara' author = 'Tomas Stolker, Markus Bonse, Sascha Quanz, and Adam Amara' # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', 'nbsphinx' ] # Disable notebook timeout nbsphinx_timeout = -1 # Allow errors from notebooks nbsphinx_allow_errors = True autoclass_content = 'both' # Add any paths that contain templates here, relative to this directory. templates_path = [] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'tutorials/.ipynb_checkpoints/*'] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_book_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { 'path_to_docs': 'docs', 'repository_url': 'https://github.com/PynPoint/PynPoint', 'repository_branch': 'main', 'launch_buttons': { 'binderhub_url': 'https://mybinder.org', 'notebook_interface': 'jupyterlab', }, 'use_edit_page_button': True, 'use_issues_button': True, 'use_repository_button': True, 'use_download_button': True, } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} html_logo = '_static/logo.png' html_favicon = '_static/favicon.png' html_search_language = 'en' html_context = {'display_github': True, 'github_user': 'PynPoint', 'github_repo': 'PynPoint', 'github_version': 'main/docs/'} PynPoint-0.11.0/docs/configuration.rst000066400000000000000000000106161450275315200177120ustar00rootroot00000000000000.. _configuration: Configuration ============= .. _config_intro: Introduction ------------ A configuration file has to be stored in the ``working_place_in`` with the name ``PynPoint_config.ini``. The file will be created with default values in case it does not exist when the pipeline is initiated. The values of the configuration file are stored in a separate group of the central database, each time the pipeline is initiated. .. _config_file: Configuration file ------------------ The file contains two different sections of configuration parameters. The ``header`` section is used to link attributes in PynPoint with header values in the FITS files that will be imported into the database. 
For example, some of the pipeline modules require values for the dithering position. These attributes are stored as ``DITHER_X`` and ``DITHER_Y`` in the central database and are for example provided by the ``ESO SEQ CUMOFFSETX`` and ``ESO SEQ CUMOFFSETY`` values in the FITS header. Setting ``DITHER_X: ESO SEQ CUMOFFSETX`` in the ``header`` section of the configuration file makes sure that the relevant FITS header values are imported when :class:`~pynpoint.readwrite.fitsreading.FitsReadingModule` is executed. Therefore, FITS files have to be imported again if values in the ``header`` section are changed. Values can be set to ``None`` since ``header`` values are only required for some of the pipeline modules. The second section of the configuration values contains the central settings that are used by the pipeline modules. These values are stored in the ``settings`` section of the configuration file. The pixel scale can be provided in arcsec per pixel (e.g. ``PIXSCALE: 0.027``), the number of images that will be simultaneously loaded into the memory (e.g. ``MEMORY: 1000``), and the number of cores that are used for pipeline modules that have multiprocessing capabilities (e.g. ``CPU: 8``) such as :class:`~pynpoint.processing.psfsubtraction.PcaPsfSubtractionModule` and :class:`~pynpoint.processing.fluxposition.MCMCsamplingModule`. A complete overview of the pipeline modules that support multiprocessing is available in the :ref:`pipeline_modules` section. Note that some of the pipeline modules provide also multithreading support, which by default runs on all available CPUs. The multithreading can be controlled from the command line by setting the ``OMP_NUM_THREADS`` environment variable: .. code-block:: console $ export OMP_NUM_THREADS=8 In this case a maximum of 8 threads is used. So, if a modules provide both multiprocessing and multithreading support, then the total number of used cores is equal to the product of the values chosen for ``CPU`` in the configuration file and ``OMP_NUM_THREADS`` from the command line. .. _config_examples: Examples -------- In this section, several examples are provided of the configuration content for the following instruments: - :ref:`config_naco` - :ref:`config_sphere` - :ref:`config_visir` .. _config_naco: VLT/NACO ^^^^^^^^ .. code-block:: ini [header] INSTRUMENT: INSTRUME NFRAMES: NAXIS3 EXP_NO: ESO DET EXP NO DIT: ESO DET DIT NDIT: ESO DET NDIT PARANG_START: ESO ADA POSANG PARANG_END: ESO ADA POSANG END DITHER_X: ESO SEQ CUMOFFSETX DITHER_Y: ESO SEQ CUMOFFSETY PUPIL: ESO ADA PUPILPOS DATE: DATE-OBS LATITUDE: ESO TEL GEOLAT LONGITUDE: ESO TEL GEOLON RA: RA DEC: DEC [settings] PIXSCALE: 0.027 MEMORY: 1000 CPU: 1 .. _config_sphere: VLT/SPHERE/IRDIS ^^^^^^^^^^^^^^^^ .. code-block:: ini [header] INSTRUMENT: INSTRUME NFRAMES: NAXIS3 EXP_NO: ESO DET EXP ID DIT: EXPTIME NDIT: ESO DET NDIT PARANG_START: ESO TEL PARANG START PARANG_END: ESO TEL PARANG END DITHER_X: ESO INS1 DITH POSX DITHER_Y: ESO INS1 DITH POSY PUPIL: None DATE: DATE-OBS LATITUDE: ESO TEL GEOLAT LONGITUDE: ESO TEL GEOLON RA: ESO INS4 DROT2 RA DEC: ESO INS4 DROT2 DEC [settings] PIXSCALE: 0.01226 MEMORY: 1000 CPU: 1 .. _config_visir: VLT/VISIR ^^^^^^^^^ .. 
code-block:: ini [header] INSTRUMENT: INSTRUME NFRAMES: NAXIS3 EXP_NO: ESO TPL EXPNO DIT: ESO DET SEQ1 DIT NDIT: ESO DET CHOP NCYCLES PARANG_START: ESO ADA POSANG PARANG_END: ESO ADA POSANG END DITHER_X: None DITHER_Y: None PUPIL: ESO ADA PUPILPOS DATE: DATE-OBS LATITUDE: ESO TEL GEOLAT LONGITUDE: ESO TEL GEOLON RA: RA DEC: DEC [settings] PIXSCALE: 0.045 MEMORY: 1000 CPU: 1 PynPoint-0.11.0/docs/contributing.rst000066400000000000000000000010201450275315200175370ustar00rootroot00000000000000.. _contributing: Contributing ============ We welcome contributions, for example with the development of new pipeline modules and tutorials, improving existing functionalities, and fixing bugs. Please consider forking the Github repository and creating a `pull request `_ for implementations that could be of interest for other users. Bug reports and functionality requests can be provided by creating an `issue `_ on the Github page. PynPoint-0.11.0/docs/index.rst000066400000000000000000000016511450275315200161510ustar00rootroot00000000000000.. _index: PynPoint ======== PynPoint is a pipeline for processing and analysis of high-contrast imaging data of exoplanets. The pipeline uses principal component analysis for the subtraction of the stellar PSF and supports post-processing with ADI, RDI, and SDI techniques. .. figure:: _static/eso.jpg :width: 100% :target: http://www.eso.org/public/news/eso1310/ .. toctree:: :maxdepth: 2 :caption: Getting started :hidden: installation tutorials/first_example.ipynb .. toctree:: :maxdepth: 2 :caption: User documentation :hidden: architecture configuration pipeline_modules running_pynpoint tutorials modules .. toctree:: :maxdepth: 2 :caption: NEAR documentation :hidden: near .. toctree:: :maxdepth: 2 :caption: Developer documentation :hidden: python coding .. toctree:: :maxdepth: 2 :caption: About :hidden: contributing about PynPoint-0.11.0/docs/installation.rst000066400000000000000000000065661450275315200175550ustar00rootroot00000000000000.. _installation: Installation ============ PynPoint is compatible with `Python `_ versions 3.9/3.10/3.11. .. _virtual_environment: Virtual Environment ------------------- PynPoint is available in the `PyPI repository `_ and on `Github `_. We recommend using a Python virtual environment to install and run PynPoint such that the correct dependency versions are installed without affecting other Python installations. First install ``virtualenv``, for example with the `pip package manager `_: .. code-block:: console $ pip install virtualenv Then create a virtual environment: .. code-block:: console $ virtualenv -p python3 folder_name And activate the environment with: .. code-block:: console $ source folder_name/bin/activate A virtual environment can be deactivated with: .. code-block:: console $ deactivate .. important:: Make sure to adjust the path where the virtual environment is installed and activated. .. _installation_pypi: Installation from PyPI ---------------------- PynPoint can now be installed with pip: .. code-block:: console $ pip install pynpoint If you do not use a virtual environment then you may have to add the ``--user`` argument: .. code-block:: console $ pip install --user pynpoint To update the installation to the most recent version: .. code-block:: console $ pip install --upgrade PynPoint .. _installation_github: Installation from Github ------------------------ Using pip ^^^^^^^^^ The Github repository contains the latest commits. Installation from Github is also possible with ``pip``: .. 
code-block:: console $ pip install git+https://github.com/PynPoint/PynPoint.git Cloning the repository ^^^^^^^^^^^^^^^^^^^^^^ Alternatively, the Github repository can be cloned, which is in particular useful if you want to look into the code: .. code-block:: console $ git clone https://github.com/PynPoint/PynPoint.git The package is installed by running ``pip`` in the local repository folder: .. code-block:: console $ pip install -e . Instead of installing the package, the path of the repository can also be added to the ``PYTHONPATH`` environment variable such that PynPoint can be imported from any working folder. When using a ``virtualenv``, the export of the ``PYTHONPATH`` can be added to the activation script so that the variable is set whenever the environment is activated. Once a local copy of the repository exists, new commits can be pulled from Github with: .. code-block:: console $ git pull origin main Do you want to make changes to the code? Please fork the PynPoint repository on the Github page and clone your own fork instead of the main repository. We very much welcome contributions and pull requests (see :ref:`contributing` section). Dependencies ^^^^^^^^^^^^ If needed, the dependencies can be manually installed from the PynPoint folder: .. code-block:: console $ pip install -r requirements.txt Or updated to the latest versions with which PynPoint is compatible: .. code-block:: console $ pip install --upgrade -r requirements.txt .. _testing_pynpoint: Testing PynPoint ---------------- The installation can be tested by starting Python in interactive mode and creating an instance of the ``Pypeline``: .. code-block:: python >>> import pynpoint >>> pipeline = pynpoint.Pypeline() PynPoint-0.11.0/docs/modules.rst000066400000000000000000000001311450275315200165020ustar00rootroot00000000000000.. _api: API Documentation ================= .. toctree:: :maxdepth: 4 pynpoint PynPoint-0.11.0/docs/near.rst000066400000000000000000000415471450275315200157660ustar00rootroot00000000000000.. _near_data: Data reduction ============== .. _near_intro: Introduction ------------ The documentation on this page contains an introduction to the reduction of data from the modified `VLT/VISIR `_ instrument for the `NEAR `_ (New Earths in the Alpha Cen Region) experiment. All data are available in the ESO archive under program ID `2102.C-5011(A) `_. The basic processing steps with PynPoint are described in the example below, while a complete overview of all available pipeline modules can be found in the :ref:`pipeline_modules` section. Further details about the pipeline architecture and data processing are also available in `Stolker et al. (2019) `_. More in-depth information on the input parameters for individual PynPoint modules can be found in the :ref:`api`. Please also have a look at the :ref:`attribution` section when using PynPoint results in a publication. .. _near_example: Example ------- In this example, we will process the images of chop A (i.e., frames in which alpha Cen A was centered behind the AGPM coronagraph). Note that the same procedure can be applied to the images of chop B (i.e., with alpha Cen B centered behind the coronagraph). Setup ^^^^^ To get started, use the instructions available in the :ref:`installation` section to install PynPoint. We also need to download the NEAR data associated with the ESO program that was provided above. It is recommended to start with downloading only a few files to first validate the pipeline installation. Now that we have the data, we can start the data reduction with PynPoint!
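As a quick check before setting up the pipeline, the installation can be verified in interactive mode by importing the package and printing its version number (a minimal sanity check; it assumes that the installed package exposes a ``pynpoint.__version__`` attribute):

.. code-block:: python

    >>> import pynpoint
    >>> print(pynpoint.__version__)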
The :class:`~pynpoint.core.pypeline.Pypeline` of PynPoint requires a folder for the ``working_place``, ``input_place``, and ``output_place``. Before we start running PynPoint, we have to put the raw NEAR data in the default input folder or the location that will be provided as ``input_dir`` in the :class:`~pynpoint.readwrite.nearreading.NearReadingModule`. Then we create a configuration file which contains the global pipeline settings and is used to select the required FITS header keywords. Create a text file called ``PynPoint_config.ini`` in the ``working_place`` folder with the following content: .. code-block:: ini [header] INSTRUMENT: INSTRUME NFRAMES: ESO DET CHOP NCYCLES EXP_NO: ESO TPL EXPNO DIT: ESO DET SEQ1 DIT NDIT: None PARANG_START: ESO ADA POSANG PARANG_END: ESO ADA POSANG END DITHER_X: None DITHER_Y: None PUPIL: ESO ADA PUPILPOS DATE: DATE-OBS LATITUDE: ESO TEL GEOLAT LONGITUDE: ESO TEL GEOLON RA: RA DEC: DEC [settings] PIXSCALE: 0.045 MEMORY: 1000 CPU: 1 The ``MEMORY`` and ``CPU`` settings can be adjusted. They define the number of images that is simultaneously loaded into the computer memory and the number of parallel processes that are used by some of the pipeline modules. Note that in addition to the config file above, the ``working_place`` directory is also used to store the database file (`PynPoint_database.hdf5`). This database stores all intermediate results (typically a stack of images), which allows the user to rerun particular processing steps without having to rerun the complete pipeline. Running PynPoint ^^^^^^^^^^^^^^^^ Example code snippets for the different steps to reduce NEAR data with PynPoint are included below. These code snippets can be executed in Python interactive mode, in a Jupyter notebook, or combined into a Python script that is executed from the command line. The first steps are to initialize the pipeline and read in the data contained in the given ``input_place_in`` directory. Data are automatically divided into the chop A and chop B data sets. Here we also use the :class:`~pynpoint.processing.psfpreparation.AngleInterpolationModule` to calculate the parallactic angle for each individual frame, which is necessary for derotating and combining the frames after PSF subtraction: ..
code-block:: python # Import the Pypeline and the modules that we will use in this example from pynpoint import Pypeline, NearReadingModule, AngleInterpolationModule, \ CropImagesModule, SubtractImagesModule, ExtractBinaryModule, \ StarAlignmentModule, FitCenterModule, ShiftImagesModule, \ FakePlanetModule, PSFpreparationModule, PcaPsfSubtractionModule, \ ContrastCurveModule, FitsWritingModule, TextWritingModule # Create a Pypeline instance (change the directories to the correct paths) pipeline = Pypeline(working_place_in='working_folder/', # directory for database and config files input_place_in='input_folder/', # default directory for reading in input data output_place_in='output_folder/') # default directory for saving output files # (i.e., with FitsWritingModule used below) # Read the raw data (i.e., all the fits files contained in the input_place_in folder above) # and separate the chop A and chop B images module = NearReadingModule(name_in='read', input_dir=None, chopa_out_tag='chopa', chopb_out_tag='chopb') pipeline.add_module(module) # Interpolate the parallactic angles between the start and end value of each FITS file # The angles will be added as PARANG attribute to the chop A and chop B datasets module = AngleInterpolationModule(name_in='angle1', data_tag='chopa') pipeline.add_module(module) module = AngleInterpolationModule(name_in='angle2', data_tag='chopb') pipeline.add_module(module) # Run each of the above modules using their 'name_in' tags pipeline.run_module('read') pipeline.run_module('angle1') pipeline.run_module('angle2') # Note that you can also run all the added modules using this function: # pipeline.run() The next step is to reduce the chop A frames with alpha Cen A behind the corognagraph. Here we crop the chop A and chop B images around the coronagraph position, subtract chop B from chop A to remove the sky background, and center the subtracted chop A frames: .. code-block:: python # Crop the chop A and chop B images around the approximate coronagraph position module = CropImagesModule(size=5., center=(432, 287), name_in='crop1', image_in_tag='chopa', image_out_tag='chopa_crop') pipeline.add_module(module) module = CropImagesModule(size=5., center=(432, 287), name_in='crop2', image_in_tag='chopb', image_out_tag='chopb_crop') pipeline.add_module(module) # Subtract frame-by-frame chop B from chop A module = SubtractImagesModule(name_in='subtract_aminusb', image_in_tags=('chopa_crop', 'chopb_crop'), image_out_tag='chopa_sub', scaling=1.) pipeline.add_module(module) # Fit the center position of chop A, using the images from before the chop-subtraction # For simplicity, only the mean of all images is fitted module = FitCenterModule(name_in='center1', image_in_tag='chopa_crop', fit_out_tag='chopa_fit', mask_out_tag=None, method='mean', radius=1., sign='positive', model='moffat', filter_size=None, guess=(0., 0., 10., 10., 1e4, 0., 0., 1.)) pipeline.add_module(module) # Center the chop-subtracted images module = ShiftImagesModule(shift_xy='chopa_fit', name_in='shift1', image_in_tag='chopa_sub', image_out_tag='chopa_center', interpolation='spline') pipeline.add_module(module) # Run each of the above modules pipeline.run_module('crop1') pipeline.run_module('crop2') pipeline.run_module('subtract_aminusb') pipeline.run_module('center1') pipeline.run_module('shift1') Next, we use the chop B frames where alpha Cen A if off of the coronagraph to extract a reference PSF. This reference PSF will later be used for calculating the detection limits: .. 
code-block:: python # Subtract chop A from chop B before extracting the non-coronagraphic PSF module = SubtractImagesModule(name_in='subtract_bminusa', image_in_tags=('chopb', 'chopa'), image_out_tag='chopb_sub', scaling=1.) pipeline.add_module(module) # Crop out the non-coronagraphic PSF for chop A from the chop B images module = ExtractBinaryModule(pos_center=(432., 287.), pos_binary=(430., 175.), name_in='extract_refpsf', image_in_tag='chopb_sub', image_out_tag='psfa', image_size=5., search_size=1., filter_size=None) pipeline.add_module(module) # Align the non-coronagraphic PSF images module = StarAlignmentModule(name_in='align_refpsf', image_in_tag='psfa', ref_image_in_tag=None, image_out_tag='psfa_align', interpolation='spline', accuracy=10, resize=None, num_references=10, subframe=1.) pipeline.add_module(module) # Fit the center position of the mean, non-coronagraphic PSF module = FitCenterModule(name_in='center_refpsf', image_in_tag='psfa', fit_out_tag='psfa_fit', mask_out_tag=None, method='mean', radius=1., sign='positive', model='moffat', filter_size=None, guess=(0., 0., 10., 10., 1e4, 0., 0., 1.)) pipeline.add_module(module) # Center the non-coronagraphic PSF images module = ShiftImagesModule(shift_xy='psfa_fit', name_in='shift_refpsf', image_in_tag='psfa', image_out_tag='psfa_center', interpolation='spline') pipeline.add_module(module) # Mask the non-coronagraphic PSF beyond 1 arsec module = PSFpreparationModule(name_in='prep_refpsf', image_in_tag='psfa_center', image_out_tag='psfa_mask', mask_out_tag=None, norm=False, cent_size=None, edge_size=1.) pipeline.add_module(module) # Run each of the above modules pipeline.run_module('subtract_bminusa') pipeline.run_module('extract_refpsf') pipeline.run_module('align_refpsf') pipeline.run_module('center_refpsf') pipeline.run_module('shift_refpsf') pipeline.run_module('prep_refpsf') Finally, we use PCA to subtract the stellar PSF of alpha Cen A. For testing purposes, we first use the reference PSF created above to inject a fake planet into the chop A data. The median combination of the PSF-subtracted and derotated frames is saved in its own tag and then written out to a fits file: .. code-block:: python # Inject a fake planet at a separation of 1 arcsec and a contrast of 10 mag module = FakePlanetModule(position=(1., 0.), magnitude=10., psf_scaling=1., interpolation='spline', name_in='fake', image_in_tag='chopa_center', psf_in_tag='psfa_mask', image_out_tag='chopa_fake') pipeline.add_module(module) # Mask the central and outer part of the chop A images module = PSFpreparationModule(name_in='prep_data', image_in_tag='chopa_fake', image_out_tag='chopa_prep', mask_out_tag=None, norm=False, cent_size=0.3, edge_size=3.) 
pipeline.add_module(module) # Subtract a PSF model with PCA and median-combine the residuals module = PcaPsfSubtractionModule(pca_numbers=range(1, 51), name_in='pca', images_in_tag='chopa_prep', reference_in_tag='chopa_prep', res_median_tag='chopa_pca', extra_rot=0.0) pipeline.add_module(module) # Datasets can be exported to FITS files by their tag name in the database # Here we will export the median-combined residuals of the PSF subtraction module = FitsWritingModule(name_in='write_result_psfsub', file_name='chopa_pca.fits', output_dir=None, data_tag='chopa_pca', data_range=None, overwrite=True) pipeline.add_module(module) # Run each of the above modules pipeline.run_module('fake') pipeline.run_module('prep_data') pipeline.run_module('pca') pipeline.run_module('write_result_psfsub') PynPoint also includes a module to calculate the detection limits of the final image: .. code-block:: python # Calculate detection limits between 0.8 and 2.0 arcsec # The false positive fraction is fixed to 2.87e-6 (i.e. 5 sigma for Gaussian statistics) module = ContrastCurveModule(name_in='limits', image_in_tag='chopa_center', psf_in_tag='psfa_mask', contrast_out_tag='limits', separation=(0.3, 2., 0.1), angle=(0., 360., 60.), threshold=('fpf', 2.87e-6), psf_scaling=1., aperture=0.15, pca_number=10, cent_size=0.3, edge_size=3., extra_rot=0., residuals='median') pipeline.add_module(module) # And we write the detection limits to a text file header = 'Separation [arcsec] - Contrast [mag] - Variance [mag] - FPF' module = TextWritingModule(name_in='write_result_limits', file_name='contrast_curve.dat', output_dir=None, data_tag='limits', header=header) pipeline.add_module(module) # Run each of the above modules pipeline.run_module('limits') pipeline.run_module('write_result_limits') .. _near_results: Results ------- The images that were exported to a FITS file can be visualized with a tool such as `DS9 `_. We can also use the :class:`~pynpoint.core.pypeline.Pypeline` functionalities to get the data from the database (without having to rerun the pipeline). For example, to get the residuals of the PSF subtraction: .. code-block:: python data = pipeline.get_data('chopa_pca') And to plot the residuals for 10 principal components (Python indexing starts at zero): .. code-block:: python import matplotlib.pyplot as plt plt.imshow(data[9, ], origin='lower') plt.show() .. image:: _static/near_residuals.png :width: 60% :align: center Or to plot the detection limits with the error bars showing the variance of the six azimuthal positions that were tested: .. code-block:: python data = pipeline.get_data('limits') plt.figure(figsize=(7, 4)) plt.errorbar(data[:, 0], data[:, 1], data[:, 2]) plt.xlim(0., 2.5) plt.ylim(12., 0.) plt.xlabel('Separation [arcsec]') plt.ylabel('Contrast [mag]') plt.show() .. image:: _static/near_limits.png :width: 70% :align: center PynPoint-0.11.0/docs/pipeline_modules.rst000066400000000000000000000235011450275315200203750ustar00rootroot00000000000000.. _pipeline_modules: Pipeline modules ================ This page contains a list of all available pipeline modules and a short description of what they are used for. Reading modules import data into the database, writing modules export data from the database, and processing modules run a specific task of the data reduction or analysis. More details on the design of the pipeline can be found in the :ref:`architecture` section. .. note:: All PynPoint classes ending with ``Module`` in their name (e.g. 
:class:`~pynpoint.readwrite.fitsreading.FitsReadingModule`) are pipeline modules that can be added to an instance of :class:`~pynpoint.core.pypeline.Pypeline` (see :ref:`pypeline` section). .. important:: The pipeline modules with multiprocessing functionalities are indicated with "CPU" in parentheses. The number of parallel processes can be set with the ``CPU`` parameter in the configuration file and the number of images that is simultaneously loaded into the memory with the ``MEMORY`` parameter. Pipeline modules that apply (in parallel) a function to subsets of images use a number of images per subset equal to ``MEMORY`` divided by ``CPU``. .. important:: The pipeline modules that are compatible with both regular imaging and integral field spectroscopy datasets (i.e. 3D and 4D data) are indicated with "IFS" in parentheses. All other modules are only compatible with regular imaging. .. _reading_module: Reading modules --------------- * :class:`~pynpoint.readwrite.fitsreading.FitsReadingModule` (IFS): Import FITS files and relevant header information into the database. * :class:`~pynpoint.readwrite.hdf5reading.Hdf5ReadingModule`: Import datasets and attributes from an HDF5 file (as created by PynPoint). * :class:`~pynpoint.readwrite.attr_reading.AttributeReadingModule`: Import a list of values as dataset attribute. * :class:`~pynpoint.readwrite.attr_reading.ParangReadingModule`: Import a list of parallactic angles as dataset attribute. * :class:`~pynpoint.readwrite.attr_reading.WavelengthReadingModule`: Import a list of calibrated wavelengths as dataset attribute. * :class:`~pynpoint.readwrite.nearreading.NearReadingModule` (CPU): Import VLT/VISIR data for the NEAR experiment. .. _writing_module: Writing modules --------------- * :class:`~pynpoint.readwrite.fitswriting.FitsWritingModule`: Export a dataset from the database to a FITS file. * :class:`~pynpoint.readwrite.hdf5writing.Hdf5WritingModule`: Export part of the database to a new HDF5 file. * :class:`~pynpoint.readwrite.textwriting.TextWritingModule`: Export a dataset to an ASCII file. * :class:`~pynpoint.readwrite.attr_writing.AttributeWritingModule`: Export a list of attribute values to an ASCII file. * :class:`~pynpoint.readwrite.attr_writing.ParangWritingModule`: Export the parallactic angles of a dataset to an ASCII file. .. _processing_module: Processing modules ------------------ Background subtraction ~~~~~~~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.background.SimpleBackgroundSubtractionModule`: Simple background subtraction for dithering datasets. * :class:`~pynpoint.processing.background.MeanBackgroundSubtractionModule`: Mean background subtraction for dithering datasets. * :class:`~pynpoint.processing.background.LineSubtractionModule` (CPU): Subtraction of striped detector artifacts. * :class:`~pynpoint.processing.background.NoddingBackgroundModule`: Background subtraction for nodding datasets. Bad pixel cleaning ~~~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.badpixel.BadPixelSigmaFilterModule` (CPU): Find and replace bad pixels with a sigma filter. * :class:`~pynpoint.processing.badpixel.BadPixelInterpolationModule` (CPU): Interpolate bad pixels with a spectral deconvolution technique. * :class:`~pynpoint.processing.badpixel.BadPixelMapModule`: Create a bad pixel map from dark and flat images. * :class:`~pynpoint.processing.badpixel.BadPixelTimeFilterModule` (CPU): Sigma clipping of bad pixels along the time dimension. 
* :class:`~pynpoint.processing.badpixel.ReplaceBadPixelsModule` (CPU): Replace bad pixels based on a bad pixel map. Basic processing ~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.basic.SubtractImagesModule`: Subtract two stacks of images. * :class:`~pynpoint.processing.basic.AddImagesModule`: Add two stacks of images * :class:`~pynpoint.processing.basic.RotateImagesModule`: Rotate a stack of images. * :class:`~pynpoint.processing.basic.RepeatImagesModule`: Repeat a stack of images. Centering ~~~~~~~~~ * :class:`~pynpoint.processing.centering.StarAlignmentModule` (CPU): Align the images with a cross-correlation. * :class:`~pynpoint.processing.centering.FitCenterModule` (CPU): Fit the PSF with a 2D Gaussian or Moffat function. * :class:`~pynpoint.processing.centering.ShiftImagesModule`: Shift a stack of images. * :class:`~pynpoint.processing.centering.WaffleCenteringModule` (IFS): Use the waffle spots to center the images. Dark and flat correction ~~~~~~~~~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.darkflat.DarkCalibrationModule`: Dark frame subtraction. * :class:`~pynpoint.processing.darkflat.FlatCalibrationModule`: Flat field correction. Denoising ~~~~~~~~~ * :class:`~pynpoint.processing.timedenoising.WaveletTimeDenoisingModule` (CPU): Wavelet-based denoising in the time domain. * :class:`~pynpoint.processing.timedenoising.TimeNormalizationModule` (CPU): Normalize a stack of images. Detection limits ~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.limits.ContrastCurveModule` (CPU): Compute a contrast curve. * :class:`~pynpoint.processing.limits.MassLimitsModule`: Calculate mass limits from a contrast curve and an isochrones model grid. Extract star ~~~~~~~~~~~~ * :class:`~pynpoint.processing.extract.StarExtractionModule` (CPU): Locate and crop the position of the star. * :class:`~pynpoint.processing.extract.ExtractBinaryModule` (CPU): Extract a PSF which rotates across a stack of images. Filters ~~~~~~~ * :class:`~pynpoint.processing.filter.GaussianFilterModule`: Apply a Gaussian filter to the images. Flux and position ~~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.fluxposition.FakePlanetModule`: Inject an artificial planet in a dataset. * :class:`~pynpoint.processing.fluxposition.SimplexMinimizationModule`: Determine the flux and position with a simplex minimization. * :class:`~pynpoint.processing.fluxposition.FalsePositiveModule`: Compute the signal-to-noise ratio and false positive fraction. * :class:`~pynpoint.processing.fluxposition.MCMCsamplingModule` (CPU): Estimate the flux and position of a planet with MCMC sampling. * :class:`~pynpoint.processing.fluxposition.AperturePhotometryModule` (CPU): Compute the integrated flux at a position. * :class:`~pynpoint.processing.fluxposition.SystematicErrorModule`: Compute the systematic errors on the flux and position. Frame selection ~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.frameselection.RemoveFramesModule`: Remove images by their index number. * :class:`~pynpoint.processing.frameselection.FrameSelectionModule`: Frame selection to remove low-quality image. * :class:`~pynpoint.processing.frameselection.RemoveLastFrameModule`: Remove the last image of a VLT/NACO dataset. * :class:`~pynpoint.processing.frameselection.RemoveStartFramesModule`: Remove images at the beginning of each original data cube. * :class:`~pynpoint.processing.frameselection.ImageStatisticsModule` (CPU): Compute statistics of the pixel values for each image. 
* :class:`~pynpoint.processing.frameselection.FrameSimilarityModule` (CPU): Compute different similarity measures of a set of images. * :class:`~pynpoint.processing.frameselection.SelectByAttributeModule`: Select images by the ascending/descending attribute values. * :class:`~pynpoint.processing.frameselection.ResidualSelectionModule`: Frame selection on the residuals of the PSF subtraction. Image resizing ~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.resizing.CropImagesModule` (IFS): Crop the images. * :class:`~pynpoint.processing.resizing.ScaleImagesModule` (CPU): Resample the images (spatially and/or in flux). * :class:`~pynpoint.processing.resizing.AddLinesModule`: Add pixel lines on the sides of the images. * :class:`~pynpoint.processing.resizing.RemoveLinesModule`: Remove pixel lines from the sides of the images. PCA background subtraction ~~~~~~~~~~~~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.pcabackground.PCABackgroundPreparationModule`: Preparation for the PCA-based background subtraction. * :class:`~pynpoint.processing.pcabackground.PCABackgroundSubtractionModule`: PCA-based background subtraction. * :class:`~pynpoint.processing.pcabackground.DitheringBackgroundModule`: Wrapper for background subtraction of dithering datasets. PSF preparation ~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.psfpreparation.PSFpreparationModule` (IFS): Mask the images before the PSF subtraction. * :class:`~pynpoint.processing.psfpreparation.AngleInterpolationModule`: Interpolate the parallactic angles between the start and end values. * :class:`~pynpoint.processing.psfpreparation.AngleCalculationModule`: Calculate the parallactic angles. * :class:`~pynpoint.processing.psfpreparation.SortParangModule` (IFS): Sort the images by parallactic angle. * :class:`~pynpoint.processing.psfpreparation.SDIpreparationModule`: Prepare the images for SDI. PSF subtraction ~~~~~~~~~~~~~~~ * :class:`~pynpoint.processing.psfsubtraction.PcaPsfSubtractionModule` (CPU, IFS): PSF subtraction with PCA. * :class:`~pynpoint.processing.psfsubtraction.ClassicalADIModule` (CPU): PSF subtraction with classical ADI. Stacking ~~~~~~~~ * :class:`~pynpoint.processing.stacksubset.StackAndSubsetModule`: Stack and/or select a random subset of the images. * :class:`~pynpoint.processing.stacksubset.StackCubesModule`: Collapse each original data cube separately. * :class:`~pynpoint.processing.stacksubset.DerotateAndStackModule` (IFS): Derotate and/or stack the images. * :class:`~pynpoint.processing.stacksubset.CombineTagsModule`: Combine multiple database tags into a single dataset. PynPoint-0.11.0/docs/pynpoint.core.rst000066400000000000000000000014531450275315200176510ustar00rootroot00000000000000pynpoint.core package ===================== Submodules ---------- pynpoint.core.attributes module ------------------------------- .. automodule:: pynpoint.core.attributes :members: :undoc-members: :show-inheritance: pynpoint.core.dataio module --------------------------- .. automodule:: pynpoint.core.dataio :members: :undoc-members: :show-inheritance: pynpoint.core.processing module ------------------------------- .. automodule:: pynpoint.core.processing :members: :undoc-members: :show-inheritance: pynpoint.core.pypeline module ----------------------------- .. automodule:: pynpoint.core.pypeline :members: :undoc-members: :show-inheritance: Module contents --------------- .. 
automodule:: pynpoint.core :members: :undoc-members: :show-inheritance: PynPoint-0.11.0/docs/pynpoint.processing.rst000066400000000000000000000057651450275315200211070ustar00rootroot00000000000000pynpoint.processing package =========================== Submodules ---------- pynpoint.processing.background module ------------------------------------- .. automodule:: pynpoint.processing.background :members: :undoc-members: :show-inheritance: pynpoint.processing.badpixel module ----------------------------------- .. automodule:: pynpoint.processing.badpixel :members: :undoc-members: :show-inheritance: pynpoint.processing.basic module -------------------------------- .. automodule:: pynpoint.processing.basic :members: :undoc-members: :show-inheritance: pynpoint.processing.centering module ------------------------------------ .. automodule:: pynpoint.processing.centering :members: :undoc-members: :show-inheritance: pynpoint.processing.darkflat module ----------------------------------- .. automodule:: pynpoint.processing.darkflat :members: :undoc-members: :show-inheritance: pynpoint.processing.extract module ---------------------------------- .. automodule:: pynpoint.processing.extract :members: :undoc-members: :show-inheritance: pynpoint.processing.filter module --------------------------------- .. automodule:: pynpoint.processing.filter :members: :undoc-members: :show-inheritance: pynpoint.processing.fluxposition module --------------------------------------- .. automodule:: pynpoint.processing.fluxposition :members: :undoc-members: :show-inheritance: pynpoint.processing.frameselection module ----------------------------------------- .. automodule:: pynpoint.processing.frameselection :members: :undoc-members: :show-inheritance: pynpoint.processing.limits module --------------------------------- .. automodule:: pynpoint.processing.limits :members: :undoc-members: :show-inheritance: pynpoint.processing.pcabackground module ---------------------------------------- .. automodule:: pynpoint.processing.pcabackground :members: :undoc-members: :show-inheritance: pynpoint.processing.psfpreparation module ----------------------------------------- .. automodule:: pynpoint.processing.psfpreparation :members: :undoc-members: :show-inheritance: pynpoint.processing.psfsubtraction module ----------------------------------------- .. automodule:: pynpoint.processing.psfsubtraction :members: :undoc-members: :show-inheritance: pynpoint.processing.resizing module ----------------------------------- .. automodule:: pynpoint.processing.resizing :members: :undoc-members: :show-inheritance: pynpoint.processing.stacksubset module -------------------------------------- .. automodule:: pynpoint.processing.stacksubset :members: :undoc-members: :show-inheritance: pynpoint.processing.timedenoising module ---------------------------------------- .. automodule:: pynpoint.processing.timedenoising :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: pynpoint.processing :members: :undoc-members: :show-inheritance: PynPoint-0.11.0/docs/pynpoint.readwrite.rst000066400000000000000000000031521450275315200207050ustar00rootroot00000000000000pynpoint.readwrite package ========================== Submodules ---------- pynpoint.readwrite.attr\_reading module --------------------------------------- .. automodule:: pynpoint.readwrite.attr_reading :members: :undoc-members: :show-inheritance: pynpoint.readwrite.attr\_writing module --------------------------------------- .. 
automodule:: pynpoint.readwrite.attr_writing :members: :undoc-members: :show-inheritance: pynpoint.readwrite.fitsreading module ------------------------------------- .. automodule:: pynpoint.readwrite.fitsreading :members: :undoc-members: :show-inheritance: pynpoint.readwrite.fitswriting module ------------------------------------- .. automodule:: pynpoint.readwrite.fitswriting :members: :undoc-members: :show-inheritance: pynpoint.readwrite.hdf5reading module ------------------------------------- .. automodule:: pynpoint.readwrite.hdf5reading :members: :undoc-members: :show-inheritance: pynpoint.readwrite.hdf5writing module ------------------------------------- .. automodule:: pynpoint.readwrite.hdf5writing :members: :undoc-members: :show-inheritance: pynpoint.readwrite.nearreading module ------------------------------------- .. automodule:: pynpoint.readwrite.nearreading :members: :undoc-members: :show-inheritance: pynpoint.readwrite.textwriting module ------------------------------------- .. automodule:: pynpoint.readwrite.textwriting :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: pynpoint.readwrite :members: :undoc-members: :show-inheritance: PynPoint-0.11.0/docs/pynpoint.rst000066400000000000000000000004441450275315200167210ustar00rootroot00000000000000.. _pynpoint-package: pynpoint package ================ Subpackages ----------- .. toctree:: pynpoint.core pynpoint.readwrite pynpoint.processing pynpoint.util Module contents --------------- .. automodule:: pynpoint :members: :undoc-members: :show-inheritance: PynPoint-0.11.0/docs/pynpoint.util.rst000066400000000000000000000065071450275315200177030ustar00rootroot00000000000000pynpoint.util package ===================== Submodules ---------- pynpoint.util.analysis module ----------------------------- .. automodule:: pynpoint.util.analysis :members: :undoc-members: :show-inheritance: pynpoint.util.apply\_func module -------------------------------- .. automodule:: pynpoint.util.apply_func :members: :undoc-members: :show-inheritance: pynpoint.util.attributes module ------------------------------- .. automodule:: pynpoint.util.attributes :members: :undoc-members: :show-inheritance: pynpoint.util.continuous module ------------------------------- .. automodule:: pynpoint.util.continuous :members: :undoc-members: :show-inheritance: pynpoint.util.image module -------------------------- .. automodule:: pynpoint.util.image :members: :undoc-members: :show-inheritance: pynpoint.util.limits module --------------------------- .. automodule:: pynpoint.util.limits :members: :undoc-members: :show-inheritance: pynpoint.util.mcmc module ------------------------- .. automodule:: pynpoint.util.mcmc :members: :undoc-members: :show-inheritance: pynpoint.util.module module --------------------------- .. automodule:: pynpoint.util.module :members: :undoc-members: :show-inheritance: pynpoint.util.multiline module ------------------------------ .. automodule:: pynpoint.util.multiline :members: :undoc-members: :show-inheritance: pynpoint.util.multipca module ----------------------------- .. automodule:: pynpoint.util.multipca :members: :undoc-members: :show-inheritance: pynpoint.util.multiproc module ------------------------------ .. automodule:: pynpoint.util.multiproc :members: :undoc-members: :show-inheritance: pynpoint.util.multistack module ------------------------------- .. 
automodule:: pynpoint.util.multistack :members: :undoc-members: :show-inheritance: pynpoint.util.postproc module ----------------------------- .. automodule:: pynpoint.util.postproc :members: :undoc-members: :show-inheritance: pynpoint.util.psf module ------------------------ .. automodule:: pynpoint.util.psf :members: :undoc-members: :show-inheritance: pynpoint.util.remove module --------------------------- .. automodule:: pynpoint.util.remove :members: :undoc-members: :show-inheritance: pynpoint.util.residuals module ------------------------------ .. automodule:: pynpoint.util.residuals :members: :undoc-members: :show-inheritance: pynpoint.util.sdi module ------------------------ .. automodule:: pynpoint.util.sdi :members: :undoc-members: :show-inheritance: pynpoint.util.star module ------------------------- .. automodule:: pynpoint.util.star :members: :undoc-members: :show-inheritance: pynpoint.util.tests module -------------------------- .. automodule:: pynpoint.util.tests :members: :undoc-members: :show-inheritance: pynpoint.util.type\_aliases module ---------------------------------- .. automodule:: pynpoint.util.type_aliases :members: :undoc-members: :show-inheritance: pynpoint.util.wavelets module ----------------------------- .. automodule:: pynpoint.util.wavelets :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: pynpoint.util :members: :undoc-members: :show-inheritance: PynPoint-0.11.0/docs/python.rst000066400000000000000000000036701450275315200163660ustar00rootroot00000000000000.. _python: Python guidelines ================= .. _starting: Getting started --------------- The modular architecture of PynPoint allows for easy implementation of new pipeline modules and we welcome contributions from users. Before writing a new PynPoint module, it is helpful to have a look at the :ref:`architecture` section. In addition, some basic knowledge of Python is required and some understanding of the following items can be helpful: * Python `types `_ such as lists, tuples, and dictionaries. * `Classes `_ and in particular the concept of inheritance. * `Abstract classes `_ as interfaces. .. _conventions: Conventions ----------- Before we start writing a new PynPoint module, please take notice of the following style conventions: * `PEP 8 `_ -- style guide for Python code * We recommend using `pylint `_ and `pycodestyle `_ to analyze newly written code in order to keep PynPoint well structured, readable, and documented. * Names of class members should start with ``m_``. * Images should ideally not be read from and written to the central database at once but in amounts of ``MEMORY``. Unit tests ---------- PynPoint is a robust pipeline package with 95% of the code covered by `unit tests `_. Testing of the package is done by running ``make test`` in the cloned repository. This requires the installation of: * `pytest `_ * `pytest-cov `_ The unit tests ensure that the output from existing functionalities will not change whenever new code is added. With these things in mind, we are now ready to code! PynPoint-0.11.0/docs/requirements.txt000066400000000000000000000000521450275315200175660ustar00rootroot00000000000000jupyter nbsphinx pandoc sphinx_book_theme PynPoint-0.11.0/docs/running_pynpoint.rst000066400000000000000000000126341450275315200204650ustar00rootroot00000000000000.. _running_pynpoint: Running PynPoint ================ ..
_running_intro: Introduction ------------ The pipeline can be executed with a Python script, in `interactive mode `_, or with a `Jupyter Notebook `_. The main components of PynPoint are the pipeline and the three types of pipeline modules: 1. :class:`~pynpoint.core.pypeline.Pypeline` -- The actual pipeline, which encapsulates a list of pipeline modules. 2. :class:`~pynpoint.core.processing.ReadingModule` -- Module for importing data and relevant header information from FITS, HDF5, or ASCII files into the database. 3. :class:`~pynpoint.core.processing.WritingModule` -- Module for exporting results from the database into FITS, HDF5, or ASCII files. 4. :class:`~pynpoint.core.processing.ProcessingModule` -- Module for processing data with a specific data reduction or analysis recipe. .. _initiating_pypeline: Initiating the Pypeline ----------------------- The pipeline is initiated by creating an instance of :class:`~pynpoint.core.pypeline.Pypeline`: .. code-block:: python pipeline = Pypeline(working_place_in='/path/to/working_place', input_place_in='/path/to/input_place', output_place_in='/path/to/output_place') PynPoint creates an HDF5 database called ``PynPoint_database.hdf5`` in the ``working_place_in`` of the pipeline. This is the central data storage in which the processing results from a :class:`~pynpoint.core.processing.ProcessingModule` are stored. The advantage of the HDF5 format is that reading of data is much faster than from FITS files and it is also possible to quickly read subsets from large datasets. Restoring data from an already existing pipeline database can be done by creating an instance of :class:`~pynpoint.core.pypeline.Pypeline` with the ``working_place_in`` pointing to the path of the ``PynPoint_database.hdf5`` file. .. _running_modules: Running pipeline modules ------------------------ Input data is read into the central database with a :class:`~pynpoint.core.processing.ReadingModule`. By default, PynPoint will read data from the ``input_place_in``, but a different folder can also be set manually, for example to read data into separate database tags (e.g., dark frames, flat fields, and science data). For example, to read the images from FITS files that are located in the default input place: .. code-block:: python module = FitsReadingModule(name_in='read', input_dir=None, image_tag='science') pipeline.add_module(module) The images from the FITS files are stored in the database as a dataset with a unique tag. This tag can be used by other pipeline modules to read the data for further processing. The parallactic angles can be read from a text or FITS file and are attached as an attribute to a dataset: .. code-block:: python module = ParangReadingModule(name_in='parang', data_tag='science', file_name='parang.dat', input_dir=None) pipeline.add_module(module) Finally, we run all pipeline modules: .. code-block:: python pipeline.run() Alternatively, it is also possible to run each pipeline module individually by its ``name_in`` value: .. code-block:: python pipeline.run_module('read') pipeline.run_module('parang') .. important:: Some pipeline modules require pixel coordinates for certain arguments. Throughout PynPoint, pixel coordinates are zero-indexed, meaning that (x, y) = (0, 0) corresponds to the center of the pixel in the bottom-left corner of the image. This means that there is an offset of -1 in both directions with respect to the pixel coordinates of DS9, for which the center of the bottom-left pixel is (x, y) = (1, 1).
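As a minimal illustration of the convention above (the position values are hypothetical and only serve as an example), a coordinate read off in DS9 can be converted to PynPoint's zero-indexed convention by subtracting 1 from both values:

.. code-block:: python

    # Position of a source as displayed by DS9 (one-indexed, example values)
    ds9_x, ds9_y = 41.0, 56.0

    # The same position in PynPoint's zero-indexed convention
    pynpoint_x, pynpoint_y = ds9_x - 1.0, ds9_y - 1.0

..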
_hdf5_files: HDF5 database ------------- There are several ways to access the datasets in the HDF5 database that is used by PynPoint: * The :class:`~pynpoint.readwrite.fitswriting.FitsWritingModule` exports a dataset from the database into a FITS file. * Several methods of the :class:`~pynpoint.core.pypeline.Pypeline` class help to easily retrieve data and attributes from the database. For example: * To read a dataset: .. code-block:: python pipeline.get_data('tag_name') * To read an attribute of a dataset: .. code-block:: python pipeline.get_attribute('tag_name', 'attr_name') * The `h5py `_ Python package can be used to access the HDF5 file directly. * There are external tools available such as `HDFCompass `_ or `HDFView `_ to read, inspect, and visualize data and attributes. HDFCompass is easy to use and has a basic plotting functionality. In HDFCompass, the static PynPoint attributes can be opened with the *Reopen as HDF5 Attributes* option. .. _data_attributes: Dataset attributes ------------------ Apart from using :meth:`~pynpoint.core.pypeline.Pypeline.get_attribute`, it is also possible to print and return all attributes of a dataset with the :meth:`~pynpoint.core.pypeline.Pypeline.list_attributes` method of :class:`~pynpoint.core.pypeline.Pypeline`: .. code-block:: python attr_dict = pipeline.list_attributes('tag_name') The method returns a dictionary that contains both the static and non-static attributes. PynPoint-0.11.0/docs/tutorials.rst000066400000000000000000000012211450275315200170610ustar00rootroot00000000000000.. _tutorials: Tutorials ========= Curious to see an example with a more detailed workflow? There are several Jupyter notebooks with tutorials available: .. toctree:: :hidden: tutorials/first_example.ipynb tutorials/zimpol_adi.ipynb * :ref:`First example: PSF subtraction with PCA ` (:download:`download notebook `) * :ref:`Non-coronagraphic angular differential imaging ` (:download:`download notebook `) The notebooks can also be viewed on `Github `_. PynPoint-0.11.0/docs/tutorials/000077500000000000000000000000001450275315200163335ustar00rootroot00000000000000PynPoint-0.11.0/docs/tutorials/first_example.ipynb000066400000000000000000002374411450275315200222530ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# First example" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Introduction" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "In this first example, we will run the PSF subtraction on a preprocessed ADI dataset of $\\beta$ Pictoris. This archival dataset was obtained with NACO in $M'$ (4.8 $\\mu$m) at the Very Large Telescope (ESO program ID: [090.C-0653(D)](http://archive.eso.org/wdb/wdb/eso/sched_rep_arc/query?progid=090.C-0653(D))). The exposure time per image was 65 ms and the parallactic rotation was about 50 degrees. Every 200 images have been mean-collapsed to limit the size of the dataset." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Getting started" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We start by importing the required Python modules for this tutorial." ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import os\n", "import urllib\n", "import matplotlib.pyplot as plt" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "And also the pipeline and pipeline modules of PynPoint." 
] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from pynpoint import Pypeline, Hdf5ReadingModule, PSFpreparationModule, PcaPsfSubtractionModule" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Next, we download the preprocessed data (13 MB). The dataset is stored in an HDF5 database and contains 263 images of 80 by 80 pixels. The parallactic angles and pixel scale are stored as attributes of the dataset." ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "('./betapic_naco_mp.hdf5', )" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "urllib.request.urlretrieve('https://home.strw.leidenuniv.nl/~stolker/pynpoint/betapic_naco_mp.hdf5',\n", " './betapic_naco_mp.hdf5')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Initiating the Pypeline" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We will now initiate PynPoint by creating an instance of the [Pypeline](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=Pypeline#pynpoint.core.pypeline.Pypeline) class. The object requires the paths of the working folder, input folder and output folder. Here we simply use the current folder for all three of them." ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "===============\n", "PynPoint v0.9.0\n", "===============\n", "\n", "Working folder: ./\n", "Input folder: ./\n", "Output folder: ./\n", "\n", "Database: ./PynPoint_database.hdf5\n", "Configuration: ./PynPoint_config.ini\n", "\n", "Number of CPUs: 8\n", "Number of threads: not set\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/Users/tomasstolker/applications/pynpoint/pynpoint/core/pypeline.py:286: UserWarning: Configuration file not found. Creating PynPoint_config.ini with default values in the working place.\n", " warnings.warn('Configuration file not found. Creating PynPoint_config.ini with '\n" ] } ], "source": [ "pipeline = Pypeline(working_place_in='./',\n", " input_place_in='./',\n", " output_place_in='./')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "A configuration file with default values has been created in the working folder. Next, we will add three pipeline modules to the `Pypeline` object." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## PSF subtraction with PCA" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We start with the [Hdf5ReadingModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.readwrite.html#pynpoint.readwrite.hdf5reading.Hdf5ReadingModule) which will import the preprocessed data from the HDF5 file that was downloaded into the current database. The instance of the `Hdf5ReadingModule` class is added to the `Pypeline` with the [add_module](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=add_module#pynpoint.core.pypeline.Pypeline.add_module) method. The dataset that we need to import has the tag *stack* so we specify this name as input and output in the dictionary of `tag_dictionary`." 
] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "module = Hdf5ReadingModule(name_in='read',\n", " input_filename='betapic_naco_mp.hdf5',\n", " input_dir=None,\n", " tag_dictionary={'stack': 'stack'})\n", "\n", "pipeline.add_module(module)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Next, we ise the [PSFpreparationModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html?highlight=psfprep#pynpoint.processing.psfpreparation.PSFpreparationModule) to mask the central (saturated) area of the PSF and also pixels beyond 1.1 arcseconds." ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "module = PSFpreparationModule(name_in='prep',\n", " image_in_tag='stack',\n", " image_out_tag='prep',\n", " mask_out_tag=None,\n", " norm=False,\n", " resize=None,\n", " cent_size=0.15,\n", " edge_size=1.1)\n", "\n", "pipeline.add_module(module)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The last pipeline module that we use is [PcaPsfSubtractionModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html?highlight=pcapsf#pynpoint.processing.psfsubtraction.PcaPsfSubtractionModule). This module will run the PSF subtraction with PCA. Here we chose to subtract 20 principal components and store the median-collapsed residuals at the database tag *residuals*." ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "module = PcaPsfSubtractionModule(pca_numbers=[20, ],\n", " name_in='pca',\n", " images_in_tag='prep',\n", " reference_in_tag='prep',\n", " res_median_tag='residuals')\n", "\n", "pipeline.add_module(module)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We can now run the three pipeline modules that were added toe the `Pypeline` with the [run](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=Pypeline#pynpoint.core.pypeline.Pypeline.run) method." ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-----------------\n", "Hdf5ReadingModule\n", "-----------------\n", "\n", "Module name: read\n", "Reading HDF5 file... [DONE] \n", "Output port: stack (263, 80, 80)\n", "\n", "--------------------\n", "PSFpreparationModule\n", "--------------------\n", "\n", "Module name: prep\n", "Input port: stack (263, 80, 80)\n", "Preparing images for PSF subtraction... [DONE] \n", "Output port: prep (263, 80, 80)\n", "\n", "-----------------------\n", "PcaPsfSubtractionModule\n", "-----------------------\n", "\n", "Module name: pca\n", "Input port: prep (263, 80, 80)\n", "Input parameters:\n", " - Post-processing type: ADI\n", " - Number of principal components: [20]\n", " - Subtract mean: True\n", " - Extra rotation (deg): 0.0\n", "Constructing PSF model... [DONE]\n", "Creating residuals. [DONE]\n", "Output port: residuals (1, 80, 80)\n" ] } ], "source": [ "pipeline.run()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Accessing results in the database" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The `Pypeline` has [several methods](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=Pypeline#pynpoint.core.pypeline.Pypeline) to access the datasets and attributes that are stored in the database. 
For example, we can use the [get_shape](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=Pypeline#pynpoint.core.dataio.InputPort.get_shape) method to check the shape of the *residuals* dataset that was stored by the `PcaPsfSubtractionModule`. The dataset contains 1 image since we ran the PSF subtraction only with 20 principal components." ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(1, 80, 80)" ] }, "execution_count": 9, "metadata": {}, "output_type": "execute_result" } ], "source": [ "pipeline.get_shape('residuals')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Next, we use the [get_data](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=Pypeline#pynpoint.core.pypeline.Pypeline.get_data) method to read the median-collapsed residuals of the PSF subtraction." ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "residuals = pipeline.get_data('residuals')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We will also extract the pixel scale, which is stored as the `PIXSCALE` attribute of the dataset, by using the [get_attribute](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=Pypeline#pynpoint.core.pypeline.Pypeline.get_attribute) method." ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Pixel scale = 27.0 mas\n" ] } ], "source": [ "pixscale = pipeline.get_attribute('residuals', 'PIXSCALE')\n", "print(f'Pixel scale = {pixscale*1e3} mas')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Plotting the residuals" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Finally, let's have a look at the residuals of the PSF subtraction. For simplicity, we define the image size in arcseconds." ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [], "source": [ "size = pixscale * residuals.shape[-1]/2." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "And plot the first image of the *residuals* dataset with `matplotlib`." 
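, "\n", "\n", "As a rough sketch (the axis extent, labels, and colorbar caption here are assumptions rather than the notebook's exact settings), the plotting call in the next cell has roughly this form:\n", "\n", "```python\n", "# Display the median-collapsed residuals with the image extent in arcseconds\n", "plt.imshow(residuals[0], origin='lower', extent=[-size, size, -size, size])\n", "plt.xlabel('RA offset (arcsec)')\n", "plt.ylabel('Dec offset (arcsec)')\n", "cb = plt.colorbar()\n", "cb.set_label('Flux (ADU)')\n", "plt.show()\n", "```"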
] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWIAAAEKCAYAAAAo+19NAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAADHyklEQVR4nOz9e7xl3VEWCj815lxr7+73DbmQGGMIt2NEuYmYD44fHrkIgt/vHFARBLxEDEb8RM/BG+GgXA8aFUX4ROUFIyDIXSSfRGO4iRwEEoRDuBwlRJCEACEJyft2995rzTnq/FH11Kgx19rde3fv7t7d767fr3vvvda8jDnmnDWqnnqqSlQVl3Ipl3Ipl3L/pNzvAVzKpVzKpTzZ5VIRX8qlXMql3Ge5VMSXcimXcin3WS4V8aVcyqVcyn2WS0V8KZdyKZdyn2W83wO4nzIePqLrpzzjfg/jUi7loZXN42/FdHRN7uQYH/3hj+hb3jqfatsf+8njV6rqx9zJ+e6HPKkV8fopz8Bv/yOfeb+HcSmX8tDK//2vvvSOj/GWt8740Ve+66m2HZ7zc8+84xPeB3lSK+JLOWfZR0kXQAugApQZkGn3+zraTyggNe0ngCiAuvs5+C+fXoA6CnSw7cuktp+2selg4wHaMeWSSn+hxR6BesvtHmS5VMSXcn6iu0pNB6AOYtGIWTFsTTlSmWoR1NG2kZoUZ1KyZTKlyu1VXKEOSVnDFOx8CNQ1ULaAXEccr0x2zHkE6kpsn6mNRXT3vJdyMUSh2OrpoIkHVS4V8aWcn7hVu+9zpYLz76n4dKn4XJlrUopCS5lWMvx73aM3fZud4/LwXAAACK3qZDHHNtizqMil9Xy/5GG3iC8Ua0JEXiYivyYiP3XC9yIiXy4irxORnxSRD0zfvVBEfs7/vfDejfrJJ+pwQx36f/NaMF2xf/MaqCv7XKqiTAoVYDoUTAdiVrLyO2DY2k8eP2CKJSSB9p3M9g+w82gRlC0wHgHDsVvBrsDrGj4ms8DNmk7amgtD8W1WEhCGOnxSR7HzXFrN91QUillP9+9BlYtmEX8NgH8E4OtO+P4PAni+//tgAP8EwAeLyDMAfB6AF8BeqR8TkZer6tvu+oifjBK4r3Sf1dGUryhQthKKtMz2gmgxZSZVzcrdImADrU3BAw0bth2zlapQsWPTSp4HQAfbQCagOPwhs+87AvPKTGl1JQwAKHbesLgB1GLXkJU9xI5vY7DxX8q9lbrX1Xp45EJZxKr6AwDeepNNPg7A16nJDwN4mog8B8BHA3iVqr7Vle+rADxwFJYLI1SCyeqkUuP3pggVovZvCTnEdrRS4vv2QjGIFxYmoYLi3xU0pV/E8WGJ48R50z/izMSGOyhB0jVlTJiYcwriddcB3y9dz6VVfO9EAczQU/27lYjIoYj8qIj8XyLy0yLyBXf/Cm4tF80ivpU8F8Avpb/f4J+d9PmOiMiLAbwYAFaPPv3ujPIBF6ke7FKFzAyUOaSwct3lFqRZuQCKosyCSliBAbbFcQHHgItBGUBTajq45VoAmT2QpoCUFHTjgsDFAAotftJYIHjChZJXv66NWdUR/CsAVr5dBYZjbUpcTXEPtIKT1a5IgcLsHDzcxtt9kXO0iI8BfISqPiEiKwA/KCL/1g27+yYPmiK+Y1HVxwA8BgBXn/W8y1dmKdpTv8qkGDYKLYo6FIcWHG5QhxqomGpThMUVKbCAG1KQjhBBDtzpaNuLiFuuGsovFLMftzg+rL5QAK783dKtY6/obR91uEFD6WtpYynHtohIB4c0j6COi3Ffyl0XBbA9J/xXre7vE/7nyv/d9zv5oCniNwJ4Xvr7XfyzNwL4sMXn33/PRvUAyj7XWlyBMWAl1RWcMGAlARdUkQhu8TNRBWZXfqW3RlUEZdKmPHOczJkQQYFIiwHQwwxU4AWC6i9nHS0ABzgNjko0cY3FcWqz1G18RdTGPrTr6ifEx66JMUG4RBzrJquqtH061kV+xRMUcimnFz0l7HBaEZEBwI8B+K0AvkJVf+TcDn6b8qAp4pcD+AwR+SZYsO7tqvomEXklgL8lIsQa/gCAz75fg7zookkZxO+OC4u2oBsgrrhMU9aVwxAL7DWzGciOCNZBVoYKyMbPlTBZHiPYEqAljgY3qKIOgrp2JV+bxVtXfj5a2TyOY8EyqUESFRg2imHr0MhsLIi6UgvGlR4D1sUYgQZlSCwW/TxqAXTl+8093pwXkktlfAZRYD69Hn6miLwm/f2Ye8HtcKozgA8QkacB+A4ReV9V3cvUuldyoRSxiHwjzLJ9poi8AcaEWAGAqv5TAK8A8P8B8DoA1wF8qn/3VhH5IgCv9kN9oareLOj3pJeltSrUgHTrB1O+BRrkBVLEDF7YfTMCn6XV6C48lVkpZl5KTQpuoZCEyEIE1RpeK25l10EgomGE1sEUamTpFRhbw4c5TBkyceuYCt4ZGDbQ3Tky/DkP0K8trpnYcfMcdvBijt/n5r77wQ+YMHZ8Svl1VX3BqY6r+hsi8n2wwP6lIqao6iff4nsF8BdO+O5lAF52N8b1QMvSPU4uu4pYQM7pZGVryk8LIENSzC5lBjQpxjgFLd4EIcjkirA47rt0yfPfujg+eiWcz6ODHU+2CAt+gCv3wdKbqwRk7IFBQV3ZNnNt0AmhFuLPMR5Z/NMGbWgRh2EQyjxOJKbYCX0E44TXkO5Fl82X5+RS9ohgPqfJEZFnAdi6Er4C4KMA/J1zOfgdyIVSxJdyzpJYBJ0FXBC823Cf3W2nEqFFHBgwt52xUEDp++R+l0mh2iAKJoB0w8sKzX8WGphz+pziOHVdATJLBAR1BooAdVTUtTTFSkXnmDcteSrTOqbrq64HHffOjAqZPTBYAR0V1b2CJUMDMA9Ci4RFv2SOhOXMc9Y2Z/k6L6WJBevObVKeA+BrHScuAL5FVf/NeR38duVSET9Esjc1V9tnSzjArFRz82PbsPQ0rOZ9xwqmQ0HAGaFU0SzMvBCE1Un33XWj8OfCQuwsSf+5Y0XmsWUrNSlxniuCbSDs0JTu/gldHFcRqdU7i0Sa173ic9kxMfYsNHFq2XP8J6kocG4Wsar+JIDfdS4HO0e5VMQPiWQrtkzaWZTEZBl8qh6YolKoRSBFoRsBZu2y2goUNSkQO1mjt9URmImjalO2zJaTSTEcN0VcBwCDBdh0lDhWMAqwu5AMW7seqZ65x9/Hdk4qteFYIbN0Cjsy/ipQNm2MBpskwBfp/AoMjBDlc8zGtsg0PIhxomNOcxBSJVKkg/ZGCzt5FrpuvGq7bxpUvUuFDNSHPIPmUhE/JJKx2BbookVq7nL1VOAMN0QFMxFoeuNJM6vSK65GBzNOroq0ZAggZbs5D9hrSYgC04FgutIUIBUptKUNB6QQxzNFXTYKUcG8HkJpdxa+j7dsLMU5Fh4RhyaaV2BKTmJOIpC2w
HLLtA96SOUv0jiCUeL7U5lHENHrXMgMjJPX1dB2bCrrmIMqTSHjya2Mz9MivqhyqYgfREkvZeCZInu3IfwA+MtczYzLpSPpBnc4LhkP2Xqm8lNTchUtcKWw7wNbJhxQJay7nXcpueYZ7gDauVQs287KZUpH/doLXaDBHDFNC6uZItWyAeMY+6CCDOOkn/ydzArN98CN7BJYTvouQT+amBY2HvuZ6yjHQoC28DzZlLJCMF+sagznLpeK+AGUwF09TZjKaZ/3lq1fmUwnR91ftIpkUtVSjP14dWXKY5khZ4wKYPLMtxhPHMsVk9cLLlugHiHYGIArycqxtCy9LniVoJbtI7KjhKQiiswvoQyxQ4U1HCnbKbhmwUl4Vl5akBbJI2A9jD1Y8pJ5sayRUVXiWBG8m91DmOHFiGy/gFV8XDJrU+BC6mB/nXYht3paHg65hCYu5WJJCkiFlTqk75aSnt+cGszEC1Oapgj5oteV45UFwFZQtnbgKL4jAFStBkTqusGMPLrqxKTFWQ09v1bD/Q5ltbCKuf98KAuMV5uFW7GzAJFzDCRrc6bLn7ab24K2NzhG+Dgp5L1We5qXoNh11+qQycxxt8WNypvJIbZoaNT3qIPBLLLkMz+JRCHY6HDrDR9guVTED4gYFQqhBEQVItJRzkjLAszaUmDX3VZELoYQYli1oBnLW1JpSbaIhcdLLne2uD2YBSpCTQqQsAEtTLf0Qqkt6lJ0ePQEDOGqN8whAmP5Gjm+0MDt+w6W8G3U53bIVn++rgQ/xL6OZ5ctUHyeSNfTIv6z7ZstYgvw2eKXA6YqiGxDBhs77jVhCuk/fzLAFOYsXUITl3IBZNgoVtfsrWNx86Faui5gLvJ8oOEuVw+gMWAGZJxSMThmW0drLwQFdCuhOMtWewUCONScrGP/LGc9la1pXFELhNENlzlZirTkpcETVDLRvw5NSQ3HugMVcAxN2WsoK0lBsqwIM7UNQNTSYEARME9gPnDrfi0xno4aWE1RllkhW78cpRLXCJoq74PagsaFVAdjmmQjTwdnrwRsJDHG8BZo/XuReih66OghVs6XwbpLuRhCa9B/p9/cMFV39ZdKKluffJa7IFRKgFhaWvlfcst1eazStiOTgCnG2YK95SUSi00SNR0cY66Lc/N0eyloy/k7oYKXpLkVzi2tYgYts2UKRCF7fhZKnrQKwS7nOFvlC6imL8upUc8ijz/w7zwHTwJRFczLB+Mhk0tFfBElvYB8/qZDQR1NK5CSRmVhFqBECUmouc0hmfUgQBlawRxRRdlI4K/RTSNZrp2FGpZ0U+C0MoettvP6+CBeQIgLQpJQ9knp1bVhrcPG+MeZR6ysdUElRkwbALNCWoGgNm6jvzWrdTrsx6GzhOUcwTehZ+C/Az3Mkfm9xIUjI28xf2k+FW1f2YMn856HJe0ZkGUGxuNWUGkeEVmFsS4tqHYPk9SH8aKSXCriiybJcso4Y10Lts67XV1XDEcNA61CHqtpDJk1FEikGCe3vo6CeaUtcWGDKIheJk8+WLu1DEA87zhnyem4SKioqdtyWgjmtaBeMRc/cO58rWgQhcErcEVsypip1zKrKaDEEqlUUlDUksfS3H7jIZsSA2D99A587lxp69wWCe0SL8zLCCWcxmzBxjZ+HRL1zxcdBu8kPkOCGbQLwJHbTOgiG4CigBx5wXoBpqs+nxWQSaC13cu2080esgdLLFj3cKuqh/vqHiDJsSVgjzef3d/a3Oy9KcjcRtAfSNP7SetMzYW21kP+VbJUu3OI9i94hiJo8bEOQ4IBqKiXyRp2yHxewz1iDlyJUVGfxGDqLFbsQgb7d+L5/E/3LHYYGKkzR54Lo8Zph0XvzBfSXGZY4owYblbgGmM/2zEeZFFcBusu5R5JsB6ACNBkpTBsFON1U2bseGw8YoTLXJwTW7buUhertVtTEZpIJhgQCmLYJBw3WW7wjLzWbFOCccFzVK+GFpxmYZBP2vFvKCIulSASCK26BG+Udj1M4LBO0O33lgDRcN0d+pmiMT60Wc8yu2UJpBQ5RHnKfB8iVTxJFBYSQAfZr1zTfeOcl8QeiSFKOl/eJ/Gdu4JKVW36nDKYk3L2LrwPkcyXPOJLuauSLMpQeA4JZMuybIHxhnYc2joA0IZvRpqvQwR2vPaSCzO2pL3gZDV0lDCgr8dAShmahRfZXzC8MrjDRYLGRjbCeKNhvHWUhu9yAWGNYAiwaYEqZTazW9lMmthL3XK9z6Ci+HE7hgXndGrXASAlsADLwu9LRSxDm5No6+SLST5fbE/lu+dYSww+6Hy+fab3Zeu9LCGe5TGS9/AwWM6XmXWXclelc2cXVhGVcFczIltA2pTGPqZDQA5MYli4s6xFzHPZAdqYMseVdSeioHp+uWnRoo0zn2PHRCO6kSx+U8zp+jOkoGmeBFGDmOON/bFH6dQ0pYu5zdfHd5xeQMxjotXFYVjHo7b7kK+782QEqCV/19KaayTGNMs+5odMmDzkIsb3hmXj1REdbztLxz6pe+blAZR6yZq4dyIiHwPgy2CVDr5aVV+6+P5LAXy4/3kVwG9S1af5dzOA1/p3/11VP/aeDPo2JDMYIrAzoAVgaE3OrVqYWaD2RtVRMDuDgKnEMqPrvCxMIc6fu0JuKdGmCag8ctQ/WBUe+IOn3g5bRR3J4uhbGmVrrI7SUn5ZalNToSA0hcVrCMWE5p5HptqALlW5m8Nl+rFDHsWLy0dPu7wuiNewGNr8Nus8LX7kYLvilAkYjxQdrIDFudHmk/eUY0LROC+wP3gZi25pVv68BlSKsT6uCuZD22446he/WGR4/7baQzAPoJizd6mI74l4oeavgFXMfwOAV4vIy1X1Z7iNqn5m2v4voq8rekNVP+AeDffOJL0QXeGXZCGLW2SRFpstTabUhqKj8mrkfx6DymRf+m62OrPy6LLC5vZv2KrjyQIc+GGoHLXVQxBx62/fm6+AoP+u4yUjjQvoF4mAC7RZ0EvoIQlrWEi+njyHZEhM6BI/WonKhNUnSx5OGdMBUNXes0lQUAc3peuKOc6eztLTWE5bQD9oNEUs5g3ts4CTZP8mD5IoBNvLFOd7Jh8E4HWq+noA8AahHwfgZ07Y/pNhPe0ePOkUpYJsA83fu4XJ9vVSNLpqGMdVAU/GYHsggxua6xyKwF1hRLadYjhqMAMVQ/WMrVgEVFuQCU5rc8uwJPe8rhHjshPsXkdXp0Fo4beGn2GoZi890qHRjRWaM9AQ/OK8kAiasquklglaqz1p19VxgnlP0rhtG7eMJS2ALCtKxedBs5iburgX7h100EtBDyHF9bSxddCImpc0+j7DsVqtjwSzMDMy5vNWEM4FF1VcJnTcQ3kugF9Kf78B1ql5R0Tk3QC8B4DvTR8fevfWCcBLVfVf36Vx3rZkGhKAhkGicXIBNEqX5Bq3rdh5JDhQCTt2iLUff27Rfr786qnMWoDV44LV9QqpntK7tuPMB5aSG4HBZA0DyZWfva3SDGyvCLZXTTmxnVBcw5wsccd3iTebFWmBuVkSRMA6yG6tLpUHazKU
2SCCgB5YIGjdFCWZEjqaq76fWdGU7DJAx3rGhHUs4aMtSFbcHs7XtmssW6Bct3tQUj2NgGvS9WQst0vzzjAVK7VR0QMYjhTjDTtu8fFHh+sER7VrSNfs68CDpYzlMqHjgsonAfg2b4tNeTdVfaOIvCeA7xWR16rqzy93FJEXA3gxAKweffq9GS3PnayucEsDP6QJ5ptQaXQwRrLiYjvpLVHtfukkrEukc+fNMkSwz13OEMHye1q9yUUXNCwVajhxtxjpQknEQPcMPyv0Pd8tg1ux7eK6d6zC5c99x9gH6/i/nj+M7n7tzKHsXu9yXBwrO2t3c8VrJS+bi26mrgn6+cvP0AOqyxSXFvG9lDcCeF76+138s33ySVh0c1bVN/rP14vI98Pw4x1FrKqPAXgMAK4+63n3zi7gixOBNx+Pv7xlkmbFhPuvHdUqfkaCBC1O6bBGFqwBmpVqbqx9Zxl0NoYyAnU2S63VK26WWrjNLpFYkDDO8UbqiMHGnKSyzfASlmrX6KnGZdI+UEVRs6bL3DjLka7MsahZl5tHs8+OgDm638H5bYyFpQKVqe/GEY1GfTz0KAB0mK/MsGannDPhcfy+zIriGjYgIEFXuCgW1AGYD9qYSuJAZ0obMwXLpL0iVjQWTZIouOQW9YOqzy6DdfdOXg3g+SLyHjAF/EkAPmW5kYj8dgBPB/Cf0mdPB3BdVY9F5JkAPgTA370no94j2erJypN8Wbr14T67ErQWP9LYBmplFsP6YYWxmS+7hILmSwl49bU12neOP5aNjalsG0tAqpe89Be20OpOeGtnxcUCIkAxJsd4ZH/Ph2iVyxLFqs42KcMGGAoVjOx0OBZXwnBF3eaytTTiNdWxJbN0wUwqthR0jMULGvNtJ0Tgz8anNhx+mbpNuKALvunivAz4JU8j7hnamJaLQHyXy5FSCad73yAfZ6psNQKXHMPgz1Pmfgf7hPUrHkBRyGVh+HslqjqJyGcAeCWMvvYyVf1pEflCAK9R1Zf7pp8E4JtUu1JavwPAV4oYQQiGEZ8U5Lvrstft9Ze5wAunQ9G1VpdmTXa0NvaXrwDcXWUZxcgW40vrtCvilYAdm0XSc71KpiLvk+D17gMTC6AMIC3fDSqM/chIh4mSrhdKwxceuuSnwjF9jpBSr8XHsZMO7MdiTQdaqB1mnxYf8Px74ARa7XH+tntcZywQS5iAG2bIgMdMzUrzghIQjt8SLuASDJWseLVLSulgiQdUlymA7WWtiXsnqvoKAK9YfPa5i78/f89+PwTg/e7q4M4qbrlGKvIhvHmkBZdkbppDpbUZAhBBGVqDoq29DtBSfSnFq56xbm/1wBvQlB2L8qDaZ9OBhGXZDdsNxij/SBReGquizFYop/nX/qM6z7hil1/r10VWRh2A6RDdAiRuBUv1NkxklLg7D/KjqwJDmr9CbYQuwSGgjzyUHJzjwlR9PgcLwDH4qUNKcQ6vw7wUXo/6IpA5xZnDK9t+Qcl4vqaxDZs23uj4LP195nGmIXlCbDLqkFfx+8vGAGVrYzflDUvLZjA3LwwXWuSyHvGl3IbQ+nIecIVZr/MapoCdFpZfBGUCRMYu4S+U+8/EI+d1ax3EPme5wliZ/C1PCRqY0UomhkLUoLzpUllI+p2fk6UBQRFtgbkwBV1RAag1pfwSsyU+CpjPsyiLWTR5AJoUZfISMoMhRBDBwXwPOqZFtnIJcbgH0Y1x6JkqBUnB+3GpiDXPcbJyqTyjT95yrMsEFD/+sMntq9p4IgsyxRUslVzjOssWGLZtziKRxqEL7hOWMefhAdBvisvMuku5DYlaCQmzA5JiGASVFlzCY5fWT1M+iG13ou50Xemu+rZlC1O4A0LBByzgvFMmH/RKrj9B5rGW2QYmFQ6PcKO0rcMTZbIFSPZ9D3SdOHIqMevwWpJIKyW5O8lUOk3LZVw15oayCDgGxqtJ+YaC8+/z6lTaYrW3vjwXGWn3bgknLAOKebHJyjoWs1zj2DH5du0a59X0/Ij0102sfjlvAWOlus0XWS4t4ks5k+QkhHgpaIVQ2awBQDqObrijcIULNIxPmlssAKpT3cLKgXrwqiU6jDe86M+hpUOzCDmKKeKAIPJ4Z0viAM/t56RyyRXX5oO+1Q9g1rl1IAbkuCUV7KN/aRHMtOo9iKmDYD4kLICAG2IxSvuLIlgeVEbBKJi1K5QffOhQsAhlZspIrC3S0JRUjJsLx57EruBN+/VEQBFNwXI/HRuvmddQJg24hYsp1Osvq8FVWxZJEqAyYYeME+FzYzWJGezLbJHp0ObBqs7ZPMvUzoGVRELORRVVubSIL+U2xC0YEd2hKmXrqHO/fb8d0fYzrNOlq5sCVlH9bMEioEJXpO33YIRLZRXDkJT6y2Mt03fT2DJzoPsezWPoAkjavlNBpCUvx8ZgXvydL0EX40e/b8dkSBYxz5tbPsU+ae7aXBDWSR8nmEaW8yrp+Gm+1I3cfWM1SGHxQEiaRo471e1gLGB53jraPWPHE1lc+0UXBS5TnC/ldKJJyTCDihxcYrkA2ksJdBlQ5RCYt9K98Bbs0+Sqq1tMTRHo4MG7Cgjb56QXPoJ0VM7uchM37TLYslKsCFhBPXUX7MCR0nRpqcMtVLZayjSxgCQWbnNWelTi7CzC695nTVenmOVazFS0mZki5FBr8wA66IKp1YKoibzMSowFrCv2boMqbllqEaAqikMJdZXuY5oDJfyTrPPMKw5YBGrXwIXNFXPx3yNwK4AW83wkHys9g2UGsOn55MxABMw7mz04mRuRXiy57Fl3KWcRuqhOH5MqgCuU8bglMLQkjua+T5vWPZiK0QrtJAvOlRIhgHklmK769iqodfdFIt7YWbnuBivQWiyh/96K9vCaXEkdG3/Vrg0QFV9MvKXR1lkibtGqwzFMda6pVkYWmw9T7mXjSm5PIXWgWcx1tKQVLb7QeIEfHQ27JsRQ/FqrF8JnsXeyHAgdDIRRkuK2e7EcbPtZtqzEpihbaSnWI1BL8jy4vSu6qCDHhZA4fxTrt/1YeJ51nSP4x2JEfg25fgaVMNleMgOjV89jEgjEO3cD0RE8W8oXTRS45BFnEZEDAL8FwBUAb1bVN9+VUT1EktN+w+Xl77mdEBUgLaeyVIy7+y8zr5Y4apyT31UE66CDQRbjy5+flJLbPujHtmNZ8/jp2IGjavsHIPDtSGBYQgT5+Bna2TPuHOTM+wY3m9Y5rzGfS/0QmnbXZgkv51ry/DJg2FEQ0c/3PsnQh3O1u+y7mKt8Qe152TmGpI+youbPwrk4xdgugDzpM+tE5CkA/gSs2tkHwWp4CQAVkTfCEjAeU9VX382BXnhJSotWIeA0sWrc4VC8hDFW5aYpp5ENpeJdGbRTPgMU9ZgZew2CYP1ewF/callsrF1bx5b0wWpgO0qL40rnm1fSBaXE03m5cNQR4cIzcGYBJdlR4AYBSAcH0NU3i30P26QgiuxIBcbrWRumcbuFSerWzHZPAsxiVmCGBJbZcBEgnIECDes64JVQkNqs/MSYGI4V2Hr2n/O54/hFwwLuJ6TtX/3
6d/BzAVCkU9AyA+Nk8Ei2rrNVT5nzWFNAL49vX9zgfsuTPrNORP4ygM8B8HoALwfwxQB+GcANAM8A8L4A/icArxKRHwbwF1X15+7qiC+a5JczWWnx2LjlUYdW9pFi7rVvtk0JCPm4jLpnK9Ld0wrBsPW8BypqWHQ9F0KXaopuPLIaE9MVsgSkWY5ZxB5+O1+qizHC2iLFy9uqq6kAGJoiGDaNvUAKHWr/wkcB+7SQkBddVw27zI1SSdMbNurV15wiODTFo8UtWucyz2huPPaxH8hC2KOIeT+ZUsxzxW3KVrW0aw+YgDAVWykh7e8888CKpc1jNgC7zD9p1ynVIC9rQ+UZdUt9tbT8gY4rHdgxN6/pWi6QPNmbh/6PAD5UVX/qhO9/FMDLROTTAbwIwIcCePIo4qUCgykNXXJxVSMwZlghP+9dyyW7onO7iQkStqgSf1NxEOvd4Ya6ciGW3L2wfFGTEsr90TpYIL0LJxooyTJV7ZVApprtK1Bj+9kFdS6zK/DoOIKTg0qtLkcbZ/tbF+5789+DzrcPspGWUpytzDraZ91zkO+lJsw8sViImRPGoPD+GbCNdk+Qjgm0esU+rqJtwbPAnUKmnsMdY+MpeOlzP46LGBNTBbb1Ag7sHOWmilhVP/E0B1HVYwD/+FxG9CAJXcAFE2CYmiJrShLQlYT1ZTCDu4pUUAyazAgognzP6UAwHUq8TEGTcsUTdXJFekWaaErTgUA8lVrdGuZ+xuxA4LQBc9CNJcRQ6BIrlgtRVu4RBEyKKwci68q8gWjlVH0BWUtn5Up163rrFeN8IbHjGZEtFyaKa6ASE/u7OCOFSqxjGEAgo3YLB+eOc8Dmop0k6ID86Xy9ZQLkmrb7Xyw9fLyeAmc+xs0jAmX1teoBOO0DbLEAeBo2FxcG5+YDW2CHjWL1hMZzEV5VLJINqhHncFNYqGkn5nAfxaCJJ7EiziIivxnAqKpvWHz+LgC2qvqr5z24Cy26+7uKoFQN5ZixOqzsBWLAJaLfs0DiRUVYUXSrh425nizKHi62u/oNU3Xss1iKc9foM9O1pFnMjRng15GsR1pK0d04b1sB1NR8NEuy2vYxHohLa4IxOit6sBcvF98h40BVLBnFcdaw7P0fU8ojIUPy/u06BOjTyZNXkGtG0PLOdLDufhMGSRCDPQd++goMUzsP2RzRcgpte6nS4INEb9yZW7FFtK78Ho0tCy/gqOOUsAHZaVmnRRvUtacDCxLkdFHkMrOuydcD+GYAX7X4/KMB/DEAf+C8BnXhZZ/+cUYCQGWy2EWktdBJVdWC95sCJQFBJHZDmRTDMVsjtZbzgbMO7RjR8JNKLimbDssGXf1d95SpxlZgqFni7XoTw2G5KFGxLRQXr0egwJE0Ohax2KUbnheABZQRwalk2ZoFa3UwgP54MrM2hh9nAMDAH9L5FQa6A4FdY0DDv4GG33LRSpouFhCx+ZzTtrmzyrySGN/ynmRvisdfslGyAuW1ZhhkL+6fFzb3eCIYK75YpfoVF0XsllwqYsoLsCjG7vIfAfy98xnOAyBLxeMSbung/NYFhCAzICwIL81ipEKLOsTutgOIzCpRNT7xteoc2hTw4cuc60x4p2HAea0p0SAUlrvdReHRLARfWUu6BqBZrAsLs0sFRlMAKq0uMdSL5GTlotbuhx2dc4+2Dt5IqdmVfek4RwPPIZ1lOjj/NpQYF6aJ90nDwo1C7AuYh90uWLe5rgR1pc1ST6wRMllysHJn4XLPhfWg55VAyPOtPgfSnqF4bmqruhcV1Qo/l6YwY7xtAcmeRgQOmeSx0YBwolg+YJS5hadwMeQSmlhue7Dn88MTPn9SSLdQJwuny8rylzZnU7lxtlepdy4zWyEly2WHB5zOzd/DilqMKZRj4sRS9vJ18zXuezl3TH+EYtu5tGR1FleOKoqZkaO8w3IOsoWdPs/bYXGNEfTM15YUbofjZxhpOR8n3KPOehU/DC30NNb4lYtwsYQPQgec69z5OjjWkcDhid1htWMnqKppv5OUaMd4qdwBQWOkVX7RDNDz6lknIs8D8HUAng274sdU9cvO5eB3IGdRxD8C4M/7vyx/AdZd40kh2XqjtbLEGiMQ4plzOUIu8cI360oFO0VhRBV1BWweLZE23GOT/YOZA1Zh/cBfyslxxPTS7vBmsxtc3LqiNTqmsbpbz/oFdsB+f46V6dsdA6O7/tbDDjOC/5wx1oBHVr1C7njOU9M7Ogjm0eZaA7dt3UAy6yG6XPA8eUrdqkfg1a27Na8xPBnAVlvdvS+dsvfzR43o5cKXFiimQZNhEcFd4TXkFdT3TVBFpDCH5wVvF5Us5rSPiln+XEwiu1PTPb1PYqyJc6s1MQH4K6r6nz1H4sdE5FX3s5EEcDZF/DkAvldE3h/A9/pnHwHrDfeR5z2wiyqVgRLCCikw1NGyxNgT4xE6qylbV+xTFu55pOHCIsWjoj5iO5ZpP884HytjqrndT8lYJZVm4sjShS+Z7eHbzmuBuqmoKQJfRwk6WnbBM+si4BpJ56Xil7TwUDkPzh0WU0AsMkRMNWPPy0AesdfpSqvVrHTfq0K9V14XjNu22hY71DxpsE4Pc9jxAD9OXEKzzvP1xr1Kc8qKaVmi99wMrK6r17BoyTAyAwMX5Pys+eIYWDsSjCHoF1pfsAq9M3A/7Rbv5lmIpY4LukXjXst5JnSo6psAvMl/f1xEfhbWQf6+KuJTAy+q+sMAfg+AXwDwR/zffwPwe7xDxh2LiHyMiPwXEXmdiLxkz/d/WkTeLCI/4f8+LX33QhH5Of/3wvMYz94xam9FdS4tn+P08O8EPpLVHIGV2o7ZLNXe7QQWFhvaZyelqTb3Nh0/F6Tfc6ylix9WEQuTZ7w3bRtJG9L2y/PRBZDyHAz9ueK4SHPYDTLPq3b3I6dMy3LuchAtHx+LfZbeAodEjyZR4zrWyAnXu3PN3T1P/9I+mrbdmcd8ShZ+Sgtwt1CdNI+LBS0HQ/cGWe+jEqZUyKn+AXimiLwm/XvxSccUkXeHGZI/co8u40Q5U60JVf2/APzxuzEQERkAfAWAjwLwBgCvFpGX73EZvllVP2Ox7zMAfB4soKgwd+Plqvq2cxugP4xlu1uYJsj1Y7O0+D3Qu3pI1t1wrFg/bgq3a5WUzrkvAp75rbRScwF6AOHalq36OFun5aVyIL4JH3/0kxNpnaRhx6hbf0vjmlt2oE5o3UFEoa74IzBUWsbfvIZ1JUnutxY06lZN0Ab6OQilQ0XBwKEyYNVSpzlP0xVp85ld7XQMAFHhzObStKJUC7QFxktFNcAGIWbhakHU+kW1e8NnYl5bh5Yye6Bsj7dAmZ0z3hYYRXgxMSFJeQ7NAuZ3vLaAFhIkAlq7yjHY8xj88li8LwaNTXEm1sSvq+oLbrWRiDwK4NsB/G+q+o47GN65yFmL/jwbwJ8E8J4APldVf11EPgTAL6vqf7vDsXwQgNep6uv9XN8E4ONwOpfhowG8SlXf6vu+CsDHAPjGOxxTL9qzBbrUW8kuoluz6YUPZTyY4tLBFNR4g1ZRwUwuab
KcIgW29EwJqQqklN/o+gCOzbaJGhWwLsphgeY0XaBTyh2j4IZgdE6qBR01Og7zOOQlFyASEaorw6Ia0f+6snEYM8TOYYuFhHJsFnhSQj7HoZAA73Li15px2xmWgZfodVq8aL6qlxPNF54s4WRd27UZjl1SFbzM3AiPgPfX0YmytXHMgLeEagsmNoLVtWr3IkMiCW6YDxA84WED42ynpAuV9sxFx+8iXR0OWt2cn8xBr5LaXA1+nf58wmGQvdTE+yjnyZoQkRVMCX+Dqv6rczvwHchZEjp+N4DvgcER7wPgSwD8OsyC/W0APuUOx/JcAL+U/n4DgA/es93Hi8jvA/BfAXymqv7SCfs+d99J3FV5MQCsHn366UZW0YqiZ8ts4c7d1HpIL0xYQPHw98eJhIti3zXLhdfg50/KwD7YHePeOSC8wGMmxdIzNtyqW6O9kNkiRHthQ7eRHoe2TwT70jXmusaBu0q6OD9Wi0CiV4Lp+Bkvj4SZNA9xbRDA6wFnC9HKjbaLyO5+QC8jQtMu3XcBmgdCrLdabZF91zV70aGo49EtCFysvLFAzEVrrwWhlcyLTuOJHdL9CgjIV0vGG9SRJkEXeO1S6y+AqAqmc1LEIiIA/hmAn1XVf3AuBz0HOYtF/CUAvkxVP09EHk+fvxLAp57vsE6U/z+Ab1TVYxH5cwC+FhYwPLWo6mMAHgOAq8963qkeN0v/1WQpmUWS4QQV6ShFGaMkhtpxc5Estdq4weQRM6A0I1lqkRCCVreByg3t5WoJAMnnVVq1dr5hVuf7OsuA15AschQLJs6edhxJBLRY1QbIgBevCbAAYfFAz3SlzYVt5N4Aq6elfXVot4RJJWGlDt76ZzD3fzjaVbah2GnNKYzfm4Juw8ZaS9UrgnktGI4Vo8MFHY5c3PIc831u52H3bKAF0lhjmtQwLrzqVeogwPSIpyIfW2Au5pQLGM9d0/mKVU8zD4JejnOQFwtUxq/rCE9caYG91TGwumaeynSlYD5oY+sCfPme3Wc5x4SOD4F59a8VkZ/wz/537yB/3+Qsivh3wwr7LOVNME7encobATwv/f0u/lmIqr4l/fnVAP5u2vfDFvt+/zmMCQAtHbNyKnoKFTE1pId/R5I1ly23LiiTlF8HQ8AsuTK1brz5kaTybPS4ZJ6m8/fX44odnsuRSP1LizeSJdyAjWpdiQJFOCCnTgPoXHxTRm4pAtG1uCl+6SxS5H+03h0TryMgs2B02KOjjCl6JZyuaYeXS4iGCtzhnijqjjaGCEbmOWWSSIKhwq2vdoOD801FR/x9sGuw+9ks+O4RSpZuYLto511us0+s6Wg6ho+dTBOD0SS4x+em7s5RFOeniFX1B3EBL/MsivgGgKfv+fy3A/i1cxjLqwE8X0TeA6ZYPwkLuENEnuP0EwD4WAA/67+/EsDfEhGO7w8A+OxzGBMAe4Cng1aMhgEnUQvc5aysjL8GXlwkcv/7F0Owvbp7vjKpvWzSU7xU0Hq5oVl30ZU5K1O3fubaK+shqFdte/KU4+/aFEi2uDP1jEkTHIfth6Y44dlwo2A6tPGYtejYMeeNytyDe4hracHHzurfWn0OcmLh9ZFZMKlMgGzdC/E6GaSERYbbZBM6HKVAnAIIrF86pRUYb+E12P0uG8OOlywX4q7zumHtOSW5bAAU8pgVDA4ye9CoZtpZyJnap0Uh6+a9dPfGxxyYOdqzFLENct9TYDT2SwZCLFD8bOFZ3Etr+TLFucl3Avg8EfkE/1ud/vF3YMD3HYmqTiLyGTClOgB4mar+tIh8IYDXqOrLAfwlEflY2KP1VgB/2vd9q4h8EVpiyRcycHceogMwjQAgrQ2NWqBtZLUvd4fnlWA+RAq2Aepdl5lJFkXK14LtU+zlZz1i8ah6mdxqWmmnkLqXbTaeMt3OaAkvgLpbD69NXGbvsOxF6mfCJAoILbt2xe3iS7N0c2p1KIAZnjWY8ExCHB70mx4F6mi1MoZj2364kZUFQOgkgnalBcamw9bOx5RmUnqucOtaIJPVKB6P1FkKdr/GIw0IYNhUhw4Eq+uKuqEF247XJ5X0RdejZrJb9IQmoiM055awzhU70HBDU22NBGO4ZxH1lwP2SM+fW+T1wH5np2tuF6nRS0Wc4SQu1OLPVuJAc1vi28FfLm1uVKwWdR0dpkqY+t2WJ31h+IX8VQCvAPBmAFcB/CAMkvg/AfyN8xiM4zSvWHz2uen3z8YJlq6qvgzAy85jHDvHTvgZrZCWXZUw4YAn+oem8XB7KwZIlofsbq97icO7f4s6bpBccf4knqwJL97hpyZ2R3Ci07VHjCi/e/teQlrI6NkFef46fJvn1/4Yxo/t4YawnOueffaMoYM3fNHqAmPk+Sa2Bues+7nv+InvvW8sjdO7p1C7tu0z3p49gB3Jz0eaz52MtxPGnO+f/Z26hOTnLlP7soV9AeS8UpwvqpxaETvX7veKyEcA+ECYY/OfVfW779bgLoqEkgVQYR12s9VB67bjCwO961gBqAKDNCsTiEIww3GjVqmgZxok6zpbOUGx8vMNW+6L1CrJfhJeQQYC1bL/OuqXwjskL7BwNveMgCCVukTpSmKjYUH6GMbrAEQSfmoW4PYRP6YB1VZhztkAdZRIa7a6zOl6XYnwvgxe9hFAWMK5apzUVPlsAOaDgjo0S3vHIk64PeeKdDM2biUbYj6Qbiw6iNETS9uP0sEniV7GZ6WrUcF90t/l2O9HKiFKnjhhEB3M2xi27XqqwxLRZWQ0XnUX3Ktu7W8q5pV7fv1QzJM5KQ5yF0UVmJ7MheH3iap+L2Apzs7He7hliYMCViowfUYGxU5FtISlSjUMtXoknUE+1hMOZSLwFyErBengjyDaqxWPqcRfJw5WMOfEDSBceI671TJo1C12OJ4PJShWQbkDr6Pv4VZXVqOAtKigeLEexWyKODNOgOz6OsZdbdxMs+ZiAAVW14HxuKIOgumKz0fyQIoX0q+jKXf2icuKpmwVqEBdlZhfsl5yevAywEfMlgum4cI2xvlAMC8q5QWWW+yaWBGNC2SklpPmtnhWsqeS4wkyaXSathRmseMzlXzNJJnGYmEALhbyrfO5Bwn2SW4TVSZFOfYF7UAhpZ9Hg9nQDIx7aKQ+7NDEqZcZEflLIvLx6e9/BuCGpyS/110Z3UUQurY1KbD84ib3t7En0kvgSpeJD319hQRr+LHCJc+f7xlTvLTZvc4WdFbCC9cWaBac7SftOhawRVi/Abukz/O1alP2rBO8TMnlfh0LIhV3b5S7/L0fd2kJI30eZTJ7a7b/J2FNd4Go5NV093MPpNAFIWmVks2QPmM6OOeth64W9zth8A368oU6v53pmrqxLbwuYZ3hxbWYt8aaJrJ7fcLrMjpcft7LrMGyiISme2gYEyM+zb8HVc5iEf8lAH8GADyh4hNhrIaPB/D3AfzP5z66CyAqlvY6bNg8sikuQggA9kaRLdDEv5KCkWbVZqyxa+bpARJaRfESqgdMAqNu1ma0UooBhIHaXFy3glkVTgswrwC2IJKqHdzRov0a7r4g1Xcob
VGaD80qg3ommheXaQqqXzSiNgXZFomWFenQALZXzcploKtsvUawQwCz069sIWhZhtx/OgS2jxj3S9NiOGzhHT/QFU3qcVhqff/hVnfcy5LuUYIAlh27szLm57WIBVWlpXwH22Pw++TBRHblyGOR4rWNfYyra/68Oh86P44iwNaCBdHQFAByk4LpsKCOrPYGlG3dG0Clwo7MxnuglPfGSx4iOYsifi4sqw4A/hcA36qq3yIir4UVh3/oZEd5zRZtr0ODEDoMOBPygUgFzp+FW6ptn2Y9+6b+0mrejwoCaJaS+pjUyP65L1pXFS4fx49vuGyDSSw12fDv3Hw0c2RzMKlxctFw4QGYD1l9TVpZRo7d4RkfemcVIxWw4cLBCmUVCh1Nsa+mphiitQ/aeXJWYisxatBRpCjTWp8VxRe2mdvuc7mz1T+0ccWFSMvSKwBwnJRhpsEhKTWmp492P+vYul0Hz1gcEuECQtqkW6WkM1p7I4dBkidm98mhkaFV4FsGEbnoMJGoTIrRYSpLTyck0rySdqNwT+QyWNfkHQB+EyyV+KPQunJsYcXhHyrJ+Kio+ksg3csaOOjiJQtX1936oHPRYE0KtXuW+Utgwc4Tlqb0AibJ1tVy7Onl37ddYzII6iBuEfv1DfsTJFpChHbniMAe6xRUBF7q0c3WlofnpZC5QEaDENqwieK1R1JJSXOopGElNoQAQuUshrsD1g0kajUownpd9rwD0gKIhaWni59oCgxAj7OmDMgdi5iWeih1dJxeC8ABmNDqWVeznmP+8v2ddq32EyWuUQPCyFBDGASDcb/tM+mPKw4HpR6LYUzcJVF9+DHisyjifw/gq0TkPwP4rQD+rX/+PmiW8sMj4m6uK2CZAE3Vr+hCMkU5mkESB8yYsigq3VfAmBNAFMjpTlvNSqMCK0faXlpaOXNjOizxQuLMUdt4q7nMQWzHxWReOy/VrWtorvrVroUWbV8sxiuFXUVADTKnAj7FXvThyAJC80HrNJw9giiwXwyGgFuC43XzPAhNsNIZ4GM9bvPB5qB1ZR5LmZ3vy9uSFDrZLVxA6O3EIhc3eTG3/lkwFP2Y4tc4uPUaxXZKct99xaUVHK2xcoDOF67xemPQcJHRotDDtA/82SKXWdKxThDGH8rcOM1lq8G2Md41UNfAvC5pweKzie7ehnVedf8Jz00E8yVrIuQvAPhiAO8K4I+mhIkPxHlXObsgshPwWVhDmQUQmCItC6BZk/mgbmDQ+tiRpRWVU6uT1RYKBtg9zsL6xr5zJeuK+K1VTHMlkrDr2CUtHIGlpmpuS8w7gokzAmvWpHAYAMw4N+t4FDTlujMvnBsfqyVGtHoJQYXbaGDedUyWHbdL46YX0iZ1IXvmsONiR5W73XneOVRqZ7/kDhM2iuLtyduKFOh8f2/iGZ0oyVAoTHMuvH63vMd0b6mIU+XBGG/M+RnHcNYhX1rEJs4j/ot7Pv+8cx3RBZLuwR+cYUVlg2ZhhGXkSldre6EQWKGC7XaiTCbQWWzxd1YYSQIOSMkbZF7k5zQwaHddA69kUaEBALHr2izH6KuXzymp4E0KaNH9lwqMN9pY9yVp8NxkG3S4N6wDiIAWmu2clTaDi2XBCGDQTWbt4BxWLTPYKOGhWdHWxN2eG7QQ81QsW4/lKKOQD5LCreJeQN/kcz5ITA5fAAo0lHkk9yha70AALPVZB0AP7X6MR96phCa1pHmp7f5WttoibY5p66UdN9cXDnikCMSfhVzfhHMU3HXS3zx9fE7ZjnddCeMSmgjx1OaNqn7n4vOPBbBW1W8778FdFKErC4G98DV9mRSTJXsgLARyNKsH7eoKPWaaLdtkmfK4wOJvP1elW+hBsc411F6x8EUivBGLirTvxeslBOHfxxsJKmN7+QPvdqVeJmB13JRMYK8poYOLGOlTS8uWwoULQCRMkAWAjY81z9ci3dr2M5YLoSV+JrNC4DWNIXasTbLIyfxYiwW1BJiuGuOibIHVNXGlp7Fgsdg+reEyG5tjupoYDrxGFQyswOb75vvEm22p0YbbD0cKue6LwKwYjxZUPzQPArxPJaU9Aw1T5rNQEQFBCFD5DNJzSRY44NflzI3VDcVwbNd4tAxa3k2hQXSBRUQKgENVvX47+58FePl8AEd7Pr/u3z00oulBbxbrku/Kjf2l1JZkseSxtsBds2C7gNtCKS/H0o0p/WspxNLOkcZxEoe3s7qpFMh5RVMy+3jA3fhz8DA4p4hKcZ1lv4+NIP13O5brHQjvQfCPGWhNgdNbBuJPuCc8fggXt8GCn/n52dlnH8Sii2cB6O7zzjiTV9Ztmy3+kyRd+965WDyPOVaQn5V8/++FnKFV0j0TEfmDIvJ1IvKLADYAHheRayLyH0Xkc0Tkt5z2WGfBiN8TwH/Z8/nr/LuHQnI2G62E/HJogdd39Y8cNysbRKGeqLjFKma+/3DUF7bJL/qceMIlbcMg3ZKmFZQjFm1XtO4T2dKW9o5Z0aF+GwvWaAt0cQxeoQzu2uf6EEW1w3yDI+xKWItg9m1UpLOu8z46wNsNCWTl1pcHkSwvV9ucFzfmsoXHYkUVO4ov1y4uW0kp0Ijmr8rU6sSrJs6PGRivIWoeR9slAXQNZH43NPF5ff9l4M46b7iFHgoz1XxA/5NzN6/zYtKul2ycSCUXp6+R3rdQsFpaFiCfoVhE5zafUt36lNZhmwt+HRkMFYOQok2UeWn7uPTnIXrBgnUi8odhxc6eAquN87cA/DKsQuUzALwvrKHy3xSRrwHwN1X1zTc75lkU8dsAPB/WPDTLbwPw+BmOc6EleK1JYbXgkwBFe1fYI+7D1l6E6VBaUkGyKoYb2tUdiP3ZwQHSlCYVsbYXPUMTtiM6y6tM6PuqlX67cD2B4JYSjhiOWZHNFMrgrnAozaHR9kTsOKNXIItUZGnHCoaHAnXUVreC16awusIDIMXd4wpYPYqmEDnnxLWLIuiEpiztOki9s+15HyWwWhUNSp16erACwOBzMdmiGU0vfK7GG3QR0DjJCbYJRczzirT7MCerUdDaHykihoBiD5nGNfjpUsEn4z83AwGAV+rzLEHOoyYYao/VC6BLwe8CyhG406aI0XdVscSclmRDmh452DtW+TnLBYMmXgLgLwN4harWPd9/CwCIyHMB/K8A/hQs6e1EOWsZzC8VkT+iqv/VT/ReAP4BgH99huNcaDEFeLq7HmVRkrJmUEPnZjHFSzvsO4h0lguAnqFBBbKkJukJz72klzZBDfF1UsLhes5qdY4ZMefLS4vUx5cDNx2MocmY28MCkH2P6uy4WLLcafm26W9UtyiUEz3s2pztJE1of9z8k793wURpymyJwVLBtEQR6fbrEnoUHgNYQDNpLpbQEBfs6I6dF01azNm6XVxjzZ+TWdHBYQpFssTT5ztjKfZll/CRlHYdAD1YeGZ57u+isrxIrAlV3dfCbd92bwTw10+z7VkU8WfBuMM/IyIszv4cAD8K4K+d4TgXWmQChgVFp3vph92HjxQzqWYpGi9TMV0V6x4s5Ghq97AulQHEvx5hVsZi
m7Zxs7QyuZ/750LpkpTnThsn8kiPa1xUZjhIkdaYVIFxo5FkEQsIr58Wek6PZsCHnTgyoTnxZ5nSq3Txh4RzO5NiOG4TEQFBBgznplgs+JXNy/QPTUmlLbqss6CJiSeWZC6tNIUr2upqCLnQnoAx3rDFjQHaPE/dQiYpoDhrQC2EhYLrTLd/tsWfiyddKC3mSeR601HgKXkO+fnprGJJ17gPZ/WxRhAyFsbUwgvtWOctqhdLEd8NOYsingB8KIAPB/AB/tmPA/ge1QvmONyB5Ag64FaJSG+ZLNzs/A/esZgEeSDtkwr6xA9t57Ff0Fkj8a5kVzJbMmlb7h+WCrdbLCyxvVtG5kb351enQS2DjWXLMpca1yParC5a7qT+katqOLO2a/PkEC0CrNDc4MHmSdXnRABx/D1bpl2wUm3gwYHmvhyTU7923mW/rlzlDKLeKNDuWd/I1Tahwoz54lzXFKz0kpv5PC24aTc2SnoCQHUaX1qAujlBvwgIuZTIafYaVMWbBe4yltsZGcm6PonGOB82I0BmK206JBjrbsXLLhp9TURefsJXb4fF0h5T1VN3LjqVIhaRwU/wO1X1VQBeddoTnEVE5GMAfBkshPPVqvrSxfd/GcCnwRaFNwP4M6r6i/7dDOC1vul/V9WPvb1BoGWlMYOtaMeaAHoLtoCcVrNS+BCXjDNScZZWOKirCcFzJ9d4n8UM9J8Xp57lz7vFRGmppqJCc7No5gOLgunYGlPmtj5AU0IWkDLsgJiiFhgMA9O8bWz2y7xyr2Bo68aSHdFdW22WVmC1R4rxeoWOlvGlxQoVBS9auJ9ZhrSerPaCBC1PACswRGXG+zuYFWl0vNDsZmUGDawtOpybsYrxiGqrd1ymNrd1BlDF1g3RCAA3GEtjMnay0/z+l8kXNCaMqN0PWsodFMTnpiqGbTICfLGIWhVbRKuqOraSqxb4Ey+stFjwNT0XeQ73QEV3Qy6gqfeWEz5/Gqw42l8Rkf9JVX/qNAc7lSJW1dkpGutTDfE2xJX9V8DqWLwBwKtF5OWq+jNpsx8H8AJVvS4ifx7WPPSP+Xc3VPUD7nQcTN2VGRbESXzMZUEYpqnOxV5aJmzYl/bisN5BJtJvr5pFSZdbFAEBqMBghWzRAt5JWJpF5dbXsGnt4cMaFWlGu7T9GbwqG087ngVbAHU0k4tKj+O3D1vixHxgWWplsi7Aw2YX/GXt5OG4QmbFdHVAHUebI/QLzDLwaGPQ1qDUFdzqiRnrt29R1wXbR8eWijtYwaLORZ4lIBjWeA5X3uEG4q/EnWcqIQbDtPWyC1x8j8JZe9ulLJIw4txiyVgnbVvznoCyLA7P+eAzRCYEs9p8UZ3XvOfS9vHiSRwDgJacMrY2X2wpJdU6bLN41Xxo0EPZmpFAzjyt89YWK3GStR/33RCFoF4g1gQAqOqnnvSdc4q/CsDfhhVIu6Wc5eq+CMBLReSZZ9jnLPJBAF6nqq9X1Q2AbwLwcXkDVf2+RJj+YVi35nOXHIQJSZZq/ixbHDlQ1ruLmiCAxWHJwc34LS2Q9Pvec4YlqN0+OQDTWdXM9mJQzBki8Vk3nj3zQmtemrKLQFNyu6n4yrY6CyJtezPLJo093G/+TFznltQhoYAyZztj1GFhd/O6Zzx7LLoIZvIYiQlBmGY80mhIWlihL5JoUoZjWqBFd4+/I4tryHMSWHzqOwdgx2uS5XznueGCns8nu3PY35vWKDcvTnm8d0v0lP8ugjiT4sthne9PJWftWfceAN4oIm8AcG1x8vc/w7H2yXNhld0obwBws+jki9AKDwHAoYi8BgZbvFRV//W+nUTkxQBeDACrR59uH9aG+zFwFIEU0eBxdhHyhbKObDW0h7iuxGhTfOHUs9bcBSS1K3M1xS25KC25atZICe1qrmmZFHULFMj+xSMr42rWDLPO2OJnhOOKvl3jGVN5SeDk8XILraj+7ZOAWry+AwpYZLxMaXzCdGfbf7yx2H+hoKYrBVrWlrV2pVirn0OvR8zqZdIs3RbEsl+HFFAavEAOx0FqVtnY3zK1DhyEAeyKCCFwfMa/Hm7MgAc1d82aPckdgqYwkuLcwYL9Z8ABXJCk3YP8rPH5rDAYiKhHXqCkKopDFvNKILSS/bwBS6VzcvENfNyhsJJabO2Db8+VT6x4EIN112G9PU8lZ1HEFyaFWUT+BIAXwIKHlHdT1TeKyHsC+F4Rea2q/vxyX1V9DMBjAHD1Wc9TAOECA/AH04NFq9QEc2gWKBxvlKLOBXX3PQdnkuXa8DXW/7XPqybLEm278ahi2CrmdcHWWQjZNZ0PWNyb+f6615rrrtvdVVXWDUZgwsFf9rEi4aDxM1mR6otUU4L2+bCFpfEWe8GDETcDpfi5OS902at3OJ76OQtLH9ZfbboyNGzUq77VdatfwXrFw7F1c+4Wyi2iulqkBAsXRNuEEAMxXgDd4hYUPmdGlBkox4rxxmzztx78OZG9HTBMIUsEIDtLHIh9+TvUEmdoZffbJ3pZKORW4L0rr5nOS89LtAWS51W7RtYzzotgxpahgEwaC6tUV9I5bZ3eCheM85KLYu6eXj4KwH897cZnKfrzBbc1nNPLGwE8L/39Lv5ZJyLykQA+B8CHqupxGt8b/efrReT7AfwuADuKeK9QkfrvJ7mvnYvp20bEPL9cdIGX3F/4cb2bsywjENJeyNoYZY3fupAWVZc2ZvQKJFxpLiJQSOr5FucFApdGSn0NxcWBpHO3MdgBLGJPzFpaYocrA9K4djBF6Y/Ha5B0LjJXducTgafSgiN1L7+8S7dd0ZTNEpYgpxg87uL7kALUlYHfOkoEQ9tiK218WW6lVBZuPxeO3N064JluMpr1210n2rPMZ4meQFwbLe5s5YZF7efE4mIU/fUt349zlItmEYvIHznhq6fCIIk/A+BTT3u8MzcPvYvyagDPF5H3gCngT4K1YgoRkd8F4CsBfEymhojI0wFcV9Vjx7A/BBbIO5XMa7O6AET5RHO/EIGKCuxYMnVlnXOleiqs04ckwRwMsDGqDpiy6o4FdPxk20e6KHzuk5YTNuYDs1isA287FjOyxiNP350Bqc5JXWVtiGj3FIouvdDBSXVFK75ddYUQmXUwRcHgJJVwXUk064z7tbju6arvH/xYBO3OUqSb5YoIyilk9gVA4Y1LLeV7O1oK7nidVEK7T3H+2YGGxIgI136dYJsbuXs0AmagMts8OmB71SJxxNn7BIk2b7q8/oVFnC3KwNkV0XhA6bJwDB6MK4u55DyWbfqcx87Q0Jjuk1vMxbND69g6QseiApgBwcXcWTN8Dnhd7RkH9i5etyEKoNZzOtj5yUkIweMw+tqfUdVvPu3BzlJ9bQ2zRD8ZVpO46+Csujdv7NSiqpOIfAaAV8KcnZep6k+LyBcCeI2qvhzWFeRRAN8q9gCQpvY7AHyliJHNYBjxz+w90b5ze3dgABivS1QTyymfOTASysrx3uKQAbHYwBiTuxiVttBbfTk4EqnErB2wAcoTHiRadpMQu9I6ivWNI47rFb7MPeaCQkaHxgutQ7Ougo8
bnaMlWviE9VTRWaPEDee1YL4CU4LXm1cRxx7av7C68tzT9SUOvhFf/FJnk6HVcigJL13CJyqN/TCC23nti5ELLaJEZLOINSiF8yFiccURess6W8bi8MjQxhjBU7INFMGmWVrBYZnu0S9dNmLxIWQvRxfXrmmexaq8sZIaPYu8yO54ID6GMEIEmBxeYmA3xgakwOzifmp/L84tgMcJuECiuhORuSM5i0X8RTCq2N8G8KWwbLp3h1muf/M8BqOqr4AV0ciffW76/SNP2O+HALzf7Z/YlSlcobklUyHeE6wpRz5sy+4aFtSQILdLRTSmBJqS4LZ2svbdSQ9t4I7ZYvJMthZ1J3aCoNTx9zpqK7qTamhYe/heO8RLpYriJ4xylvnFE9jLKG4VzZKuUVqpyKoRrIzx5XoHDMw5x3gnwk/FMwFj6qCR55OFhPK+OdlF3VplnYeuKE7CUYNjTe/GYY6wpBNbgs9H9h7mVev2MWwVWg0zt9KebD/VriECtLxXvojn64d/JgrjYfszUFOQNOpRs+OHWKAuQxEBr2h/eF4XwEayre8hKXDRS487CYBiC3nV9p3RJtFve45yAXnE5ypnUcSfCODTVfXficiXAPhOVf15EflZGDD9lXdlhPdAyqzAprnHoTzH3Re1qyOQFEMdTaFEMIe1EWZTpPMBMK2kVzD7XNQkWrDrlotxY7PraRu341amKKsF5XSU2AZqEMbBdTvxxMAf0EXM2eNtPkitlJI12URQ0ss3Hwh0Eqw2M8YbhmVI6qAc7rNXPFNnkHRzAoRnIBVYXffUagbrCqClYLoCr66GVkt3AQ2wGt58xdzqsrFmmDqIXbsHoiJotwFW1yrqINg+KpieYiyW9TvaGLLnQot+esTgoDIBekOCfTEeVZ87aYXsFYDaAsnrsYJGeUGlUrcFbTqQFjhOnOfuwXHFOzu0YPe6pU5zMeH8cuHhfM2rFispW3g3SkXNRZt8bBMWdbCRAndoi825yQVUxGJu+Z+CdbJ/T9goXw/gWwF8w1kyjs+iiJ8NgO7+E7AMEgD4d7CScA+uuAu5xOvCZU/WRvaQcvAu0krDUiWOZgXjo49cdgv3uOocz9KF7JQMh7qwpJeWNcfOGhfhLqtGlpas7EBL/m6Z3EoaBHNtltcyIBMWuc9NHdy866wyTgwi8BPVu5KlthMIdbfaEmNqMBKCJjgAOiq0CqKrcR6f0Oo0y42lPC39WoE990R8oZJRsXH6IBdVjpeFdrqEFGduqC+IwuPNbX66uePCubReOR3C+W0wQ7ueZM1zCLSo00Im1ReNhDl3wdKm97vrCHhFrUxFd52EqTwYGfc3eSA2Hj1H5SkXLljn8u0A/hAso/e1sKl8bwBfB+APwxT0qeQsivi/A/gt/vN1AD4awI8B+D2wOpwPtPAhDYwW/vJV+NPoGyaXlCm0xAKhgEzGcRVabpKsPWLJtX+4944lW6fZckbaT8wljnY9af9WQCYlkcSi0WrTkkcaQSiOUxumTKXM7sTdGHhoEdS1muKaBVoGDJuCeS3YPtJ6xjEpomyatWaFc0zrtjKLzRqso2C6WmLcWixt+eBt0mCb8Fz8MtfAPElAKMOmjZ2ZZF1QKyknnqNM1hWjbBEB0GUxdSpT1i4uW2Yt2rxZINOClhGw1Db3dZ0Ust+7bN3nSmgyK0oVKL02QfDM80IbC6q6B7BoRJrTtJninMdln7c5ZXPSDHWQwpfjHPFY3A0c4YJZxCLyxwH8ARhx4N8vvvtoAN8uIp+iqv/yNMc7iyL+DgC/H5bR9mUAvlFE/iwsEePvneE4F06yEkaig7HVUaSMqjMlPL10OPZaEtoCe/NKsL2aKmYlxRUNLuHGxCDR9ihEW/po7oSwTAoAHIJYe8Bwa5YoXzYWOs/8Y+J3dQDg5QxZQSzXrW2t52m9os3HnJSQj5djmteNYbF91JVg4qEOR8Bwza5vPDK33QJCgnml1iLoEXuxh6PWGbuugOmwdFbocAyM121yWeWOmYN1DegEo9PX/j6pwyeAze+QIQ3A6zJLKCDWR2ZK9Q7tz+/z+h0a82Np6xrHMqx50T4JcFjDx5L4wsaUIR+7nSeK9W/bfYwgclLgwXWvxmJh8JgWtxXgtz/mg4LpkC6GKdF5xQUiwRT58avGJuEiOt+1wgeIcenFY038CQB/Z6mEAUBVXykif8+3OV9FrKqfnX7/NhH5JRhN7L+q6r857XEeBNlLRtcTPltEkFsQxt6Obh8ew18cFXiHirbJPuv3ZtZzx6Lwv7vg1eLYDfP2r0tzJ/MxqGDyuAOaoYucraHkNgNuiObPFotNn+ZrQb1cMa27/kyhSsIAK7HPwDHTtUdAc25/50UyL5TLee4WwbT4SN7Wr6GwgWg1JSwzImmlS8FeXFc35+k+Bj9X+nPl66PVq8Ce5yw9e/usyfQMajpn/smgbAcz8N245xbqhVPEvxPA37jJ998F4M+d9mC3zSNW1R8B8CO3u/9FkvwC5Jq5bHoZUWk0xZIxuyWFp8yAuls/r5yjmQoIUVgDoavTCx9LARSG72YFavs1pV+2AAsAZRy5KwSUEhxEG785MvaYCFLpdgvqSuNaiD3OB+YNRMCIVv0KoaiaRd6UY+58QQt5OhTUocQ1DV5eczhq8zu55drhofQGkhUfrI65QRDDBhivafCwowiOs1tkVucJV8wHBZtHSnccSe43x8tgY6PCtdZODQu2ICBhI3pPkMX9F39Y2EvPFTYDpeR8h5dWWiAtnpuCSF0Xn9tABfy4Mipkockt8OeP60hoCNH5A3BWTdH+uSPjBJyHZLT4PZa7paEvGDQB4J0BvOkm378J1jbpVHJTRSwiv1dVf/A0BxKRRwG8h6q+9rQnv0iSrZ4uMOJ/z9kio1u+tEbhlognGujKlWAVICV00OKoSopme8roroa1LNIsI8cCCZMACNwwF8TJNDNNxcsj0WQUTJ4FPxwB5bi9RJqCajYnGmnQkVAytBoYXYAweQiW2MKaE4jgFa3jWZwDXRGt6octoMetFVW9icub03hzoI0MiuFIcfCO5oJzMZvXzruuwHijYjiaG23L06SLJ4yMxxYknA9KpFYz2cHmHhhSII9Ke3ZFZz3e2rZcHGmNcj4ELWGHCrFLH0daUAGfPPLG27XtdMgW26db/6WNMT+7kYLP+0fOe2K6FGeC1EEwrXn/UjCxpCf5vBXnxVPEKwRItFcmLHItbia3soi/WkTeCOCrAXyXqr5juYGIvD8MC/lTsMJAD5wiJpZqVpt2iiVvAywss/Tg1UGs5uyiEE63z/Lv7I7uedDotts+0jtn2TpGjwGGEufvS1eSf0tSZOGWNmXM8+qgvaWtlmAS41jCDgkayVxiWshcKPpW8r11dZoXr4MHaksA6aCU0iiFeeFkvzUcDtElozunW7QqJRIbOJfdgurHY1W6nP2Y72uGEWLepb9uWpwxf4VcacGOgmXasR/Tgsr93ITVzG4t+XnmQk0eczI8wrNZzLMOQNW+/2AHz/mYY27OSxTYeYkuhvxtEbl+wnenLvgD3FoRvy8M5/
g8AP9CRH4e1q30CMDTAbwXgEMA/wrAR5wlm+0iSVTcAhqe59ZEF11XhOUUFojzdcPy7ZRsagi6UFjcv67pazelG9vV7OJrh5OSnrRPmeno7rM6dpkrmhE/rI1tELUqciF8QjGj8Z+leg3iY89UmzQpHeleZlLa+DKPDgtYCyc773hdsb5WjZN6KMFkAHolcBohVqyiloSTrF8tVkTp4G0bQBXzlRHzYUFdCY6fWlqrIm0eA2GA6UqDA9q5rBVWC+7ZTwYAO+XqlnLAHQxeRrp6Y0R07Abftq4t6EkKHZ/BYPX4mIta7eTVdQ8YH9jYjYcu4RENxy34y7rT81rC86iDACvHlqemyHm+aWwLHbPwdp5BIKzuO8u17eW8iBgi8jIA/zOAX1PV972DQ/0AgP/hFNucSm6qiFV1ghVr/woReQGA3wvg3QBcgVHX/h6A71PVt572hBdR+DBJBeqgoVzmAkSBdlcuWiTxSF0BKawSW4IG2sFt/7ygdwEvV/oFCKpV7Jra1i8j9XnsDCiFmyupRkHCk7vgTTb8S9qGllL6LtK4FS2poTQalaVLA0hWWrOIkwVZYG3X4QrtqBpWfaW0Uou0Mk9z35bXI27RMytw8OzIqijXt5Ba0bo9D6awror37evnkQtEXaeF2GGUvCh3+O0iWCiqRkNcwEVh0WZowKGZzjMR8doXAI4WVDvxe5v2Xd2wQvUbDK1Nl7NILMXeHtaSaHt18IeTHt6ASBxq1rt0yrV1eEGr+5G9GNldwO5Yzo818TUA/hGM63vboqofdh6DoZyFNfEaAK85z5NfGFm4ZZ316orBsoYaLQ1A34ONm/sDzeNmLLCOSC9ae+khaBZINyaHBejG7hkz6WIdzCFuSbOQi1t6UvmZWHGefMwECRC3VUGkeUObhRUQAgugA/ESB8zhtS3qiA4pK5vW32w+LK0offIkbkt47UkhBNY8CjAWK8O5LnbedfJ80oLI9vTh+vs1qvhCQkXFRcuVViQELWCViAcwoMnPZ+1S15XHjg1aynTZIIKOpAR2C3sxCtnmkWLGxCp9GYFEn49BEAlf6oHlbRtTDsYB6HoEWro2Woae8tyLBXhhLJyHnBfUoao/ICLvfj5HO1m8eNmLVPVmzIqQi1R97b5J8DkXNztHg+sBwMh3BCeQrFu66dK/XLHdKJgHs6rLBLcmpVNCMtnLPjOFVZuizVQrBswAYFqncfg+w7ExAiJDLisO2BiGbVPkZHZEQkDqZD0dthZQ6uwBHrf4y13VrtnYGHaMoqbErUWTzc94zfi2zPjaPCpNAd2JwZMWqpzpF7DBWjAfDMbEuDpg8+gQ/GkueHV0V/3QCv9AXQFuEYqlFlgRflWoYpejmxZZwBJDgkd8aAwKmb2q2xaY1wow7X0UVLJXEuXN2j15AowXo9K0eKkXpa9jqmKXituX2e4ngAi8NeqeAluxqDHaNUT36Fw3wp8rWtLctq5S4I8LuWr3vN6xLBa3W8gzvUEE5TGvQX7XxQujfTysacWHw5LfLhXxaYWBk9AFe256FMxZtDuKgAiwS0NbnENim4QLZMt7eW66sP5wnzTuHHSJXevinyIwaJkFBdqoXIt01HhJFcbvTYGdXLSnC9akgF++rsy2APrElyUz4HZl6S00b8OzA4tAR+N2saB9zQGwZMFxYQhObU0BWGkWdFvUOAe7vF0ZpTu+puMug2eZ/dF1d445TjVO9o07sUgGRYvnKwLyys+pigassTReFQjIpQvwUbHnOc/XRphMDQI5P3Bin0t4ovy6qr7g3E59ChGR9wPwZ2GkhafCyA3/u6r+6GmPcamIKW4Q7xDv3fVkME/oIoqnwiY+rRZEMZuW2OHH0aYgyIHVfbNPPe0veqSfiilPqFtpnnUW263M6lLH9MgDhrTmmgz8Fe90rAWQlW1PK6Z1p2iKKqfbziugiMEboXxcuVotYtuJCtcWAS9sM6lb32kRabvctgTPGugVnS8884Hg+OkriFrXkx04qQDqlimk4Z/DxrBjLep0PT4knA9PMff7s4/rTPpa2VhAjZ5JoyIiAnsRc0gwGRfYWi3lmhXaIpGjmmZmPABAbCeL8UTxHxhuXFWwXBBiPnhZc6JL1ua9ZOpgeFI54Ixd4+CO5JygifMSEXkKrF76pwH47bC6E38MVj3yy85KXLhUxC7x4I8Ny81JGNmqjJZFa0klJgFCF0wqYHTcTuAvz4BIxw1qGSXcawWLjdcVwLKWUCsxWbatO7G1ildsrxYcE7LwsbEtj8ySWgC15p5aADlsbwo/z/QogNtKcokFeqyxaNBKGtUhj/TyFQF00xIDgle7tO7uUJbKhIoBMPz06Oklxhop3C6B3/u4got8bOwCBp60LPYbEK2frErefkUq1SCZ8YZXkDvIfeI0rPcIwjnMFOOS9jyx/Gk4VRUgpSCKtzHAS6+EypJcYZGW5j01Vk2bvLavzBplQXPjWUIXxk9vwbqMMZ8rTnxeMMf5yRsB/CdYuYd/xabGchOv+GZy6qkSkd8nIjuKW0RGEfl9t3X2iyKC7iXI1mBnFd7sEAkfa1gZdn9f7uM/d1ozZYxt4cLuFaVrqDsvVQ5G7Xh5xPTS+KKGby63uOdFzYkUwZNdWpvSXt6wkPK/eyHLOdi3SbqGNg/Jq8nXm4JVPH73/Oy53u73fA+W923x3NxSOO66O66Mm+OkY57glUTBqdrPSQ4K6+JZ2oFablMp7YiinexW/24hIvKNMAX6XiLyBhF50W2O6hgGQzwVRuG9IzmLRfx9AJ4D4NcWnz/Vv7tj1qCIfAxshRkAfLWqvnTx/QGMdvK7AbwFwB9T1V/w7z4bBpLPAP6Sqr7ytOfN2KcxAuj3IVzIKAQvYqaHW4PRdocvBEn1bsUMtT24AKL0ZbimfPlSZTPrtEzesLv16bhGTZNmUYbrD8+MSu4hec+D7VM9A6xMXlltVuuKwZfXu1RsnmIWDwNW0WvPr5WuetkCw9YGXofSss78uua1YHrExjxsWkspAC3T7C5Jt3guFU1SVmQmAG2ujNUhcZ2skjZeV6yumVcRHZDVIfLZCv1EpxOes5gXFMFBX+itvKftTxpgXjSq83aDUraEEeiNJGUMkK/Na1MMC2tSaoM4IqhJD9CV73Ds92jhJbFwfGQCahsLjxcW/jlCE+fImvjk8zkSfgusBOaLAHyJiLwKwL/A6ZbPHTmLIl460pR3BnDtdk7eHVxkgHGWPwrAGwC8WkRevsBaXgTgbar6W0Xkk2B1kP+YiLw3rFPI+8Am6LtF5Lep6qK6w0knT3n8roTDunOFGpSt5EYG+Z4W7NLiIJTh+3R4cd392QWAxFOg92XrdVS41O5+7o+nAzB5XQkRWN0KDxZKUqoZTiDuSeU5Xk/81cXLLo6Jx3yVlqDCoGZds2i6Qq+JH0sAVZyXwXSiZIs1SUcTBAIPhgKztCppc0Hg29GC6QYwHhs8NR0Cc/GMx6CIee3p5GsqvLfgWvp7PTVWSx2Ml23PIrwZrUYCzD6PKi/guc+dHgpmQi0JpiC+m581KlY7toZyJ/S14
01RwdJLSHPbYeTJgzgXOSdFfF6iqltYAfhvFZF3A/CnYXkVI4AvEJGvAfDvfbtbyi0VsYi8nOcG8PUicpy+HmDZdz902gu4iXwQgNep6uv9vN8E4OPQitHD//58//3bAPwjr5L/cQC+ybs6/zcReZ0f7z+d9uRLVkLHB0Z7mN1YTfshFFHsa3Bk9LLj8QITpkLjC5QUXCwAnZWjYUkZbKCBFVYArOXLFyazOsqMVudi6V6iHV+SK2k4uDQ8ssAi4b6f8BgA2NoemgKLeV5pbVax+sI5AeBuywmK/iRYYAkJxAILdC56tD262UIiu79XWt+MPfixlhAIYQ0tYkkk6Bf8nWMLvGi+XxYXSN+vzK6Ex2QQcP7JEvGFGzzF2AyQrnoclXJ6joMtsQxOL7Hnh1RU9RdhyvcLYTWKXwQL3h2hNdC4qZzGIn6L/xQAb0NfBH4D4AcBfNXphnxTeS6AX0p/vwHAB5+0jTcbfTvMIn8urE5y3ve5+04iIi8G8GIAWD36dPssWwipxisDDmUCokh6Uq47OCHoYvrfW3PHA3P2bcpkL3FuKJqDV6GkIsCC6ABhbA23SmqyXGit+PGZdjocAWNtijzGSZd6BiQCRnb8urEqaLKyc0TSSBoXRCBQTIdi9WypEOgp+NiHYy+C5K44K4Wda0T9NiV41qkAf9TGQFJ+MyL9WapVLwtoIlt9y4UuLXjxLG2ZKm7fRS3ftE9dSVDoci1ozl0dEEYCPSMWJIJYAHDYtP3K7JXgrrRqcDHutCghvCqzxDvam7TrDUNhTv+oiFMDgJ0g4J3cqwdAoXtrpFcCeKWIPAtGZzuV3FIRq+qnAoCI/AKAL1HVO4Yh7qc4ufsxALj6rOclMNV+ZEgiepKpRf8FySKkRZB3T/sBtIJ3tQ2t66jI1Q0Q3UFl8TIEL9etmOptgvLLpQlKMZaFV0EbU+fmuA6NZqQV2l7GOTKWG0SjaXz+MiqZHWjKN798MrdaE11UPS1M91zSQkqubA6cLi1i7sKgZlir+RqyR7RQwvEsDQpMTIhBY8VkhZ3wV8xAYXWzFEMIaGWx+PNYDevV6DMYRYncqu2y+HjMSOCw9l5c4MMbo8WbvIcuEUja97T6z+UWK84zxfmORUTkVv3oVPXNsCbLp9r+LCnOX+AHfQGs2MW/UdVrIvIIgGOvS3En8kYAz0t/v4t/tm+bNziD46kwi/00++6KPzihQPyzIPFTYSZXPydGoKZCNWgvR2C6pZV0XEbKT7QUspvaPfAIxb/EgiNpgQtAFescjNYWSKUFi9obDIyurJmtl4Mwml4oAF1tBVpQ2SIkrQ5A4KpGb0K8nKwLDPHuJPdCtP1c0ri6rhawsQ5OzSMeKnOrOwz0C+3SOypOe9PJ8HFavMMxgkYIeKPOleHGhJNyjCCPiYsFvRiozykX/eztiOHN84pV70xDdnGG2qhu3RzteSaXRfn5LIRChwAH3sVkUV+C8eRzkYtlEf/fIvJFAL7V4dC9IiK/A8BfhjUU/ds3O+CpFbGIPBvAd8KwVwXwfD/BP4BhIf/raY91grwawPM9R/uNsODbpyy2eTmAF8Kw3z8K4HtVVR3H/pci8g9gwbrnAzhVVgsVMd2omQowu6jZYspKuxBLaIaioilSmRF1eLNpsAzKLR+yujKLpcyAsNfa0LYLFgPaZ0wUieAi3cnixH0q+LDabaGxWgNeUW3daGscX9koxiPbb3qkBYECI07W8bBRrJ4wtsf2EeOqWq2LxnkejypYLrKriXA3JCtgVzTWScO+YDJFDmQNW1OWlpZsmXgyG5+4bFNlM2nPRChHd9fH6woUYFME6u2axuvA6nFT8PMaqF5QaDq085atYel8vqKtVhofMXurPeH3LN1T+P2wGsTssmw7xbORFqPO+9o3fQJTtkxK2bbzsiaHDAg4Iuba95VzXGgvGDTxYgB/Fxaj+h5YDZ5clfK9YQXSfhuAL4cVGbqpnIU18aUAfhWGyf739Pm3Avj/neE4e8Ux38+AYSwDgJep6k87AP4aVX05gH8GK8f5OgBvhSlr+HbfAgvsTQD+wmkZE5Fy7BJ8WLSXN79otlG3y94HerntvjTc9kc7TlAii3Xfzd+zVvCydU38XL5UtMKT25ibX0pJ54vztjFHIMahC45vH18YQCj2iC/uUYT2ou45wC2UwnnITtJHNeqhFitspP5ZpnP0gb007qR7djjIaf6ygi5Ts2A7OGL5bDCdmRZvfIHwuuxcadL2PaM8T27JtNw2zcnNONZxnby5aeOl9xbnP0+5QIpYVf8DgA8Wkf83gE+GZdS9G6wq5a8D+HEALwPw9ar6G6c55lkU8e8H8PtV9W2L7JGfB/CuZzjOiaKqr4ClCObPPjf9fgTgE07Y94sBfPFZz1k8y6qrL+DFd6prHAY/mGHGalN8+JYKbDzS4Kd2CoacZL707v4DbX8taPUQct87dRqZON7nKdU5gyle/Amtti6LESUuLAulN1c7UbScf8rOFhGohHgAxi6mI/I7A6R4eneZGKnXuCaOZT4obr2n7sR3WQmHxwO2L6KVW1O2YVNWIuq8cQlFxrKige1XxH2AKOSY7Z6cilasS8g6DgpsH0FTjPBiPlH5rKWFlw1xXesGEji23986AOoZkUa9A1S0S2Gm1LFPzMmJGrBLb97VnpZdMinGTZvH8LZgae5dpxSBmVB118C5Y7lAipiiqj+E82GMnUkRX4GxJJbyLJhJ/uCJIikjU4oW4ALUaUN1sAd82ChkVswH7NzgEe0cKHNYgJHujs7DBzUpn0gSATy12l58tiESf6DJcy0b2z/gg4LoIWdKEKYQp5ZqXGn1Duhe6nIDgY+SKcLIe2Ci2gI/KN7gc04vnCSL2c9bPOV6nG2+6rpg85QhlAH7uTW8GthnlZ2nK2oWqcMCnqRRtsB4ZGOOUp/p3IRnWnKF30RJkIYruTIJylwxbLRbYEbvRK0CzFes9rFwoeR8EXriuatXOZvUK8RJzDPSs1pLU8KtY0jjMEegmcdAO36z1LV7B8QnPTNwDE6ii4jwqOzZ1Khx0ln5+7yzO71/F1ARn6ecBcX5ARhpmaKehPFZAL7nPAd138QfpFwjoqOoAYgyl8mVz9Zs594tvt9JBRYEOT4HvTpakAeWOotg30OuiLToRsPqx76Troo03n0vj3I+JDWubIuNeH1czkMdxRTdum0L7V+kjo+aoJEY4zm8cN09WLjKWRGx5m8kLVDpDmnBSg0zMw67hB9YjnKHDaIn/Nwz17Eo+4LYMvFuruCMzeHWdOZo3+TasxLOFnJ3npgTcb7zrkLvnqcFBHWuUuV0/x5QOYtF/NcB/AcR+X8BOADw92GZbE8F8CF3YWz3Xtward4aiPVXgyMp2RWzHnUdvcyl+gM6r1tAii57UMGArnVQ5g6PxxpKldAAa86GVQLYgz+37aJ1jbu8VjVMYjtyV1uFNezN+uvmY70bvAsrWC3YtH1UgAIcD4LtowPKRvHIrwLj0Ra1NiijpqajdWW/c55zyu3tKOOcGch5zDxWLiTB7AgXv8QiEkqY
[... bulk of base64-encoded PNG data omitted here; the rendered output belongs to the plotting cell that follows: the PSF-subtraction residual image shown with 'RA offset (arcsec)' and 'Dec offset (arcsec)' axes and a 'Flux (ADU)' colorbar ...]
Eg8/zZrbRzO4zs78GDgI+kF1fmc59L0VWjOF1906U9Pr9uN+0QtLM7DngNOBPcOv3fcBPk6+1+P9bjX+ZFO0U4OT9kO0poDVi5KP4F9EHgDPT+HcDrQtyO6dzIzM7H3c5rMdTij4sqVgEreH5npdm7VTcTfHjadxmEe5SCoacUMTDx4eA9xaLSrjl+0malcJS4BtMbhU/hOfDzTmD/ciPa2ZjZrbOzN4HvBI4GHirmT2OL0gtSV8mTS0bYh/QTVzwj5hosZ+B1xz8DzP7CZ7CsRtr+0dMUb3EzO41syvN7PeAO2jkG/4hcHK7OZkvJD6Ep5F84xQynJLGCoacqNAxZJjZHZIeBN4v6TP4z/BV5qvwJZJuAq6X9DfJam3lajxS4QfAbXil63OZ5ip+ilBYgluOzwC/DxxCIyn9B4FPpGiFNfiC4mnAS8ysiCLYjBcI/S4eJbKtw+3WMvHLZQNwjqSv4Ar9g3g+46m4GrhL0rV4Purd+KLnbfiXwkV4svMt+CLpK2lU+L4yPfczwGeB5/AFveVmdpGZPSfpX4ErJO1Jr81hwKvN7Jr0uh2Euzj+vgtZg0Gnaid1tP1vtERNZOdX4hbX54ENHZ57MPA8cOEk46/Gk6/vS38vaLnezWLdGXgS+qdxn+f9tCyoAe/ALb/deDTCncDbs+vL8YTq+4DNk8h7aJrTydm5Y/BY6524NXxpGxk302bxMsm+Psm9nUbM9uHAf+JKeA/ucrkKGM2euwz4Jh6NsRNP9P7h7HoNuAzYhEdx/BLfiJK/Jj+t+n8sWm9aJIYPhgpJH8FD3LqKle5XJN0N/IuZ/XvVsgQHnvARB8PGPwGbNOC5JoBb8TJewRwgLOIgCIKKCYs4CIKgYkIRB0EQVEwo4iAIgooJRRwEQVAxoYiDIAgqJhRxEARBxfw/zM2qlcA33jgAAAAASUVORK5CYII=\n", "text/plain": [ "

" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "plt.imshow(residuals[0, ], origin='lower', extent=[size, -size, -size, size])\n", "plt.xlabel('RA offset (arcsec)', fontsize=14)\n", "plt.ylabel('Dec offset (arcsec)', fontsize=14)\n", "cb = plt.colorbar()\n", "cb.set_label('Flux (ADU)', size=14.)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The star is located in the center of the image (which is masked here) and the orientation of the image is such that north is up and east is left. The bright yellow feature in southwest direction is the exoplanet $\\beta$ Pictoris b at an angular separation of 0.46 arcseconds." ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.9" } }, "nbformat": 4, "nbformat_minor": 4 } PynPoint-0.11.0/docs/tutorials/zimpol_adi.ipynb000066400000000000000000005616631450275315200215460ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Non-coronagraphic angular differential imaging" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "In this tutorial, we will process and analyze an archival [SPHERE/ZIMPOL](https://www.eso.org/sci/facilities/paranal/instruments/sphere/inst.html) dataset of [HD 142527](https://ui.adsabs.harvard.edu/abs/2019A%26A...622A.156C/abstract) that was obtained with the narrowband H$\\alpha$ filter (*N_Ha*) and without coronagraph. A few ZIMPOL specific preprocessing steps were already done so we start the processing of the data with the bad pixel cleaning and image registration. There are pipeline modules available for dual-band simultaneous differential imaging (i.e. [SDIpreparationModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.psfpreparation.SDIpreparationModule) and [SubtractImagesModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.basic.SubtractImagesModule)) but for simplicity we only use the *N_Ha* data in this tutorial in combination with angular differential imaging." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Getting started" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's start by importing the required Python modules. " ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import configparser\n", "import tarfile\n", "import urllib.request\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "from matplotlib.colors import LogNorm\n", "from matplotlib.patches import Circle" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "And also the pipeline and required modules of PynPoint." 
] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from pynpoint import Pypeline, FitsReadingModule, ParangReadingModule, \\\n", " StarExtractionModule, BadPixelSigmaFilterModule, \\\n", " StarAlignmentModule, FitCenterModule, ShiftImagesModule, \\\n", " PSFpreparationModule, PcaPsfSubtractionModule, \\\n", " FalsePositiveModule, SimplexMinimizationModule, \\\n", " FakePlanetModule, ContrastCurveModule, \\\n", " FitsWritingModule, TextWritingModule" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Next, we will download a tarball with the preprocessed images and the parallactic angles." ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "('hd142527_zimpol_h-alpha.tgz', )" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "urllib.request.urlretrieve('https://home.strw.leidenuniv.nl/~stolker/pynpoint/hd142527_zimpol_h-alpha.tgz',\n", " 'hd142527_zimpol_h-alpha.tgz')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Will unpack the compressed archive file in a folder called *input*." ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "tar = tarfile.open('hd142527_zimpol_h-alpha.tgz')\n", "tar.extractall(path='input')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Creating the configuration file" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "PynPoint requires a configuration file with the global settings and the FITS header keywords that have to be imported. The text file should be named `PynPoint_config.ini` (see [documentation](https://pynpoint.readthedocs.io/en/latest/configuration.html) for several instrument specific examples) and located in the working place of the pipeline.\n", "\n", "In this case, we don't need any of the header data but we set the pixel scale to 3.6 mas pixel$^{-1}$ with the `PIXSCALE` keyword. We also set the `MEMORY` keyword to `None` such that that all images of a dataset are loaded at once in the RAM when the data is processed by a certain pipeline module. The number of processes that is used by pipeline modules that support multiprocessing (see [overview of pipeline modules](https://pynpoint.readthedocs.io/en/latest/overview.html)) is set with the `CPU` keyword." ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "config = configparser.ConfigParser()\n", "config.add_section('header')\n", "config.add_section('settings')\n", "config['settings']['PIXSCALE'] = '0.0036'\n", "config['settings']['MEMORY'] = 'None'\n", "config['settings']['CPU'] = '1'\n", "\n", "with open('PynPoint_config.ini', 'w') as configfile:\n", " config.write(configfile)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Initiating the Pypeline" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We can now initiate the `Pypeline` by setting the working, input, and output folders. The configuration file will be read and the HDF5 database is created in the working place since it does not yet exist." 
] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "===============\n", "PynPoint v0.9.0\n", "===============\n", "\n", "Working place: ./\n", "Input place: input/\n", "Output place: ./\n", "\n", "Database: ./PynPoint_database.hdf5\n", "Configuration: ./PynPoint_config.ini\n", "\n", "Number of CPUs: 1\n", "Number of threads: not set\n" ] } ], "source": [ "pipeline = Pypeline(working_place_in='./',\n", " input_place_in='input/',\n", " output_place_in='./')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Some routines by libraries such as `numpy` and `scipy` use multithreading. The number of threads can be set beforehand from the command line with the `OMP_NUM_THREADS` environment variable (e.g. `export OMP_NUM_THREADS=4`). This is in particular important if a pipeline module also uses multiprocessing." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Importing the images and parallactic angles" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We will now import the images from the FITS files into the PynPoint database. This is done by first adding an instance of the [FitsReadingModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.readwrite.html#pynpoint.readwrite.fitsreading.FitsReadingModule) to the `Pypeline` with the `add_module` method and then running the module with the `run_module` method. The data is stored in the database with the name of the `image_tag` argument." ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-----------------\n", "FitsReadingModule\n", "-----------------\n", "\n", "Module name: read\n", "Reading FITS files... [DONE] \n", "Output ports: zimpol (70, 1024, 1024), fits_header/cal_OBS091_0235_cam2.fits (868,), fits_header/cal_OBS091_0237_cam2.fits (868,), fits_header/cal_OBS091_0239_cam2.fits (868,), fits_header/cal_OBS091_0241_cam2.fits (868,), fits_header/cal_OBS091_0243_cam2.fits (868,), fits_header/cal_OBS091_0245_cam2.fits (868,), fits_header/cal_OBS091_0247_cam2.fits (868,)\n" ] } ], "source": [ "module = FitsReadingModule(name_in='read',\n", " input_dir=None,\n", " image_tag='zimpol',\n", " overwrite=True,\n", " check=False,\n", " filenames=None,\n", " ifs_data=False)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('read')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's check the shape of the imported dataset. There are 70 images of 1024 by 1024 pixels." ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(70, 1024, 1024)" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "pipeline.get_shape('zimpol')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "We will also import the parallactic angles from a plain text file (a FITS file would also work) by using the [ParangReadingModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.readwrite.html#pynpoint.readwrite.attr_reading.ParangReadingModule). The angles will be stored as the `PARANG` attribute to the dataset that was previously imported and has the database tag *zimpol*." ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-------------------\n", "ParangReadingModule\n", "-------------------\n", "\n", "Module name: parang\n", "Reading parallactic angles... 
[DONE]\n", "Number of angles: 70\n", "Rotation range: -14.31 - 34.36 deg\n", "Output port: zimpol (70, 1024, 1024)\n" ] } ], "source": [ "module = ParangReadingModule(name_in='parang',\n", " data_tag='zimpol',\n", " file_name='parang.dat',\n", " input_dir=None,\n", " overwrite=True)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('parang')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The attributes can be read from the database with the [get_attribute](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=get_data#pynpoint.core.pypeline.Pypeline.get_attribute) method of the `Pypeline`. Let's have a look at the values of `PARANG`." ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([-14.3082 , -13.9496 , -13.5902 , -13.23 , -12.8691 , -12.5074 ,\n", " -12.145 , -11.782 , -11.4182 , -11.0538 , -6.43366, -6.0622 ,\n", " -5.69037, -5.3182 , -4.9457 , -4.57291, -4.19983, -3.8265 ,\n", " -3.45294, -3.07917, 1.61837, 1.99275, 2.36701, 2.74112,\n", " 3.11507, 3.48882, 3.86237, 4.23567, 4.60872, 4.98149,\n", " 9.6373 , 10.0041 , 10.3703 , 10.736 , 11.101 , 11.4653 ,\n", " 11.829 , 12.192 , 12.5543 , 12.9158 , 17.3717 , 17.7218 ,\n", " 18.0711 , 18.4193 , 18.7666 , 19.1129 , 19.4581 , 19.8024 ,\n", " 20.1456 , 20.4878 , 24.9167 , 25.243 , 25.568 , 25.8919 ,\n", " 26.2145 , 26.536 , 26.8563 , 27.1753 , 27.4931 , 27.8097 ,\n", " 31.7057 , 32.0052 , 32.3035 , 32.6005 , 32.8962 , 33.1906 ,\n", " 33.4838 , 33.7757 , 34.0663 , 34.3556 ])" ] }, "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ "pipeline.get_attribute('zimpol', 'PARANG', static=False)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Bad pixel correction" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The first processing module that we will use is the [BadPixelSigmaFilterModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.badpixel.BadPixelSigmaFilterModule) to correct bad pixels with a sigma filter. We replace outliers that deviate by more than 3$\\sigma$ from their neighboring pixels and iterate three times. \n", "\n", "The input port of `image_in_tag` points to the dataset that was imported by the `FitsReadingModule` and the output port of `image_out_tag` stores the processed / cleaned dataset in the database." ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-------------------------\n", "BadPixelSigmaFilterModule\n", "-------------------------\n", "\n", "Module name: badpixel\n", "Input port: zimpol (70, 1024, 1024)\n", "Bad pixel sigma filter... [DONE] \n", "Output port: bad (70, 1024, 1024)\n" ] } ], "source": [ "module = BadPixelSigmaFilterModule(name_in='badpixel',\n", " image_in_tag='zimpol',\n", " image_out_tag='bad',\n", " map_out_tag=None,\n", " box=9,\n", " sigma=3.,\n", " iterate=3)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('badpixel')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Image centering" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Next, we will crop the image around the brightest pixel at the position of the star with the [StarExtractionModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.extract.StarExtractionModule)." 
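As a side note to the bad pixel step above: the sigma filter can be pictured as comparing every pixel to the statistics of its local box and overwriting strong outliers, repeated a few times. The snippet below is a minimal `numpy`/`scipy` sketch of that idea for a single frame, written for illustration only; it is not the actual `BadPixelSigmaFilterModule` implementation, and the helper name `sigma_filter` is made up here.

```python
import numpy as np
from scipy.ndimage import generic_filter, median_filter

def sigma_filter(image, box=9, sigma=3., iterate=3):
    # Replace pixels (in a 2D array) that deviate by more than `sigma`
    # times the local standard deviation from the local median, both
    # computed in a box x box neighborhood. Repeat `iterate` times.
    filtered = image.copy()
    for _ in range(iterate):
        local_median = median_filter(filtered, size=box)
        local_std = generic_filter(filtered, np.std, size=box)
        outliers = np.abs(filtered - local_median) > sigma * local_std
        filtered[outliers] = local_median[outliers]
    return filtered

# Example use on one frame (slow for 1024 x 1024 images, but illustrative):
# clean = sigma_filter(images[0])
```

The pipeline module performs the corresponding correction on the full image stack and writes the result to the `bad` tag, which the star extraction step below then uses as its input.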
] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "--------------------\n", "StarExtractionModule\n", "--------------------\n", "\n", "Module name: extract\n", "Input port: bad (70, 1024, 1024)\n", "Extracting stellar position... [DONE] \n", "Output port: crop (70, 57, 57)\n" ] } ], "source": [ "module = StarExtractionModule(name_in='extract',\n", " image_in_tag='bad',\n", " image_out_tag='crop',\n", " index_out_tag=None,\n", " image_size=0.2,\n", " fwhm_star=0.03,\n", " position=(476, 436, 0.1))\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('extract')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's have a look at the first image from the processed data that is now centered with pixel precision. The data van be read from the database by using the [get_data](https://pynpoint.readthedocs.io/en/latest/pynpoint.core.html?highlight=get_data#pynpoint.core.pypeline.Pypeline.get_data) method of the `Pypeline`." ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 13, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAWfUlEQVR4nO2dX4wd5XnGn2fP/rMNxJg/loVRTQVtykVipBUhggswJaI0ClwglBRVvrDkm1QiSqQEWqlSpF4kNyG5qBJZBcUXaYBCkBFKGxxjFEVqDEuBxOAQG2QSXNtbJzYY7F3vn7cXZ2Dne2d35szOzDlnz/f8pKM938ycmXfP7Lvf98z7fu9HM4MQYvAZ6rUBQojuIGcXIhLk7EJEgpxdiEiQswsRCcPdvNgox2wc67p5SSGiYhof4oLNcKl9XXX2cazDZ3h7Ny8pRFQcsH3L7tMwXohIkLMLEQlydiEiQc4uRCTI2YWIBDm7EJHQ1dCb6GPoQrOaDTlwqGcXIhLk7EJEgobxMeGH6lWO1TB/1aGeXYhIkLMLEQlydiEiQZp9kCijyZu8lvR8X6KeXYhIkLMLEQlydiEiQZp9NdFNTS4GDvXsQkSCnF2ISOhoGE/yKICzAOYBzJnZBMkNAB4HsAXAUQD3mdnpZswUQlSlTM9+m5ltNbOJpP0ggH1mdh2AfUlbVIVc/rVayPsdVtPvMWBUGcbfDWB38n43gHsqWyOEaIxOnd0APEfyZZI7k20bzex48v4EgI1LfZDkTpKTJCdnMVPRXCHESuk09HaLmR0jeSWAvSR/m95pZkZyyRxJM9sFYBcAXMINyqMUokd01LOb2bHk5xSApwHcCOAkyU0AkPycaspIURMcWnz11I4CTS+93wiFd53kOpIXf/QewOcAHATwDIDtyWHbAexpykghRHU6GcZvBPA02/9VhwH8u5n9F8mXADxBcgeAdwDc15yZQoiqFDq7mb0N4NNLbP8jAK3SKMQqQbnxg0yeNq+q222h2udXimrjrRilywoRCXJ2ISJBw/heUyWc1MsQWpVrNykBNMxfFvXsQkSCnF2ISJCzCxEJ0uyriZI6mUMrfx5gCw1q27J6v06Nn9b0kel39exCRIKcXYhIkLMLEQnS7E3TxTh6FY3e5LmKKHw+kPc9VNHz/t4MuIZXzy5EJMjZhYgEObsQkSDNDpTX1U1quxI6vVBXN5k777VyBV1d9Hvkanp/XWn4ZVHPLkQkyNmFiAQ5uxCREKdmr1qSuEcljTPa1uvVAu3LGu02K3OuVtgsiqs73Z3+vUvH5HtVPqsPUc8uRCTI2YWIBDm7EJEQj2bv16WDcuLTZTV6RpMP5ZWSrlarrdS3uRDqZvNmlZg777+TRufdDxjq2YWIBDm7EJEwuMP4fh22FxAMU92wnS0/jHftlgtx+XOX+U6KUnHd8NnSw343bPd20g/r4YbiOeGzrg7bB6wstXp2ISJBzi5EJHTs7CRbJF8h+WzSvobkAZJHSD5OcrQ5M4UQVSmj2R8AcAjAJUn72wAeNrPHSP4AwA4A36/Zvs4po0d7mVJZNO00tT+j0Z0mp9foFdNpw8+W06ucn1/c5Y9N7VsSb5fCaY3QUc9OcjOAvwXwb0mbALYBeDI5ZDeAexqwTwhRE50O478L4OsAPuoCLwNwxszmkva7AK5a6oMkd5KcJDk5i5kqtgohKlDo7CQ/D2DKzF5eyQXMbJeZTZjZxAjGVnIKIUQNdKLZbwbwBZJ3ARhHW7N/D8B6ksNJ774ZwLHmzKyZPtLoubrbx9Ezbad1/bm87k7tZ9G5PT527jS7pa5FzIX78s9crOlTlE6XbfL5zCorY1XYs5vZQ2a22cy2APgigOfN7H4A+wHcmxy2HcCexqwUQlSmSpz9GwC+SvII2hr+kXpMEkI0Qal0WTN7AcALyfu3AdxYv0lCiCYY3Nz4PiUzbdVPU03r7kxcvSAXfqjg+OHU7R4u0Pe+7TX7XKiz00ebj8H7PHrUh6a8do7SZYWIBDm7EJEgZxciEqTZ66ZE7juwxBzzvJz0TK670+ijI+H+kfD2Wnq/0+zm55z7mLHX6Jxd3k7/Wa+jM5q+qGxV50tLFWr4ppZ/BsJ714cxd/XsQkSCnF2ISNAwfin8EKxCiauiUFtemmq2WqwP07nP+mH7mnAugo0tDuNtxA3jvZpww1/O+rRTtz/Vpkt/tYIquOZDjJnPpxpFw/Kyw/oS51rtqGcXIhLk7EJEgpxdiEiQZgeqa/R0KamC0lCF5ZzTGt5r2REXWht3mnzteNBeWBMev
zC2eLszOtpL2fn8Ka30obtU+Ixz+Wm8aOWH4jyBhm85DZ75bP6KsRxaXocXhunKaPg+nP6qnl2ISJCzCxEJcnYhImFwNHsZTVR1aai8lMui8s0+rp5nS94UVWTj6Atrw9L9816zj+bE9DNx9XD/kJ+mOuyeRcylnzXkL1tl5lNz3f5lrUT22YHf76fiZjR+ql2g5wdtuqx6diEiQc4uRCTI2YWIhMHR7GWoMfcdcKWk8spMLXGtzP5UfruPq9t4qMltNLx9C6Mt1w7/l8+PLbbNa9n81Z4xNBt+Z63zoS3D5xZFfutsaJfPLeB0eG6vjEkf40+185aKBrKlupyGD5ap8l1dUUXrOuPuni7E4dWzCxEJcnYhIkHOLkQkxKnZy+Lz23PmqJfW6P5c6Vj6mFvy3rXTue7tdnju+RzNPj8aXnfBpd3Pj4T7h5yeHT4f6tXR1LVGna5u+ZJWBXn3GdKxcf/1ZkpeuXP7ufOBHW7efOY+F8Td65z/3oVcevXsQkSCnF2ISJCzCxEJ0uxLUaTR8+aoV9HoAJhqm58z7vLRfR25ufGwfeGSsD2zfvHa0xtCO2Y/EWrE+bGwPTQbHj/yXnjutScXbbtoKJxXP+607pDTypmy1TkxafN638fkff81Fy4fHQTX/XV9u/OVpBNbaixT3YCGV88uRCQUOjvJcZIvknyN5Oskv5lsv4bkAZJHSD5OcrToXEKI3tHJMH4GwDYz+4DkCIBfkvxPAF8F8LCZPUbyBwB2APh+g7bWR9Uprn4oHqTLFkxx9aWm3DA+WKnFl4YeKUiHXRO2L1wcXvv8FYvt85vD4e36Te8H7Q3rzgXtDy+E/8tPnvxEaNvw4nTb4enQzuFz4WdHz7v5s5mh+fJDWNKNrf302IKheTpUVzgwLgjFefq9THVhz25tPkiaI8nLAGwD8GSyfTeAe5owUAhRDx1pdpItkq8CmAKwF8BbAM6Y2Ufdw7sArlrmsztJTpKcnMVMDSYLIVZCR85uZvNmthXAZgA3Avhkpxcws11mNmFmEyMYK/6AEKIRSoXezOwMyf0APgtgPcnhpHffDOBYEwbWRhWdXqIcdHZV1oIyVHltn+o55FNcw3PPrg33T1/uNPvVi1r52mtPBPtuu+J3QfvykbNB+/D5jUF739xfBO2zU4u6PJOKO+x+D/fsAW712cx3mJ6m6vdlQmsFSnxo+e+3KukQbT+WtOrkafwVJNcn79cAuAPAIQD7AdybHLYdwJ6GbBRC1EAnPfsmALtJttD+5/CEmT1L8g0Aj5H8FwCvAHikQTuFEBUpdHYz+zWAG5bY/jba+l0IsQpQuixQnB7b4LVyS0sXLNm84Kahzq4L2zMbQt248erTH7+//6oDwb57L/p90PYpr78a+0PQfufchqA9edH6j9+np9K27cxP88WC+zP03//soi7Plo72Zald2aoSujyzlLSPxPdBrLwKSpcVIhLk7EJEgpxdiEiQZu830pqz5LRGXxrZ3NLITK3LfHZ+TbDvbReuXrAw2/H5Dz4VtN88dWXQHn5v8eItVyra40teD/klm3yq/FzOXFP32aLceF+KOibUswsRCXJ2ISJBzi5EJEiz9xqvV9P4ed4FetPHlG3IlYNKafb3nGY/OhvGzf9v7pKg/avT1wTt90+tC9rr3lu89vC0W3LJy2YXd/dx9ta8+8BsiT7Jf0dVNLqP969yva+eXYhIkLMLEQlydiEiQZp9BfhYbjofO7svZ/lhIBscT8eUh8P48tCFsN1y2nj0g/DaY6fCc//v7y/7+P1TM1uDfc+N/1XQPjsdFho5c+LioD1+LJyDPv7HxWsPTxfEuh2ZUtJ+meVUbnw6Tx4ALBOj9zXqcuLwPo9+wGPy6tmFiAQ5uxCRMLjD+Aolh3xJoWwJYT9FM6dskm+7cJq5FU3SpZIzaaIXwiFsazrcP3o2PNeaU3767OLQ+4PTlwa7zrpDh8+Fdl9yOtw//qfwO0pf28sL+lCax3/fc+77Tq0gY74MlVtdpjD0lr5XeWHPuumD6bDq2YWIBDm7EJEgZxciEgZXs5dYGbQ0Tgemo2eZskhFoTgXKrKUJGXLa/Zw2aTWhxeC9thp/787DI+1Zhb3z427Q30FrAtOk38Ytkc+DH+PoQuL7aFZvwKsO3bWhRTPhb8Xp8PfK/i9vUb3KcV5obb2BixLUaitQHfXWj66hlVbPerZhYgEObsQkSBnFyISBlezl8FrMVcyuDDuni5nzAJd59r0Oi9VSsov95RdWipsjzid15oOY9JjZxZv90LLnds/4sjEvpdf+jiDf07h4uZDM6FdnHYLfs6Emt1mU5rdx9k9eXF1187o+Trpg7i6Rz27EJEgZxciEuTsQkSCNPtSFGj43I96vVqQf+1LSQWtuXyNXpQ50HJamTPp6bMl/88XlHsOruOnqPpc9wv5cXVz+zOx9MAOl9fg4/B5eQ9e35fU2f0eV/eoZxciEjpZn/1qkvtJvkHydZIPJNs3kNxL8nDy89KicwkhekcnPfscgK+Z2fUAbgLwZZLXA3gQwD4zuw7AvqQthOhTOlmf/TiA48n7syQPAbgKwN0Abk0O2w3gBQDfaMTKPiM37u6WEPZVpzIa3i3ZnNacXpNnNGLR8wGfS5+6lg27OflF8/DL4HQzXSkpb1cQRwfy8929Ri/Ifc9o+PTn646F92FsPU0pzU5yC4AbABwAsDH5RwAAJwBsrNc0IUSddOzsJC8C8BSAr5jZ++l91v73uuTjRJI7SU6SnJzFzFKHCCG6QEfOTnIEbUf/kZn9JNl8kuSmZP8mAFNLfdbMdpnZhJlNjGBsqUOEEF2gULOznZD9CIBDZvad1K5nAGwH8K3k555GLOwHKsTd8+a+A8jq25RWzuTR+3O7ad+ZZwler7YWdXqmvp1f6qisZk9rZa+5XT67+f0Fc9LT32FGo2c0ecGSWSV0da1x9MzJu1+mupOkmpsB/D2A35B8Ndn2j2g7+RMkdwB4B8B9jVgohKiFTp7G/xLLJ2vdXq85QoimiCddNj1sqrNEVeY6BcPEhXwJEJS4ckPUojCTL3llc+5arVS7FYbeiqbPZg3NCYEVpKwWDq3zwms5U1aBJSSC/06qDM3LhNZ6MEwvQumyQkSCnF2ISJCzCxEJ8Wj2BknrQPoQVlnSYaaiMF3GjvxUXKZP6JehqjP0VkWTA1ldnqPZ+0ajA32p09OoZxciEuTsQkSCnF2ISJBm7zZldKALV2dKXGP5VNv2AU4bp/Wu1/NFGr3Ms4gCnVyo0fPyC4rKUJWh6pTUPtfoHvXsQkSCnF2ISJCzCxEJ0uw1ky1ZVWMefiZe7TV8Qe58Wpd7Pe+vldH/Bf1CXsnsIo3uD88rHV1ZZ1f4/CrT6B717EJEgpxdiEiQswsRCXFqdq+9mpzfXkCV3O3M0tGePE1foD8zcfcK8exsXL1cqahS31Gd5ZxXuUb3qGcXIhLk7EJEQpzD+KqUqDbbZIXSonPnDvOLymMtvQzAIj6kWONQO/f3anLVlQEbtnvUswsRCXJ2ISJBzi5EJEiz10FaR5ZZ
LaboXGXxU2BzdbSbHls2rTcnElf7cwqF02pBPbsQkSBnFyIS5OxCRII0e900GQcue+0Szw8aXbG0+OK9u3ZEqGcXIhLk7EJEQqGzk3yU5BTJg6ltG0juJXk4+Xlps2YKIarSSc/+QwB3um0PAthnZtcB2Je0Vy9m4WtQsIXOX720o9FrD+i9XQGFzm5mvwDwJ7f5bgC7k/e7AdxTr1lCiLpZ6dP4jWZ2PHl/AsDG5Q4kuRPATgAYx9oVXk4IUZXKD+isXYZk2fGRme0yswkzmxjBWNXLCSFWyEqd/STJTQCQ/Jyqz6QeQIYvUUye7u6mJhcds1JnfwbA9uT9dgB76jFHCNEUnYTefgzgvwH8Jcl3Se4A8C0Ad5A8DOCvk7YQoo8pfEBnZl9aZtftNdsihGgQ5caLNr1cVkl0BaXLChEJcnYhIkHDeKC3K8RUSeFUmFCUQD27EJEgZxciEuTsQkSCNHvddHMaZR+tRts3RD6NNQ/17EJEgpxdiEiQswsRCdLsSyHdt3rQveoY9exCRIKcXYhIkLMLEQnS7INEk3H3Mucu0tFV7JJGXzHq2YWIBDm7EJEgZxciEqTZB5k8nV1V+5bR8MrZ7wvUswsRCXJ2ISJBzi5EJEizx0S/xKjL5gP0i92rHPXsQkSCnF2ISNAwXtRP0bBcobieoJ5diEiQswsRCZWcneSdJN8keYTkg3UZJYSonxU7O8kWgH8F8DcArgfwJZLX12WY6HPI8CX6nio9+40AjpjZ22Z2AcBjAO6uxywhRN1UcfarAPwh1X432RZAcifJSZKTs5ipcDkhRBUaf0BnZrvMbMLMJkYw1vTlhBDLUCXOfgzA1an25mTbspzF6VM/tyffAXA5gFMVrt0UsqtT2hms/WdXm5jt+rPldtBWmHdMchjA7wDcjraTvwTg78zs9Q4+O2lmEyu6cIPIrnLIrnL02q4V9+xmNkfyHwD8DEALwKOdOLoQojdUSpc1s58C+GlNtgghGqRXGXS7enTdImRXOWRXOXpq14o1uxBidaHceCEiQc4uRCR01dn7aeIMyUdJTpE8mNq2geRekoeTn5d22aarSe4n+QbJ10k+0A92JTaMk3yR5GuJbd9Mtl9D8kByTx8nOdpt2xI7WiRfIflsv9hF8ijJ35B8leRksq1n97Jrzt6HE2d+COBOt+1BAPvM7DoA+5J2N5kD8DUzux7ATQC+nHxHvbYLAGYAbDOzTwPYCuBOkjcB+DaAh83sWgCnAezogW0A8ACAQ6l2v9h1m5ltTcXXe3cvzawrLwCfBfCzVPshAA916/rL2LQFwMFU+00Am5L3mwC82WP79gC4ow/tWgvgfwB8Bu2MsOGl7nEX7dmMtuNsA/AsAPaJXUcBXO629exednMY39HEmR6z0cyOJ+9PANjYK0NIbgFwA4AD/WJXMlR+FcAUgL0A3gJwxszmkkN6dU+/C+DrABaS9mV9YpcBeI7kyyR3Jtt6di9Vg24ZzMxI9iQuSfIiAE8B+IqZvc/UfPFe2mVm8wC2klwP4GkAn+yFHWlIfh7AlJm9TPLWHpvjucXMjpG8EsBekr9N7+z2vexmz1564kwPOElyEwAkP6e6bQDJEbQd/Udm9pN+sSuNmZ0BsB/t4fH6ZJ4E0Jt7ejOAL5A8inZNhW0AvtcHdsHMjiU/p9D+53gjengvu+nsLwG4LnlKOgrgiwCe6eL1O+EZANuT99vR1sxdg+0u/BEAh8zsO/1iV2LbFUmPDpJr0H6WcAhtp7+3V7aZ2UNmttnMtqD9N/W8md3fa7tIriN58UfvAXwOwEH08l52+YHFXWjPlHsLwD91+4GJs+XHAI4DmEVb0+1AW+vtA3AYwM8BbOiyTbegrfN+DeDV5HVXr+1KbPsUgFcS2w4C+Odk+58DeBHAEQD/AWCsh/f0VgDP9oNdyfVfS16vf/T33st7qXRZISJBGXRCRIKcXYhIkLMLEQlydiEiQc4uRCTI2YWIBDm7EJHw/+yrlk5mujYuAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('crop')\n", "plt.imshow(data[0, ], origin='lower')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "After the approximate centering, we apply a relative alignment of the images with the [StarAlignmentModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.centering.StarAlignmentModule) by cross-correlating each images with 10 randomly selected images from the dataset. Each image is then shifted to the average offset from the cross-correlation with the 10 images." ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-------------------\n", "StarAlignmentModule\n", "-------------------\n", "\n", "Module name: align\n", "Input port: crop (70, 57, 57)\n", "Aligning images... [DONE] \n", "Output port: aligned (70, 57, 57)\n" ] } ], "source": [ "module = StarAlignmentModule(name_in='align',\n", " image_in_tag='crop',\n", " ref_image_in_tag=None,\n", " image_out_tag='aligned',\n", " interpolation='spline',\n", " accuracy=10,\n", " resize=None,\n", " num_references=10,\n", " subframe=0.1)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('align')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "As a third centering step, we use the [FitCenterModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.centering.FitCenterModule) to fit the PSF of the mean image with a 2D Moffat function. The best-fit parameters are stored in the database at the argument name of `fit_out_tag`." ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "---------------\n", "FitCenterModule\n", "---------------\n", "\n", "Module name: center\n", "Input port: aligned (70, 57, 57)\n", "Fitting the stellar PSF... [DONE]\n", "Output port: fit (70, 16)\n" ] } ], "source": [ "module = FitCenterModule(name_in='center',\n", " image_in_tag='aligned',\n", " fit_out_tag='fit',\n", " mask_out_tag=None,\n", " method='mean',\n", " radius=0.1,\n", " sign='positive',\n", " model='moffat',\n", " filter_size=None,\n", " guess=(0., 0., 10., 10., 10000., 0., 0., 1.))\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('center')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The processed images from the `StarAlignmentModule` and the best-fit parameters from the `FitCenterModule` are now used as input for [ShiftImagesModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.centering.ShiftImagesModule). This module shifts all images by the (constant) offset such that the peak of the Moffat function is located in the center of the image." ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-----------------\n", "ShiftImagesModule\n", "-----------------\n", "\n", "Module name: shift\n", "Input ports: aligned (70, 57, 57), fit (70, 16)\n", "Shifting the images... 
[DONE] \n", "Output port: centered (70, 57, 57)\n" ] } ], "source": [ "module = ShiftImagesModule(name_in='shift',\n", " image_in_tag='aligned',\n", " image_out_tag='centered',\n", " shift_xy='fit',\n", " interpolation='spline')\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('shift')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's have a look at the central part of the first image. The brightest pixel of the PSF is indeed in the center of the image as expected." ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(17.0, 40.0)" ] }, "execution_count": 17, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAD8CAYAAAB3lxGOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAATVUlEQVR4nO3dW4zc5XnH8e9vZmd37RhiTB3XsomgIYWiqDWp4xLRi8gVFUpQQiQUEaXIF0hOqkYCJW04XEHVSKVqILlK5YQEX1AFC1CIUKvKCkYqUjEyYA7GUUISaEHGJoABg72neXoxf7fL7tr/Z9ZzWPv9faSVd2bfff/PHPyb07Pvq4jAzMrVGHYBZjZcDgGzwjkEzArnEDArnEPArHAOAbPCpUNAUlPS05Ierk5fIGm3pBcl3SdptH9lmlm/dPNM4AZg/6zTdwB3RcSFwFvA9b0szMwGIxUCktYDnwN+WJ0WsBm4vxqyHbi6D/WZWZ+NJMd9F/gWcFZ1+lzgcERMV6dfAdYt9IuStgJbAZo0/3Q5Z88ZkDl8alByqtxcvawrO2wIkw1Yojs128Day07XxFy97avtX5fusXiPyZjo6k5SGwKSrgIORcSTkj7TbVERsQ3YBnC2VsWfNf/yg/M3EvUq96pFzcS4ZjM5V2Jcci5SlzEbKInLmDneMMzMJMa0U1PF9HT9oHZurkxd6fb6diboknUtwuPT/9H172SeCVwOfF7SZ4Fx4Gzge8BKSSPVs4H1wKtdH93Mhq72YSUibomI9RFxPnAt8EhEfAXYBVxTDdsCPNS3Ks2sb06lT+Am4BuSXqTzHsHdvSnJzAYp+8YgABHxKPBo9f1vgE2nXEHiNW7qtT6kXqOnXutD7nV19rV3I1NX7y4jjeRcvXrvIPk6PvO+R5B4rQ8o6i9jZN8TOI3F3PcgFvGeozsGzQrnEDArnEPArHAOAbPCOQTMCucQMCucQ8CscA4Bs8J11Sx0yrTAHwylmnKyzS89/OOaVFNO8o+RRlv1g0aSTUyJuiI7V0biD2c0nfjDoKRs+1LqD3qyTUypuXp3GbN/EJf5Q6N5/58W0R/lZwJmhXMImBXOIWBWOIeAWeEcAmaFcwiYFc4hYFY4h4BZ4RwCZoUbbMdgp2Xwg+dkl9oetERdynbmJToGI9NVCMRY4ibLdlhmZJbomkguCZYZlF3aO9ENqGauyy+zDFn2fppY9Sy3LDnkOgt7sHy5nwmYFc4hYFY4h4BZ4RwCZoVzCJgVziFgVjiHgFnhHAJmhRtwsxDzl/fKNLYkGzVSDR3JvQg1krhqxkZTc8V4/bh2YgxAjCWWF8suoZboWWlM93A/v8xSZcn9A5VY7ivayds60byT3eIvVVcvH3rbcyfrvvnOzwTMCucQMCucQ8CscA4Bs8I5BMwK5xAwK5xDwKxwDgGzwjkEzAo30I5B0eflxFKbmyaXBBsfqx0Sy8dTU7WX13cDzoznbooYqc9tJZfo0nS2D65uouRtmhk3jLky95setvllb5/U0m7ND9auqe7rqb1kksYlPSHpGUn7JN1enX+PpN9K2lt9bej+8GY2bJmHnwlgc0QckdQCHpP079XP/i4i7u9feWbWb7UhEJ2N4I9UJ1vVV4+eR5rZsKVe6EhqStoLHAJ2RsTu6kfflvSspLsk1b+INrMlJxUCETETERuA9cAmSZ8AbgEuBj4FrAJuWuh3JW2VtEfSnkkmelO1mfVMV295RsRhYBdwZUQciI4J4MfAphP8zraI2BgRG0fxkwWzpSbz6cBqSSur75cBVwC/kLS2Ok/A1cDz/SvTzPol8+nAWmC7pCad0NgREQ9LekTSajof/+8Fvta/Ms2sXzKfDjwLXLrA+Zu7Ppo0fzmxzJ5/ySXBMkuHKbvnX2Jceyw3V6YRaGY8dxnbrR42rSSW1WpM1dfVnMjV3kg0OjWSS6NlRmWbcjKjRG6/xcz9ObLNQotprFvE77ht2KxwDgGzwjkEzArnEDArnEPArHAOAbPCOQTMCucQMCucQ8CscIPfkHROR1OqGzC7iWgr0cGX3kQ00TGYXBKsPVpff7YTsD2a6Ehr5rrGkvt11mpO5iYaea9+XKuVm6uZ6TTNLrOWGJNeQCO16WpytljEZrDuGDSzbjkEzArnEDArnEPArHAOAbPCOQTMCucQMCucQ8CscINtFtICzUGZRqCRZJmJZqFoJff8G60f1x7LNbbMLKvP2unEGIDJD2XmSk3FzHii8ShRVvNY7nhjb9dPtiyxBBnA2ExmabTckmCaqW/KybbgRGKu9ENvpllobnPSIlYk8zMBs8I5BMwK5xAwK5xDwKxwDgGzwjkEzArnEDArnEPArHAOAbPCDXh5Mc3vEGzW55BGkutgJboBsx2D7cQyV+3RXIZOLa8fd2xVbq6jH6lvCTu2ZiY1lz48WT+mWd+ZN/NObsm2sYOJjVlfTm5I2h6rP95U8nrIdPklZa4v2snjJdo1529u6uXFzKxLDgGzwjkEzArnEDArnEPArHAOAbPCOQTMCucQMCvcwJcXozGnmaGHexFGovGI5PJVmf38ZrLNQoklwY6tyjV5HF1Xv2TWmo++mZrrwpW/qx3TUH1jyy/f+kjqeAdjVe2YscO5u+T0m/XXaSuxBySAEg1k2X0NU41A7eRjb2IuzcxpiPLyYmbWrdoQkDQu6QlJz0jaJ+n26vwLJO2W9KKk+yTlekfNbEnJPBOYADZHxJ8AG4ArJV0G3AHcFREXAm8B1/etSjPrm9oQiI4j1clW9RXAZuD+6vztwNX9KNDM+iv1noCkpqS9wCFgJ/Br4HBEHH+X6hVg3Ql
+d6ukPZL2TLaTC9Sb2cCkQiAiZiJiA7Ae2ARcnD1ARGyLiI0RsXG0Mb64Ks2sb7r6dCAiDgO7gE8DKyUd/2xlPfBqb0szs0HIfDqwWtLK6vtlwBXAfjphcE01bAvwUJ9qNLM+ynRmrAW2S2rSCY0dEfGwpBeAn0j6B+Bp4O4+1mlmfVIbAhHxLHDpAuf/hs77A92R5pxMtDhlOgEXmHshkTkeEInOwunEsmEAx86tP+b75+U2z7zoD+tfdV237vHUXJ8c+5/aMW8nlvF6YGxj6ng/Pbyidsz0eK5jcKaVuK0TS8QBxFgPG2fndvAtIL2cWeYukV2q7CTcMWhWOIeAWeEcAmaFcwiYFc4hYFY4h4BZ4RwCZoVzCJgVbsB7ES4g2bwzcImy2slrb6a+34bG2VOpuS7+8MHaMZkmIIA/Gl1eO+aV6SO1Y1Y0J1LHUyPR2JK8O8TcZeoW0G7lHuMayaaiDGWuinZyqbIB8TMBs8I5BMwK5xAwK5xDwKxwDgGzwjkEzArnEDArnEPArHDDbxbK7PGWba5IzJXeUy5zuGxjS6IXRY1cXZOJDqWXp89JzTUR79SOefzoRbVjHj308dTxpl9fVjvmQ/UlAdCcqr++Mg1FAO3EykLZR0vNJOpKrD4Eyb6pHtyf/UzArHAOAbPCOQTMCucQMCucQ8CscA4Bs8I5BMwK5xAwK5xDwKxwg+8YnNPhFD3s8svs8RbJvduU6FJs5Bq/UGJPuZljuZviv9+r7wb8r9ELU3ONJQp76u3z6mt6bVXueG/Ut06OvJe7rRuJjsH0UmWZvS4bvXu8zNy3gNQ+g/P+/yyigdDPBMwK5xAwK5xDwKxwDgGzwjkEzArnEDArnEPArHAOAbPCDbZZKJi/VFimcSK5HBMz9ZmmqdxcjaP1jTStI63UXONv1HetzIyPpubaN1nfvLN/xe+n5op2opvmrfq6lh3MPZYse73+th57N9nMlVjGKy21xF2uLqYT96+pRPcY5O7388Z0f734mYBZ4WpDQNJ5knZJekHSPkk3VOffJulVSXurr8/2v1wz67XMy4Fp4JsR8ZSks4AnJe2sfnZXRPxz/8ozs36rDYGIOAAcqL5/V9J+YF2/CzOzwejqPQFJ5wOXArurs74u6VlJP5K04J+3SdoqaY+kPZNx7NSqNbOeS4eApBXAA8CNEfEO8H3gY8AGOs8UvrPQ70XEtojYGBEbRzV+6hWbWU+lQkBSi04A3BsRDwJExMGImImINvADYFP/yjSzfsl8OiDgbmB/RNw56/y1s4Z9EXi+9+WZWb9lPh24HLgOeE7S3uq8W4EvS9pApzvhJeCrfajPzPos8+nAYyy8UNO/LeqIMafzql3fFZXqbCO3vFi2W6sxWV/XyPu57sPxw/WvutojucvYTCxD1h7NNYI2purHtN6tHzP2dq5LbTTRDdg8luvMa0zVj8su45XpPsx2mipz/5rO3QcjM27ufd7Li5lZtxwCZoVzCJgVziFgVjiHgFnhHAJmhXMImBXOIWBWuAEvLxbEnOaG5HZxuekTYzpd0AmJvedazWQTU6JpZeRo7qaYeqO+rkhGuxJ9Oc3J+tqbE8kGn8n6cY3ksmGaTjT4TOfqar4/WT/X+xOpuZionysyS5DB/EagBSdLLnt2En4mYFY4h4BZ4RwCZoVzCJgVziFgVjiHgFnhHAJmhXMImBXOIWBWuMF2DMK8jR1TXX69PH6yY1CN+nHZBG0lOtea7+duitHRZvKo9ZS58hPdjsps6NljPV0S7GiiY/BY/RhILgmWXF4s1Q047/bxhqRm1iWHgFnhHAJmhXMImBXOIWBWOIeAWeEcAmaFcwiYFW7gy4sxMzP/vLpfS06faQOKbLNQj8YA8xqkFtKcTO4f2Eo0C2WXUMuOq5NtFsqMS66WldnzL7UvIMBk/aaMMZXYuBHyjUAZiUatmHudei9CM+uWQ8CscA4Bs8I5BMwK5xAwK5xDwKxwDgGzwjkEzArnEDAr3EA7BoP5HU5KdNN1M38dtXOdX/M6sRYelJpLmQ0oJ3J1qZnI7WQnYPRwrl5RZhNOmN95upBEJyAkNwjt65JgJ5gq1WE5gA1JJZ0naZekFyTtk3RDdf4qSTsl/ar695xTrsbMBi7zcmAa+GZEXAJcBvyNpEuAm4GfR8THgZ9Xp83sNFMbAhFxICKeqr5/F9gPrAO+AGyvhm0Hru5TjWbWR129JyDpfOBSYDewJiIOVD96DVhzgt/ZCmwFGGf5ogs1s/5IfzogaQXwAHBjRLwz+2fReQdjwXcxImJbRGyMiI0tjZ9SsWbWe6kQkNSiEwD3RsSD1dkHJa2tfr4WONSfEs2snzKfDgi4G9gfEXfO+tHPgC3V91uAh3pfnpn1W+Y9gcuB64DnJO2tzrsV+Edgh6TrgZeBL/WlQjPrq9oQiIjHOPFKWn9xqgVkGiJ62lDUw+XF0k0fmf38Mo07AO3eLS+m6cS4xJ6M2eshJXtbJ5qFItt41M7tWZibaxFLgp1wrkT9i1iuby63DZsVziFgVjiHgFnhHAJmhXMImBXOIWBWOIeAWeEcAmaFcwiYFW6wG5LCorrLIhtViS4yZZfe6uXyYoklp2I6eSFTS4Il58p0A/ZS5rbPLM8Fyc1NsxulLtElwTJdkXOOt5jeTT8TMCucQ8CscA4Bs8I5BMwK5xAwK5xDwKxwDgGzwjkEzAo3+Gahxejh8lXZxqOeLmmWmauRK0yZC7BUm4UyerlUWbbxKLMkWGbvQ0g2MeXqSi2PNu8yenkxM+uSQ8CscA4Bs8I5BMwKp/Qa6L04mPQ6nY1KBuX3gN8N8Hi9djrX79qH46KIOKubXxjopwMRsXqQx5O0JyI2DvKYvXQ61+/ah0PSnm5/xy8HzArnEDAr3JkeAtuGXcApOp3rd+3D0XXtA31j0MyWnjP9mYCZ1XAImBXujAkBSedJ2iXpBUn7JN1Qnb9K0k5Jv6r+PWfYtc51ktpvk/SqpL3V12eHXetcksYlPSHpmar226vzL5C0W9KLku6TNDrsWhdykvrvkfTbWdf9hiGXekKSmpKelvRwdbq76z4izogvYC3wyer7s4BfApcA/wTcXJ1/M3DHsGvtovbbgL8ddn01tQtYUX3fAnYDlwE7gGur8/8F+Oth19pl/fcA1wy7vuRl+Abwr8DD1emurvsz5plARByIiKeq798F9gPrgC8A26th24Grh1LgSZyk9iUvOo5UJ1vVVwCbgfur85fk9Q4nrf+0IGk98Dngh9Vp0eV1f8aEwGySzgcupZPqayLiQPWj14A1w6orY07tAF+X9KykHy3FlzLwf09H9wKHgJ3Ar4HDETFdDXmFJRxqc+uPiOPX/ber6/4uSWPDq/Ckvgt8Czi+sMC5dHndn3EhIGkF8ABwY0S8M/tn0Xl+tGRTfoHavw98DNgAHAC+M7zqTiwiZiJiA7Ae2ARcPNyKujO3fkmfAG6hczk+BawCbhpehQuTdBVwKCKePJV5zqgQkNSi85/o3oh4sDr7oKS11c/X0kn7JWeh2iPiYHUHbQM/oPMfbMmKiM
PALuDTwEpJx/82ZT3w6rDqyppV/5XVS7SIiAngxyzN6/5y4POSXgJ+QudlwPfo8ro/Y0Kgei10N7A/Iu6c9aOfAVuq77cADw26tjonqv14eFW+CDw/6NrqSFotaWX1/TLgCjrvaewCrqmGLcnrHU5Y/y9mPXCIzmvqJXfdR8QtEbE+Is4HrgUeiYiv0OV1f8Z0DEr6c+A/gef4/9dHt9J5bb0D+CidP2P+UkS8OZQiT+AktX+ZzkuBAF4Cvjrr/Y0lQdIf03nzqUnnQWVHRPy9pD+g8+i0Cnga+KvqUXVJOUn9jwCr6Xx6sBf42qw3EJccSZ+h80nSVd1e92dMCJjZ4pwxLwfMbHEcAmaFcwiYFc4hYFY4h4BZ4RwCZoVzCJgV7n8BSgIlQPFZebAAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('centered')\n", "plt.imshow(data[0, ], origin='lower')\n", "plt.xlim(17, 40)\n", "plt.ylim(17, 40)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Masking the images" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Before running the PSF subtraction, we use the [PSFpreparationModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.psfpreparation.PSFpreparationModule) to mask the central part of the PSF and we also create a outer mask with a diameter equal to the field of view of the image. The latter is achieved by simply setting the argument of `edge_size` to a value that is larger than the field of view." ] }, { "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "--------------------\n", "PSFpreparationModule\n", "--------------------\n", "\n", "Module name: prep1\n", "Input port: centered (70, 57, 57)\n", "Preparing images for PSF subtraction... [DONE] \n", "Output port: prep (70, 57, 57)\n" ] } ], "source": [ "module = PSFpreparationModule(name_in='prep1',\n", " image_in_tag='centered',\n", " image_out_tag='prep',\n", " mask_out_tag=None,\n", " norm=False,\n", " cent_size=0.02,\n", " edge_size=0.2)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('prep1')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's have a look at the first image and show it on a logarithmic color scale." ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 19, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAodUlEQVR4nO2da6xc13Xf/+vM895LihTJK5omFdNOhRhqkdgooTqwPzhyZVFuEPuDbcRNDRVRwARJCqdJEcspWiBoUThFEcet27iCJVgtXMt2EkOCEFFiFRlFgEI2WcuOZNmWLEiQaEq8lMTXvXce55zVD3fI2eu/OefMfc6QZ/0AgrNnnzlnz2Pfs/97vURV4TjOtU8y6QE4jrM1+GR3nIrgk91xKoJPdsepCD7ZHaci1LfyYnv27NGDBw9u5SUdp1KcOHHijKrOX6lvSyf7wYMHcfz48a28pONUChF5aVSfL+MdpyL4ZHecirCly3innNtnP3n5saap6UtmZ+3BtZppSqtp++v269XtwetFbF9Cf/drth/kaSlpbvuzoJ1ldKxto2/fF3J7Lu31Rh5/9Oy9cNaG39kdpyL4ZHeciuCT3XEqgmv2LeaDzU/YJ1h3N4e6W1hHt1p0bIP6rWbXhv1683bQz3/m+VoM6eqI4PVSs+dS2h+g3YB4P4Damgw/o8N7f9u+lvY1jr5+T/E4K4zf2R2nIvhkd5yK4Mv4Dea25GP2CVrCsvlMaBkfLutl107TpbRs5+VxPtemc1F/Y3hurdNSm45N+nbZnpA1TGnYCNs5LdszMq3R+xA2xbG0Cd8nmxOXrFnv8Pxv2nPRZ3T09BdRVfzO7jgVwSe741QEn+yOUxFcs6+B22oft08EpiJpkMsqIW1rPmMNKkFbZ60G1wYJZTKX5bOk6RNyiQ10ed5k8xgPFIVIUT9pdmQlprUS05uBTYT8+RUOjDR9bq9zrZvt/M7uOBXBJ7vjVASf7I5TEVyzj8Ht2/+5aSfkthoicxSGSrpQ2qTDZ+y5Qhu0NsmmTLbxbMZq9KxVbDsPhbaWecfSa7Vt9wuSZHQIbK1LIa1s06drSa9v+zlUNzyWbPKhezGP44qvD2z42u2avsO7fsO0j77xpcJzXW34nd1xKoJPdsepCGMt40XkRQAXAGQAUlU9JCK7AHwNwEEALwL4uKq+uTnDdBxnvaxGs/+Sqp4J2ncDeFxVPysidw/an97Q0W0RkT8162wKFeUUTqFtN9KQDIelsr97oNPZrp43SLOTRs9Zs0d29uAxh50WhJVesZ+3AwqkspBm53MlSnsgRbZy3iugcFr2w480fOgTwemw6NjDN/yWfS2H015lmn49y/gPA7h/8Ph+AB9Z92gcx9k0xp3sCuAxETkhIkcGz+1V1VODx68C2HulF4rIERE5LiLHFxYW1jlcx3HWyrjL+Pep6kkRuQHAMRH5Ydipqipy5YWcqt4D4B4AOHTokBeDd5wJMdZkV9WTg/9Pi8g3AdwC4DUR2aeqp0RkH4DTmzjODeWOt/6uaUuDdDRrRvbHvnDRtkPdXWAjBq5gVyc9W2Qn1kaxHZ01et6w7awxWgsLifB8xvYnqR1Xna4lwT4H7y1w2mk+V9a2P0OOpQ/3UBJ6z9K3Nn2ll7IuR2DTl4z86ul75lTeENt/tdnlS5fxIjInItsvPQbwQQBPA3gIwJ2Dw+4E8OBmDdJxnPUzzp19L4BvDnac6wD+l6oeFZHvAPi6iNwF4CUAHy84h+M4E6Z0sqvqCwB+4QrPvw7gA5sxKMdxNp7K+Mbfsf9fDBsUA83liqJ+suWybTzU/Frn5GyWOPcbx6gHw2rZPm5zTHrWJI3eWo1mt+2cU+NZ9/VoXyPU7BLpZs5vxzb7kvx3gcbnzy8hzS4UO6+8XxDGB/B7oDJVkd6Ptlfs7ySMoXj0wpcxbbi7rONUBJ/sjlMRrtllPLs6ShiWSsv2qGooL+coPVQUxhpWQ8lLXAm4+iktJdO54Tj7261cyGlZnrK7LFkQUzo+XHVGaagIXoonGZnm6lGg6sjXRstlyngdLfvpWrVeIBHa9j3XOrSsp/BadtUNw22F03wt2ZBXDr2NTLJ91jZDDu8+YtrTkPLK7+yOUxF8sjtORfDJ7jgV4ZrR7OwCy66NxuWVQ1TZRZVLDFGIa2HFUz4Xl2hqUUgrh60G6Z+ydrFpLW2ze6y9NJve8vBtrFKzs22OdbU53yojIBL2Ss35fQWanU1r9PlyJi52zQ1djNn1NqH9lEizM1ymKvydkDvxNLjW+p3dcSqCT3bHqQg+2R2nIlwzmj0KR+T0RKGg5VRQHHbapn7WZuxem4/u49f2r7dGZnZ5DcNSc/p2OP0z97Om536j6Vep2fM6u8eOfi2PMzqWtzUi91rbrnWGj+sd21eUemvltaTDw/fRZ3dZ+4HlCZXX7lIa63S0mzX/HnWZBj4B/M7uOBXBJ7vjVASf7I5TEa5azX777CdNO/ZXJ1Ea2EDzOZtzSamMEqdVYlt50i0oSUy+2DmlXIps5+zfHhwehayynX2O+snnPCO7uwbtUt/4Mls594dt/vjYjk5SN6E2H2/87jkzNF+Lnoj97gObPe8dNKnEFfv0k40/SgtelKaazsV+IY/89AvYbPzO7jgVwSe741QEn+yOUxGuGs1+W43yWZIPeq1NJYQaNqWzBjo93W01e3+2+GPgNEl8dBLYX9MZe93+NvKz5/TPZBcO7dls22ZN3p+z7XTOfiY5ZbUOfcy1VlLaOC8R9UUvp74k5bRT1E97IDUbVm40PscD1OliLKv5fYQ2++i74FTd5CsflcAm/wwEOp01u2yzX5Z2rN398N7fNu2jr/03bDR+Z3eciuCT3XEqgk92x6kIV41mLyqLBCCKQReypadzQwGbzhTbvjmPHNtutW71WB7kXY583TkPHJdoIl0eatJ01vb1rjNN9K+z40znSGM2OU4/6CfNzunVSs3srOnDS1Osu/bp8+sUX4w1vsmdF/ns23YUC8+ZuoPceZrysfQ7oHac9pvzbwf+AJwzgXMksK98n/IgbgJ+Z3eciuCT3XEqwtQu429LPlbYn8xS+OH2baadzts1b2/X0G6Vzox2UQXiMEquQsqujwiW7myeYddQXlay22o6M3yiu9P2dXeTfJijtMkztl0nuZHUhu0k4WV8WZvGTSvYNB2+sX6XlqxcIbZRbIrjzyRUUfx5lrr1FlDmMhwdTxV5wNVoik7IprhZctleWjbtzXCn9Tu741QEn+yOUxHGnuwiUhOR74rIw4P220XkSRF5XkS+JiLNsnM4jjM5VqPZPwXgWQCXxPCfAPicqj4gIl8EcBeAP9/g8Y1EZqzvKKeWymZt+GGYVjkOi6Q2md4S9sEk8iCsNa8Vm9Y4TLVP5rX+9vAxub+SRk/mbCxos0XtBrXrWfDY9tXoQ6glBXmnAGS5/RCX+8PP+2Jiv4sOm+K4PBSZtNiF2HxfnJqLjmVNH6XyClNJ076F0ncVuceWifzwffEmB5vtGP4MykzNa2CsO7uIHADwTwB8adAWALcC+IvBIfcD+M
iGj85xnA1j3GX8nwH4Qwz/du0GcFZVL90eXgGw/0ovFJEjInJcRI4vLCysZ6yO46yD0skuIr8M4LSqnljLBVT1HlU9pKqH5ufn13IKx3E2gHE0+3sB/IqIfAhAGyua/fMAdopIfXB3PwDg5HoHU2RbT9pUNpns6tms3R9kt9UwdTJr8KRPepXSD9eWrb7NOfVUc/THyJqRSzJ1ryeX2OuHY0l3lNjRG7bNGn1b28aKzjaGBu3tDetMMFu3xu4W1WTKKff0Ymo/73NdazcOyVL7IfTZfZbdTula4XZC5P4aGeW53BN1hzqcvV1Js6NtX1zr8j4Gu9cW7HNwCbE+paXmEG16H7fv+PXLjx89d9/o6xRQemdX1c+o6gFVPQjgVwH8jar+GoAnAHx0cNidAB5c0wgcx9kS1mNn/zSA3xeR57Gi4e/dmCE5jrMZrMpdVlW/BeBbg8cvALhl44fkOM5mMLW+8QxrGi6jnDfITsmppAMJxGV8E9JiSY/8mMk3Hnyt4FI5p38mjZ6StE0pA3Y6E4ytRemwmlbntVtWZ++YsTp8V3vJtK9rDv2v55sXTd+2utX3DTJYd6ge9PnU7qHUg5zNKW1UdFP7eaU9247Cgik0N09H6+zI7k5pqKPyUOHeTX91C1tOS51QiTHNghgJ1ujsG8+lozgtdVm56DXg7rKOUxF8sjtORfDJ7jgVYaKavTBmnX2LKa0POO0P2SVj3R30kZ1dKFV0rRPVHzKwxgzLAkW+7zPU3kYafpZTR432iY7C6OtW9+1s25joA7NnTfvG9huXH++pXzB9bQ4qJxZzu2dyJtlu2nmg0zuZ/a6WWlaPdqjdnyFNT+miQq0c5RLgfACUajrhrzKw6ffJRl/r0u+CUlxJxuManXJMKaW49GggnZI0VPT71s5wTyW0uQPj2939zu44FcEnu+NUhKk1vUmTwuPLQgQ5LLU/2nVRyGTCxwqlG8pbbObjrKPDx/1Z28eVVtnUlrc5zDJIHUXusC0yte2asaa1fTPnTPttM2dM+8bGcBm/q2ZNb02ODSXO5+3C/m6QAnaZysde6NvXLrbtd5v1yRRHy+U0CKdN6BerPVpakwWLK8TmQfUZdr1tsLkW9LvI7HfLJtywogxX701IZkq95Pcc2fmCsWXF39Uo/M7uOBXBJ7vjVASf7I5TEaZWs2u/2PzFuiXpFeuYMNUUu9ZGZhEy72jkimsP720b9vd2UMjqDnssm9qUTW3BuWuk2a+jkNWf2famaf/c7Gum/Y7WadN+S22o6dti33NGNqyO2p9GjTRkk14fute2araP220Kxe1ROq0embSy4GeqHfvhZ202uY422wFALeivN9h0Zo/lJ4TyaSXp+PdK3gdS2reQLpk+SdMnQRVYXbR7NePid3bHqQg+2R2nIvhkd5yKMFWaXVqBS2Y+2m0UiG3lSmGo0V+xIMQw6iP3WaV0RBm1uXxUN9DpfZstC/05e+5shiutki23NdR2czPWpfKGWevi+tb2WdM+0HzdtEONDgDztaE77SyblIk+7LV35TZ8dmdideNsMtxPaJHrbZ8M2imloc64IiyPJRm938IVY9lGL1ypNdDscRh0sattQudO2qz5C8qApeQAwGZ0EBQCK43h6zVZ2z3a7+yOUxF8sjtORfDJ7jgVYbo0e+gP3ye7I/sDc5qfkn5zbGr1aE4hlxn5NaezVid2d5Bm3zXUZ73rSKPPkUZv095C2wrDmbnh2PZutxr97XNWk+9vWjv7bvJ335FQKulARu4/cAobyXMv77v8mPV8jXJJ5aSNM7Jns5TuBP4GGfmnp2TrVt4PoP6sO/wuNSF/i5w1O4XAclQqjbsW+EzUl+333N9uf2N1TltNCP++gzRVQimsPjjzzy4/3i67/uGoc/qd3XEqgk92x6kIPtkdpyJMl2ZvBZqd0w8xZGdnu2REgW1SKT49m7UfS3+OyhdRjHpoS89mSzT6THGZ5e1BOug97UXTd0PTavgb6udNezdp5d01q5Xfsn9jdXrITTcOz916ZZ/p66jVmOcym097MaPcBcRyffj6jPR+t2+/Ky4l3ac01t1keK6cNHnWpXaH04LbcXGKLCPh2a8+2kKy45Ye+93T7yhMTU0p2sK9LumNnjd+Z3eciuCT3XEqgk92x6kIE9XsyXabjliCmF3W5GEq3ZV+8h1mzc6le2eGgkupxHLetLquv822e5T+uW+HjTTQ7KvV6Ntm7PvaHeSV29Miu3nNavLtiU0dvYN80jdToxfxM2TD779sNfzZzCbiO8c1sYhOoNl75GffpTJL/cz2L/bsfkEe2NK79L1HpbjJv53TVNeo1LRJJU2v5TTVfJ9NaO8BS/Z3koS2dcrHKOHeQcFWl9/ZHacilE52EWmLyLdF5Hsi8oyI/PHg+beLyJMi8ryIfE1EirdUHceZKOMs47sAblXViyLSAPC3IvIIgN8H8DlVfUBEvgjgLgB/vpqLG1MbAA1S8Qibynrkq0jL9Mg9NqocEiyxeBnfohBMWq6lVNWFUyFpWHW0QdVlqGpLg1IyzTTs0ntbY7is31Yjd1dyf50T+5n87I2TWbaXweP60Qv/wLRvaFiTYkpL9cXa8HfS5Wozqf0NdSjXdGyqCyRBnaQip6nmQqwUpRqvmXVkF5veuJ1RlaGEzIAaSIyETG9qlvXrML3pCpfEY2PwTwHcCuAvBs/fD+AjZedyHGdyjKXZRaQmIk8BOA3gGICfADirqpduU68A2D/itUdE5LiIHF9YWNiAITuOsxbGmuyqmqnquwAcAHALgHeOewFVvUdVD6nqofn5+bWN0nGcdbMq05uqnhWRJwD8IoCdIlIf3N0PADi52osLV2ZlF9jwWHZNpGOj/mg/YPh3jd1hOYQ1bbFGt2Nh7aaBW6qQOaZGLqv1xI57pm41+0xt2G5QSSYu0TRbUnl1WmGT4R7S7EzoXruYtgqOBFIOOxVKNZ2M/o0pm9Jq3KYUV6zLw/0B2lNiM17kvU1tduE2e04z9Bmk6RWPY8bZjZ8XkZ2DxzMAbgPwLIAnAHx0cNidAB4sO5fjOJNjnDv7PgD3i0gNK38cvq6qD4vIDwA8ICL/HsB3Ady7ieN0HGedlE52Vf0+gHdf4fkXsKLfHce5CphsiCvri4JUUqUhr9xfI4USiKSMQ1qbbNOkYbH9lYciIx4DENKM7GJZSzgF9ugU2gkZZznd09XCnNi9Bk6nVQSnoa5zTeb1wJmla8VtJuznNNXRfgBrdDqeNb75fbO7bLgB4O6yjuP4ZHeciuCT3XEqwkQ1u3K66ND/nTS4KQ0FQCjds9ZZYJFGCsoG5S0KZaS0vpEmJ0iGQ8KyQNSnJSeL0igFYq5LmwV93ky4SmmRv8A8pddilvKhz8SbqQ2PZT969p3vUchrGqap4u+Gvzu6FbJ/Rb2kqrih5DcV7Q9QuG1o45eobNV4ezd+Z3eciuCT3XEqgk92x6kIk9XsXYpRD+zsQrZENFgwUT87G7OvfKB5olK9q9Xo7A4QSFCl8kQ5OVBzeWL25e4FOp01e4dEY6fM8DulsL96m
+zuDbFiOPQn6NN7jtJUpaTZKZV0WD5KudxTSenost+JCWcnHR3Fs5fJ7ChfQ/h4bf4Vfmd3nIrgk91xKoJPdsepCBPV7PkFG8ectIeB48qaZdGmURal9MOzNuic88yFpXokY4OqbSZkP61xFmuKd0+CkjtZnzR5nzQmpT5e6lsdfr42fB9zNWtTPpPaHNYLmW2/ftImC9q9f9UpBjaFk1QOisVvTvsWReWiznS3mb43u/YzWqbPs0OppLNQw9N3RVsFpW3+3YS6nFwJkPS1sM3nrnE5qM5wfyss3wwAGsazF9jc/c7uOBXBJ7vjVISp8r80qabKXABTXmOxy+to8xqbQRJa1icpL9NR3A6Oz/tkeutRtZmGbXdqdpl5MQh5PVe30uRM3y5hTzfsMv6n9bOmff2rN9lxvuU5bAW8bF+ir5JNhrxsv0BVXt/sD5fqZ3tUAbZHqaSpsgrLqPD7kZTSN/P3Tm1emkfmtKCfpWDc5t8ctWkZj35wAq5+JOH78CqujlN5fLI7TkXwye44FWHKNPtQiwiV8ZEZyufMaadJxwiZ3iRwMZS82AxS79h2RlXsaj0yHXUCzU7psDJKR9xPrD61BkW6DqWsmq3bzYJttRtMu02ppft62rTfFpjmNtos98OX33r5cYdCcVmjX8jtd/lyf7dp/7jzFtN+aWnX5cdnluZM32LXfjn9/mj3WADQ7rBd65Amp4jryHzGUpl1ePA7qvWKNTmfu75MVVt7BVWKKX2bhBVe15NK2nGcawOf7I5TEXyyO05FmCrNjjzUKTQ01iJKooft8lGI4LCd9MkVkdNScbpnKp8buc8GsrEWlf0luzu9DS7gtBiGSVIcZKtm7eqNKNbWspTbVF6vZucuP9754jsKX5uRvZZTYvWi8NqdI8/FdvSzmXVxfam7x7RfXrretE8vDd/3xY59Tz3yY8gpzVfepbTLvaD0cfS9sp3dNGMNH2n24FzkixFpeP4NkmaXfkGKbNLsj5z8L8PXyX8+Meplfmd3nIrgk91xKoJPdsepCNOl2YugsD40G1c+7hJRWqBAs3esHqqVpKmqd2w7a1NZ4KB8FJf1jaQthXNqRto4EPUXKdUxa3hOU53TwMMUzACwEPjS76jZsslNMhonKN4PyOk+kQXvKwpRpfTPr/etrfxkZ6dp//TiDvv65aFdPrKbk0bPWKMv23b94vD4+mLxXkycMty22XZe64f7QuS7sWRfnKRkK+cS5KTZNQiNlv5qclgH11zTqxzHueoYpz77jSLyhIj8QESeEZFPDZ7fJSLHROS5wf/Xl53LcZzJMc6dPQXwB6p6M4D3APgdEbkZwN0AHlfVmwA8Pmg7jjOljFOf/RSAU4PHF0TkWQD7AXwYwPsHh90P4FsAPr1RA4tSSbOu5n7W6KSJtBHqKdJDFNeslK4ospGyDTXQelwiKPK/JimcU8x0GFpPplicJ42eZsWlj95sWa18srHz8uO5uhWoLTIac7tBgjUpyIW8nNkP4Q3S6K93bXuB/N0vLFvf+dCWHtnRKX8ASLPXlqjE88XhZ1i32xZI2OmBiPIgRL+DwDe+Q+W1aZ+INXnSIR2+Rl1exKo0u4gcBPBuAE8C2Dv4QwAArwLYu7FDcxxnIxl7sovINgB/CeD3VNVU4tOV7JBX/FMvIkdE5LiIHF9YWFjXYB3HWTtjTXYRaWBlon9FVf9q8PRrIrJv0L8PwOkrvVZV71HVQ6p6aH5+fiPG7DjOGijV7CIiAO4F8Kyq/mnQ9RCAOwF8dvD/g+sdTJhKWuocz06po1vWhqxc/onj24MUwpxmWjj/F8WRJw3S8GRDzQJ5xXnLlHRdXCaYffiH74P1fEo2+YtRaSlKyUylkLYHn9lc3fqYt+tWsDZJs3PJpoz3D4IyTEup/W4WqX2R8sYtUUx6kS2dNbou2/dYu8B2dTvORpBAgDU3pxhnjc52+MZSPrLduGA/z2TJtlmzy5J15uAS5Gu1rYeM41TzXgCfBPB3IvLU4Lk/wsok/7qI3AXgJQAfX/doHMfZNMbZjf9bjE5Z+YGNHY7jOJvFRN1lj+XfMO3DO+8aNiLTW3GV1sj0Ru6HGi7RCsJfgThtFXuOFoU2lppvoioiFFYZvO2crpNRmuScTIaLtMTtdihNdXu4dG817EDbDXuxZq3Y1JbraAnBVVm6VEm1T+8jo/fBy3izdO+Rq/KibTcu0LL9ommiHsQQ8/cYVQYi91hettd5Gb84PGH9otUI0qUfBi/LyR08ursGxz/y8ue5dyzcXdZxKoJPdsepCD7ZHaciTFeIa+gSS+6xumR9G1nTRBpH2E11qK+Uq7iSiyung+Y/iazpJTCJcWij0vuQkq2H0CtVonJE9ljWumlq30g6Q1o4cDvtUojwctOevF6nMOAC91jAanjW8ylp9kijUzsnc5qE6Z/J/bV5rlij1ygteJguKqqkSm8xdH9dOXdGbfrMzga/0S5p9g7Z+erFe1L8e49CvNeA39kdpyL4ZHeciuCT3XEqwlRp9qNvfOny48Pzv2k7Mw5LpdS7rIFYDAd2d2GbfFl56Mg2PrrNdvYoLRVr9KLsTzwsSo/F4bMJlaXKKDVy3hyeMG3ZgaUN0skNSpNEZayE0m2HbsBRVm/yJVAOS6V2sky29OXh69mO3jpL6Z9I6nJ4sv2uit1jOZVU85zV3Qnb0t84h1Foj45tNkccOWDZftlHz95bfPwY+J3dcSqCT3bHqQg+2R2nIkyVZjdk7JhcMlT2Z1cSYIFe5fI5bAtnOPQx6g9FKoewsr7nkkLRucKBcSc1C/YOgDjcNguyPeVcNqlBdt4m+cLX2HGcP+/wYC6FzCmbSZNzeqdlOj6Qr3Wqcc3tJGO7+uhx1jvs625/c/WLFKZ6ji7Gv6NQl0cOFLxvwZs75J/R49jo9eN3dsepCD7ZHaci+GR3nIowtZo9tLkDwOHdR+wBrOlzztFM7VATsW07sqOz73tZf+Abz5WkKeUVbyUU6e6SisyRDZ8qSyFrkY96YIfPo9LSpO9bNO6Sa4XwZxClXGZNHvVTO/BRbyyXaHKivsy/g7CPNPoFsqOfp3iM8+x4T/seoW2cYzPYrs5+I9R+dOl/YqPxO7vjVASf7I5TEaZ2Gc/oMi2p5my1E+1S6s9aQXZOCu/kCjFs4tLEfkwJhT7W6sN2HAlaYtaLzGU6so+tenytnD2GOX1WsErNm+RKW1LJpmjZvnKx4DpsXqQ2Z2mNXFp53GH23n6ByQ9xWCpXZkm6WdBHlWsX7cCEMhQz7LIdLt3zxUV7bpadCYf1knbZBPzO7jgVwSe741QEn+yOUxGuGs3OpojDu36j+AWc/ynUX2SW44qvQu6zSZSWmt0/C3R2lNKquB1q0oTNdnwuNvNxpiOuThs0M7IEsWaPQnMZls7BuTnMN6qs0mfzGZ97dOgpu7iqsGttVthOOsPBJcuUvnmZNhNIs0curqzDuR2+NtL39rs5ln195Gs3Cr+zO05F8MnuOBXBJ7vjVISrRrMzkTttmYYP9BSn9dUG2eSVbfSk6euURqkXpLzi8M6s
2M7OGP2fsqAn2zf7A9C1axSGmofVaKN0TXRudo8tS6clo/s4RDgqn8VVdDldVNBf69F3Qa7LCfUnvQJbOe3FlGp0KtnEYah5Z+guK43itFOPdb9S2L8Z+J3dcSqCT3bHqQilk11E7hOR0yLydPDcLhE5JiLPDf6/fnOH6TjOehlHs38ZwBcA/I/gubsBPK6qnxWRuwftT2/88MZHM9bVBQZv0masKTWy8xa3Q52dk0ZPyHSrtWINX+sG+p81O6F1tqOziLdNCYR3QvbpvM42/WLfeA5jjfJHFxwbafg+a/rROpxfG4cbsw5njR+ci8smsy28wG5ehlAJMf59ToLSO7uq/h8Ab9DTHwZw/+Dx/QA+srHDchxno1mrZt+rqqcGj18FsHfUgSJyRESOi8jxhYWFNV7OcZz1su4NOl1Z845cw6nqPap6SFUPzc/Pr/dyjuOskbXa2V8TkX2qekpE9gE4vZGDWguPnrvPtKPyUQ1y/g6IdDSn/eW0wKyFAx1ZI+2ak66WlDUm25QL9CmnGy6rWkWppjRILS0cDM/vOboW+w8UaXTW3CXvmbNU90frW055HZfusv111uxBPDuXVY72avg3w+1Wy1457Kf4i8c2Ic3Ualnrnf0hAHcOHt8J4MGNGY7jOJvFOKa3rwL4vwB+TkReEZG7AHwWwG0i8hyAfzxoO44zxZQu41X1EyO6PrDBY3EcZxO5an3jyzi68N9N+463/cthg+3RJSbQqMRzZLsN+tnvO0pbzTZlOleoZznunv0B2M7OPvukfUPbOfu68xKP7epRrDzngivo43GUltPi/mAsZfsnEXyuwLauPfJ9jwZSvK/BtnQE7aO0hzQNuLus41QEn+yOUxGu2WU888hLn7v8+PA77zZ9kUmL3VQpLTUvvc25omVjsculcH/weilz10ypIsl11hRUWOmGrY09lgh0AKfIoiVtuFTnMFQ+NnIDrnF4LbnyNof3pNhUyZKB01BZ85oES/fIvZg+73yxY9rRsp3gsOtpw+/sjlMRfLI7TkXwye44FaEymj3k6A+tD9Dhv/9Hph1p5ZS1HdccCtxQSTOWhalyqqPQ3MbhmkqakcM7oVazF6XI5kBbPlfOtjfqZ10d6vSwxNIVx80mRB4Lp7EOrh2FtNJ7TKikE6cgMy6yJaY37VB8csNOl0cv3o+rCb+zO05F8MnuOBXBJ7vjVIRKanbm6DP/obD/9p//N6YtObuhBnZg0oFlZX8jzV4EaUalNuvVfMb2Rxo/fG2vOFVX9FpuhyWbOUS1KJ0zgJxKYoNeXw/LWNM+Ru28tYVHnyfp7vz8hcuPORW01O04JpHueTPxO7vjVASf7I5TEXyyO05FcM0+Bo9+/9+Z9h0/+6/sAfWhYTiy65K+57ZyWut6Qa1krqtMsI0/KUq3xaG2y6R1KVw2SslM19KZxsg+vla4xwEg0v9ROuhwnF2yoy+RZucSTqzZw3Zuj32s99WR170W8Du741QEn+yOUxF8sjtORXDNvgYe+cl/Gtl3+Ibfsk9wquOyGPXm3MhjpUu+3Kx9o7JVpLN7gd7lGPNl2mvg2O2oZJY9d9YavdcQaXSiSKMDNjuUdIrLKEd7IsvLpn0sfaDwWtcyfmd3nIrgk91xKoJPdsepCK7ZN5ijp79Y2H/7tjvtE6ydA50elSOiksLSK/lbzX75oQ4nHR3Zq9nez7q6IB13lO65pEx1KaEO5z0Paj/y0y+s71rXMH5nd5yK4JPdcSqCL+O3mLJURrfv+PXLj5Ntc6ZP2fV20ZqVZG7GHr9E/eHymtJja4fSJs/O2v5207ZnKW11sFTPG3Rudr0tucUIpaIOw2uP/vg/Fr/YGYnf2R2nIvhkd5yKsK7JLiKHReRHIvK8iNxd/grHcSbFmjW7iNQA/FcAtwF4BcB3ROQhVf3BRg2uijy6juqftyUf27iBvP7Gml8apamm9rH8G2s+t7N21nNnvwXA86r6gqr2ADwA4MMbMyzHcTaa9Uz2/QBeDtqvDJ4ziMgRETkuIscXFhbWcTnHcdbDpm/Qqeo9qnpIVQ/Nz89v9uUcxxnBeuzsJwHcGLQPDJ4byYkTJ86IyEsA9gA4s45rbxY+rtWxpnEJu9NuPNfU57VK3jaqQ9j/elxEpA7gxwA+gJVJ/h0A/1RVnxnjtcdV9dCaLryJ+LhWh49rdUx6XGu+s6tqKiK/C+BRADUA940z0R3HmQzrcpdV1b8G8NcbNBbHcTaRSXnQ3TOh65bh41odPq7VMdFxrVmzO45zdeG+8Y5TEXyyO05F2NLJPk2BMyJyn4icFpGng+d2icgxEXlu8P/1WzymG0XkCRH5gYg8IyKfmoZxDcbQFpFvi8j3BmP748HzbxeRJwff6ddEpFl2rk0aX01EvisiD0/LuETkRRH5OxF5SkSOD56b2He5ZZM9CJy5A8DNAD4hIjdv1fWvwJcBHKbn7gbwuKreBODxQXsrSQH8gareDOA9AH5n8BlNelwA0AVwq6r+AoB3ATgsIu8B8CcAPqeqfw/AmwDumsDYAOBTAJ4N2tMyrl9S1XcF9vXJfZequiX/APwigEeD9mcAfGarrj9iTAcBPB20fwRg3+DxPgA/mvD4HsRKVOG0jWsWwP8D8I+w4hFWv9J3vIXjOYCViXMrgIexEmg3DeN6EcAeem5i3+VWLuPHCpyZMHtV9dTg8asA9k5qICJyEMC7ATw5LeMaLJWfAnAawDEAPwFwVlUvpb2d1Hf6ZwD+EMMct7unZFwK4DEROSEiRwbPTey79Bx0I1BVFZGJ2CVFZBuAvwTwe6p6PvQln+S4VDUD8C4R2QngmwDeOYlxhIjILwM4raonROT9Ex4O8z5VPSkiNwA4JiI/DDu3+rvcyjv7qgNnJsBrIrIPAAb/n97qAYhIAysT/Suq+lfTMq4QVT0L4AmsLI93DuIkgMl8p+8F8Csi8iJWcircCuDzUzAuqOrJwf+nsfLH8RZM8Lvcysn+HQA3DXZJmwB+FcBDW3j9cXgIwKUqDndiRTNvGbJyC78XwLOq+qfTMq7B2OYHd3SIyAxW9hKexcqk/+ikxqaqn1HVA6p6ECu/qb9R1V+b9LhEZE5Etl96DOCDAJ7GJL/LLd6w+BBWIuV+AuBfb/WGCY3lqwBOAehjRdPdhRWt9ziA5wD8bwC7tnhM78OKzvs+gKcG/z406XENxvbzAL47GNvTAP7t4Pl3APg2gOcBfANAa4Lf6fsBPDwN4xpc/3uDf89c+r1P8rt0d1nHqQjuQec4FcEnu+NUBJ/sjlMRfLI7TkXwye44FcEnu+NUBJ/sjlMR/j9wOHfhq2SlAwAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('prep')\n", "max_flux = np.amax(data[0, ])\n", "plt.imshow(data[0, ], origin='lower', norm=LogNorm(vmin=0.01*max_flux, vmax=max_flux))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Later on, we require a PSF template for both the relative calibration and the estimation of detection limits. Therefore, we create another masked dataset from the centered images but this time we only mask pixels beyond 70 mas and do not use a central mask." ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "--------------------\n", "PSFpreparationModule\n", "--------------------\n", "\n", "Module name: prep2\n", "Input port: centered (70, 57, 57)\n", "Preparing images for PSF subtraction... [DONE] \n", "Output port: psf (70, 57, 57)\n" ] } ], "source": [ "module = PSFpreparationModule(name_in='prep2',\n", " image_in_tag='centered',\n", " image_out_tag='psf',\n", " mask_out_tag=None,\n", " norm=False,\n", " cent_size=None,\n", " edge_size=0.07)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('prep2')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's have a look at the first image from this stack of PSF templates." ] }, { "cell_type": "code", "execution_count": 21, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 21, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAaoUlEQVR4nO2dbYxcZ3XH/+feedsXe9dOFtfECIOIivKhBGmVguADJA0JFJF8AARFyJVc+QtIQSBB0kqVkPoBVIkXqRXIahBuRUl4VaIIcFwTVCFVgXUTICHQmCgpcW3v5sX22uud19MPe+15zpndmdmd1/Xz/0mrnWfunbln5+6Z5/7vOc85oqoghFz7JKM2gBAyHOjshEQCnZ2QSKCzExIJdHZCIiE3zINdf/31un///mEekpCoOHHixEuqOrfetqE6+/79+7GwsDDMQxISFSLywkbbeBlPSCTQ2QmJBDo7IZFAZyckEujshEQCnZ2QSKCzExIJdHZCIoHOTkgk0NkJiQQ6OyGRQGcnJBLo7IREAp2dkEigsxMSCXR2QiKBzk5IJNDZCYkEOjshkUBnJyQSuio4KSLPA1gGUAdQU9V5EdkN4EEA+wE8D+DDqvrqYMwkhPTKZmb2d6vqzao6n43vBXBcVW8EcDwbE0LGlF4u4+8CcCR7fATA3T1bQwgZGN06uwJ4VEROiMih7Lk9qno6e3wGwJ71Xigih0RkQUQWlpaWejSXELJVum0S8U5VPSUirwFwTER+F25UVRWRdRu9q+phAIcBYH5+ns3gCRkRXc3sqnoq+70I4IcAbgFwVkT2AkD2e3FQRhJCeqejs4vIlIjsuPIYwHsAPAXgYQAHst0OAHhoUEYSQnqnm8v4PQB+KCJX9v93Vf2JiPwSwHdE5CCAFwB8eHBmEkJ6paOzq+pzAN6yzvMvA7htEEYRQvoPM+gIiQQ6OyGRQGcnJBK6jbOTbcidswftE4V89y+uN8ywsbJixlou2/3XbuBmG206xbHGd7s/LhkYnNkJiQQ6OyGRQGcnJBKo2bcRtycfMuOkVDJjKRXteGrSvkGo2UONvR61uj1W6uaFCXtsSLC9bl975+6/sfs2rKbXWs2Mj148AtJ/OLMTEgl0dkIigc5OSCRQs48ZXpe3QwoF+0TRanYfV9fNaPZcao+Vd/8q2qY0QeLmkIaN2fvXiovpv/e1n7S7r1y++vgn5+7f+LikLZzZCYkEOjshkUBnJyQSqNmHzGY0ORKrmxMX25adO8xYJ+32Rslr9ubp1qS9Zl+/omD45o2Nt7n7AVK1cXfUnWav1tqPg8d37Phrs00rVTN+tPytje2KHM7shEQCnZ2QSKCzExIJ1OwDZlMa3ZG4OLlMT5mxzkybcW3GavZ6yZ7eRqH53a6dvuY7aPa2mt7F0ZOqHadll3e/anV3suI0f5BrL2o/AwQxeAB4T+GjZvxo5dttDI0LzuyERAKdnZBIoLMTEgnU7H2mF40OAMlUU5MmO6wm1+tmzbi6265Xr+60p7NWst/ljVxTC6sN4UNd2L1jnN1tlyDsnrg4errq1q+7KSbnc+V9XD7Iy/d59MhZvQ+3XsCfj5jr4XFmJyQS6OyERAIv4/tAr5fuIeGle2PPbrOtvMeGnSo77bV4rWSvxet5O9bgbPtLaX8ZD/HbXTisJbzWfJyW3b4utVbq9uAtqbt++W04diWsNkt4rmK7pOfMTkgk0NkJiYSunV1EUhF5QkQeycZvEJHHReSkiDwoIoVO70EIGR2b0ez3AHgGwM5s/EUAX1bVB0Tk6wAOAvhan+0bS/qr0d0y1ZnmuDZr01+9Rq9M2e/quqvuXC9srNkbvsqU1/AdpgFp2PdOg25Qmjg9X/ca3tm16jW8HUtYIsvr+Zz7Qzah6WMLy3U1s4vIPgB/CeBfsrEAuBXA97JdjgC4ewD2EUL6RLeX8V8B8FkAV26rXgfgnKpe+Rp9
EcAN671QRA6JyIKILCwtLfViKyGkBzo6u4i8H8Ciqp7YygFU9bCqzqvq/Nzc3FbeghDSB7rR7O8A8AEReR+AEtY0+1cBzIpILpvd9wE4NTgzR0tfNbpv2bTnejOu/klTs6/utvc8qxMu/dXdEvUa3Wv4UKe3bMu7/Fc/Dfj0WCeNw1TclrJULvs1qTrN7uPs/thhuyi/7Neb6WP6TtP7VlMh17qG7zizq+p9qrpPVfcD+AiAn6rqxwA8BuCD2W4HADw0MCsJIT3TS5z9cwA+LSInsabhWb2fkDFmU+myqvozAD/LHj8H4Jb+m0QIGQTMjR80Xr/O7DTjxoxbpjrZPCUtue5eoxfd9gm7vebG9VJT+9aLbtlpwYtyO4RbWZpUfP568NDF0f2+9bK/t2AvMNOi/bc0S179clgfk69U7Di1uQmNlZXmW/WYZ7/dYLosIZFAZyckEujshEQCNfs69DWuPm1LS2HG5sLXdtg2y42CF8sBblPDhpxbNHp1hxXa9algXPTx6DbtnABozeXh5zZOpheXC1+3Mhq1Vbu9Oul0d83F0oM4e9JurTtay23DtYdKgph+/cJFu2/DJgRca3F3zuyERAKdnZBI4GV8n/HpsMnsjBnXd9pr7XrJhYaCUlKNvA9R2bG/bK9N2bBUfdpemidTzUvaXMF1ZXHLUl2EC/WaK4Hl/nUawaV7vebs8CFC/3dU7ZyT+PTa4LI+Td2+BRemc51rfaVaKTVlU5q3sczGhQtmrOUyriU4sxMSCXR2QiKBzk5IJFCzo8+htjm7ZLVxnU2PrU07nZhzyz2DYd2H1mxmLSozVhvXZmz6Z7rDhp2KxUCzp1bP+wYwNafRG75FjPguL82x7zbjS2DVbbQRNdfUxafbhv+macHre/vmScV+aEnV37doHtx3yfX3A+ovv2rG2z0Ux5mdkEigsxMSCXR2QiKBmr0PpLt2XX3c2GXTY2szVqDWply8esLFnIPlnrVJt81pdpP+CiCZtJq9VLJieLLYzFvNpy7+bN8atYadBypOw6/mrd4tB2PX3QnSsK/16bStGt0Slq1KXTqxuI6xibMzqbqOspXmZ5ZPXTnslpZW9vOrv2I1/HaDMzshkUBnJyQS6OyERAI1+xZIptwyyr3NevjV3a7M1LT9iGtuOacvPVWZbo4rNq0elZ0uNj5pdXehaDV7IWfHoU4v+W0uIT2XtF/yetktQ10uN+9NnM/bpP0KXPlsF4iXDv2iw6W8LWWpa17/u+0uhp8rNz9/X8K66PR/WrWfUXJ5FdsZzuyERAKdnZBIoLMTEglRavZec+GTXbNmXJ1patSWOHoHjV5zbZjCWHpt0mrIxoQVpGnRCthiwWrM6aKtB7Wz2NScs4XLdlve6tGp1K7lTlwu/KWazR9YLDfzC06l9mbDogu816r2tVLzJa5cKepwOYEvU92SV2/HacXn8Ddfn7h19LlJl2c/4eyctPci7pg+YMZHLx7BOMOZnZBIoLMTEgl0dkIiIUrNvllSV0eusduWg67ubAaCaxMdNHpLi6aN89/Ddk0AgILT7Dmr2ScKVsCGGh0Abpg8d/Xx60uvmG37Ci+b8XU5W2Y5dSvez9VtPsEfq7uvPp7KvdZsq9btfYyXql7Du75Wvnt0EEv3mtyX0/bbNfVx+LAFlquNl3caPu/y7Kfc4gTXWmrc4cxOSCR0dHYRKYnIL0TkVyLytIh8Pnv+DSLyuIicFJEHRaTQ6b0IIaOjm8v4MoBbVfWiiOQB/FxEfgzg0wC+rKoPiMjXARwE8LUB2jo8WrqM2GWrdVeuuBGUSqr78s/uK7ClPLQr0dQIuqlq3nUsdV1bcv4yPm8v43cXL5nxa4vnrz5+Y3HRbNuff8mMZ5L2ZZRnk5UNty0WbCmumZKVE+eKVstUJ+y/Yd0tr9UgotiSLlt156pDKC4skeVW3kJdlxt1l/Hqyli1X5g7fnSc2XWNKwIun/0ogFsBfC97/giAuwdhICGkP3Sl2UUkFZEnASwCOAbgDwDOqV79zn0RwA0bvPaQiCyIyMLS0lIfTCaEbIWunF1V66p6M4B9AG4B8OZuD6Cqh1V1XlXn5+bmOr+AEDIQNhV6U9VzIvIYgLcDmBWRXDa77wNwahAGjoJk0oZY1KVNNoqu9VFQDtqXUW4pq9xhuxn78k6uRZMvB72jYHX23pJtZ/Sm0tmrj99cOGO27XNLXnck9mZDVa1YLtVtuu25RlPDT7tU22LqltPm7XtVXUdZJ8vRqDQ/iKTilbJblupKXnlhbZa1tpTDdvv6TrU5p+GxvejmbvyciMxmjycA3A7gGQCPAfhgttsBAA8NyEZCSB/oZmbfC+CIiKRY+3L4jqo+IiK/BfCAiPwDgCcA3D9AOwkhPdLR2VX11wDeus7zz2FNvxNCtgFMl10HKbjgeN5+TD4FM9SF6mL0LRWXvC5sv7qzLakrHVVInO5ObXx7Nm3G3Wfc2tAdib0vURTXRskZnhdXsglNW1IX3M75cdp+qa6rDmW0sbbE4Dt83puhnb5Ha9z9mouzE0KuDejshEQCnZ2QSKBmX4/Ea/L2LYfCsbgWQi1lkn1wtl0nZBdw1g6C1JeOSnwMOhj7QtGrajV43f0dF9Vq/LMu6f9MrbkMeLFilwD7stMt9xqK9r1tMS2gHsbO/anxpaNrbnvdj4O/q0OgXF0LZy1Yd5GyO9iYw5mdkEigsxMSCXR2QiKBmr0bvA6vu1bJtUCzt2hE/15uu9Oc4f7iYsgNV3LZt1X246pLvF/Vpna+pPbU5xs+bm4NP++S+s/U7Zr1s4FmP1d17Z/ca1OX45+6uLtIGzHd5vNad+xkddjlKmlJwt/4sJlhHXYYbzizExIJdHZCIoHOTkgkULOvg1Zs3De57FohTbi88XLzY0wr9vuzXmjfrsiP03Jz/4Y7Ow333qur1o5Xy3Yd/v+VbQnsyXRP87WuBvOO1K5Pr7uk/Vfqtg7fi5XdZvzC5eua+5ZtS2tfStrj8wcaDV9XrmlLumq3pZfd2JXOS8su9yD4vMN7LWvb3L2YqhX1UnE3ABqdRP54wZmdkEigsxMSCbyMXwetuITNVXttKGUbWkrLwfJOd9mYFuzYdzDRnFtGGVzxpm7fxqoLtZXt6Tt/2baEPV2wl/H5IM634mpYl5ye8GG7l6r2Mn5x1abEvrzavHS/5Dq8VNxlfM13dXUhxUbFdWIJSlHlOly2d5RJQVfXlnPlLuOl1mFc52U8IWQMobMTEgl0dkIiIUrNfqzxXTO+PfmQGWvZCkG9ZFsdyZTT7EEILOc6gfp2T74MVQtBGMqXRfJ6v5a3T5xPbejNt1FarjR1+kxh1mzLudzRmktx9ctUvS4v15r/Si2a3KfxVu17V1bseyUXXCrvcqDZXdepVs3udbjb32h2F2oru1CbC72hZsc/PvmP2E5wZickEujshEQCnZ2QSIhSs2+WxooViullG3NOik3NmbqWQbmC+z6V7r9fW0oZp75sskvNhdW+yy5+fTm4t7CYt3+DX73pU1Y7jdvhyz/Xq+4zuGT/DXMX7XvnLjX
HLqvXaHCgtSyV3567HORErLhlvat27DW7VLdXGSoPZ3ZCIoHOTkgk0NkJiQRq9i7QmtVqumKFo4Sa3bUIamn763LOxcWkJdS3LdWZfFlqtxQUPufcxsarq81jVwsuzztpV9O6FW2n2X3bZKfRE5fj7zV63mv24JaJX5bqy1B5jZ6/ZP/O/IXmucwtuzUPKy5o78qRbbclrR7O7IREQjf92V8nIo+JyG9F5GkRuSd7freIHBORZ7PfuwZvLiFkq3Qzs9cAfEZVbwLwNgCfEJGbANwL4Liq3gjgeDYmhIwp3fRnPw3gdPZ4WUSeAXADgLsAvCvb7QiAnwH43ECsHDCdcuU9LXH3UjPnXFx759TFxsUluCdVr+HDfb2ehxu7kldOK/sSTmGefsPfW0jVje2x1Gn6FsUedmiq+TXn7UtJ5S7Bjn3+e2XjUt2py4XPrbrxJfuC3IVmG+tk2QXty66OgUs++PH/fgXbmU1pdhHZD+CtAB4HsCf7IgCAMwD2bPQ6Qsjo6drZRWQawPcBfEpVL4TbVFWxQZs8ETkkIgsisrC0tNSTsYSQrdOVs4tIHmuO/i1V/UH29FkR2Ztt3wtgcb3XquphVZ1X1fm5ubl+2EwI2QIdNbuICID7ATyjql8KNj0M4ACAL2S/HxqIhWOIr1Gn1WDRdN1qRJ9v3ULDaeFGqE9dLbZ2MXkA4jS7r79WL4aa3Znha+O1VH9u33o6DMsnVWdHhzpxLWvSW1piB/s6jZ5fsePCeft558+vmnFyPrhB4GoLqq8pV9veufCebpJq3gHg4wB+IyJPZs/9Ldac/DsichDACwA+PBALCSF9oZu78T/HOjdfM27rrzmEkEHBdNl16BiKc2mUGoRskrK9RvV3LZOWjrBuiWbQdSRxpaLTih37sJ3vRlNzobdaqc1lvOtc47vRdMJ0R+1QztmXjmrptOqXqVbDUlLuMv6ilU35c/ayPX3VxvX0/HLzsb9Md+mwR5e/iWsJpssSEgl0dkIigc5OSCRQs/eDIPSmvlVUar9PNXHhMv9ewfa01r6raFLxqbdO47ulpWHaahiGA9bR8L4tlZ8WvOGBlPYprS2htJbSUXacW7V/d+5y8/X5iy609orNrU1evWjNcqnNYZnwhjtXaDjDrzE4sxMSCXR2QiKBzk5IJFCzd0HHdlFBvFaqToBWrRhukboutpuE6bM+nu/0f7LiNHvZtmFOp21p6Vyx+fp6yZV3bomzu1bTHTS8qWLlkgu8Zu9Y7tkvS70UlJJycXRZesXauWw1e+OyW8bqS00F+PN8rcGZnZBIoLMTEgl0dkIigZp9C7Ro+NxHmgO3ZNXnX4srdSSuDbBZIuvz6FO37tQtyUzdWKpu+2RT49fL9r0ark2Vj8O3tJ5qM020lnd2dqz6sX1Besne95BLTZ0uyzbXvXH+gh1XXCJ+xBrdw5mdkEigsxMSCXR2QiKBmr0PHKs9cPXxHZMfN9u8zlan6VvysYO2wFr3SeYurz7nTp+Lw6eXSvblUxPNbSUbo1dXWrpesu/t21h5DW+O43P6yy6n/6LT5H49wUWXzx6026otL8NubN+myhObTg/hzE5IJNDZCYkEXsb3maMr/2bGd0wfsDuUfUVTe4lrKtdu8hIViStTNT1lt68G3VBK9hIfBZd6W3RrXl3I0I81GEvVhRvdGJdtyqtfFtzw1XvDcBov27cMZ3ZCIoHOTkgk0NkJiQRq9gFz9OIRM+7UIbYnXBivfuHCBjsCUrTLYZMJp+F9aq7X7D6VN7xf4O5LNNx9iYbT7P0sB0WNvjGc2QmJBDo7IZFAZyckEqjZh0wnTTlQTR+gXlf7brIu9bZFw/tU3vC9fTktH2fvQaNTk28dzuyERAKdnZBI6OjsIvINEVkUkaeC53aLyDEReTb7vWuwZhJCeqUbzf5NAP8E4F+D5+4FcFxVvyAi92bjz/XfvPhop0kHqefVlcDW6gY7DgHq8sHQcWZX1f8E8Ip7+i4AV7JFjgC4u79mEUL6zVY1+x5VPZ09PgNgz0Y7isghEVkQkYWlpaUtHo4Q0is936DTtTjLhusOVfWwqs6r6vzc3FyvhyOEbJGtxtnPisheVT0tInsBLPbTKLI+m9Wyw4rZd4IafDzY6sz+MIArVRkOAHioP+YQQgZFN6G3bwP4LwB/KiIvishBAF8AcLuIPAvgL7IxIWSM6XgZr6of3WDTbX22hRAyQJgbfw1DrUxCmC5LSCTQ2QmJBDo7IZFAZyckEujshEQCnZ2QSKCzExIJdHZCIoHOTkgk0NkJiQQ6OyGRQGcnJBLo7IREAp2dkEigsxMSCXR2QiKBzk5IJNDZCYkEOjshkUBnJyQS6OyERAKdnZBIoLMTEgl0dkIigc5OSCTQ2QmJBDo7IZFAZyckEnpydhG5U0R+LyInReTefhlFCOk/W3Z2EUkB/DOA9wK4CcBHReSmfhlGCOkvvczstwA4qarPqWoFwAMA7uqPWYSQftOLs98A4I/B+MXsOYOIHBKRBRFZWFpa6uFwhJBeGPgNOlU9rKrzqjo/Nzc36MMRQjYg18NrTwF4XTDelz23ISdOnHhJRF4AcD2Al3o49qCgXZuDdm2OYdj1+o02iKpu6R1FJAfgfwDchjUn/yWAv1LVp7t47YKqzm/pwAOEdm0O2rU5Rm3Xlmd2Va2JyCcBHAWQAvhGN45OCBkNvVzGQ1V/BOBHfbKFEDJARpVBd3hEx+0E7doctGtzjNSuLWt2Qsj2grnxhEQCnZ2QSBiqs4/TwhkR+YaILIrIU8Fzu0XkmIg8m/3eNWSbXicij4nIb0XkaRG5ZxzsymwoicgvRORXmW2fz55/g4g8np3TB0WkMGzbMjtSEXlCRB4ZF7tE5HkR+Y2IPCkiC9lzIzuXQ3P2MVw4800Ad7rn7gVwXFVvBHA8Gw+TGoDPqOpNAN4G4BPZZzRquwCgDOBWVX0LgJsB3CkibwPwRQBfVtU3AXgVwMER2AYA9wB4JhiPi13vVtWbg/j66M6lqg7lB8DbARwNxvcBuG9Yx9/Apv0AngrGvwewN3u8F8DvR2zfQwBuH0O7JgH8N4A/x1pGWG69czxEe/ZhzXFuBfAIABkTu54HcL17bmTncpiX8V0tnBkxe1T1dPb4DIA9ozJERPYDeCuAx8fFruxS+UkAiwCOAfgDgHOqWst2GdU5/QqAzwJoZOPrxsQuBfCoiJwQkUPZcyM7lz0l1VzLqKqKyEjikiIyDeD7AD6lqhdEZCzsUtU6gJtFZBbADwG8eRR2hIjI+wEsquoJEXnXiM3xvFNVT4nIawAcE5HfhRuHfS6HObNveuHMCDgrInsBIPu9OGwDRCSPNUf/lqr+YFzsClHVcwAew9rl8Wy2TgIYzTl9B4APiMjzWKupcCuAr46BXVDVU9nvRax9Od6CEZ7LYTr7LwHcmN0lLQD4CICHh3j8bngYwIHs8QGsaeahIWtT+P0AnlHVL42LXZltc9mMDhGZwNq9hGew5vQfHJVtqnqfqu5T1f1Y+5/6qap+bNR2iciUiOy48hjAewA8hVGeyyHfsH
gf1lbK/QHA3w37homz5dsATgOoYk3THcSa1jsO4FkA/wFg95BteifWdN6vATyZ/bxv1HZltv0ZgCcy254C8PfZ828E8AsAJwF8F0BxhOf0XQAeGQe7suP/Kvt5+sr/+yjPJdNlCYkEZtAREgl0dkIigc5OSCTQ2QmJBDo7IZFAZyckEujshETC/wM5/TWz0ICX2wAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('psf')\n", "max_flux = np.amax(data[0, ])\n", "plt.imshow(data[0, ], origin='lower', norm=LogNorm(vmin=0.01*max_flux, vmax=max_flux))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## PSF subtraction with PCA" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "After masking the images, we will now run the PSF subtraction with an implementation of full-frame PCA. We use the [PcaPsfSubtractionModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.psfsubtraction.PcaPsfSubtractionModule) and set the argument of `pca_numbers` to a range from 1 to 30 principal components. This means that the mean- and median-collapsed residuals that are stored with the output ports to `res_mean_tag` and `res_median_tag` will contain 30 images, so with an increasing number of subtracted principal components. We will also store the PCA basis (i.e. the principal components) and apply an extra rotation of -133 deg such that north will be aligned with the positive *y* axis." ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-----------------------\n", "PcaPsfSubtractionModule\n", "-----------------------\n", "\n", "Module name: pca\n", "Input port: prep (70, 57, 57)\n", "Input parameters:\n", " - Post-processing type: ADI\n", " - Number of principal components: range(1, 31)\n", " - Subtract mean: True\n", " - Extra rotation (deg): -133.0\n", "Constructing PSF model... [DONE]\n", "Output ports: pca_mean (30, 57, 57), pca_median (30, 57, 57), pca_basis (30, 57, 57)\n" ] } ], "source": [ "module = PcaPsfSubtractionModule(name_in='pca',\n", " images_in_tag='prep',\n", " reference_in_tag='prep',\n", " res_mean_tag='pca_mean',\n", " res_median_tag='pca_median',\n", " basis_out_tag='pca_basis',\n", " pca_numbers=range(1, 31),\n", " extra_rot=-133.,\n", " subtract_mean=True,\n", " processing_type='ADI')\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('pca')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's have a look and the median-collapsed residuals after subtracting 15 principal components. The H$\\alpha$ emission from the accreting M dwarf companion HD 142527 B is clearly detected east / left of the central star." 
] }, { "cell_type": "code", "execution_count": 23, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 23, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAu+UlEQVR4nO2deaxc133fv7/Z583bF+40H7VRlh1LclhFruVEluxEdha7qGvYMVICkSu0dQsHSRHLSVvAQNDaQBHHaNIEQm1YLlzLrm1BsprFspbEbqyFMimZFCVxF9f3SL5t3uwz9/SPN+Q9v+/wbeR7M4+6vw9AcM67d+49c++cued7fps452AYxlufWKc7YBhGe7DBbhgRwQa7YUQEG+yGERFssBtGREi082TxnpxLjPS385RvcUS3YoFqu4B+yxvea/6ZX65RJkZvcF5fxCw8naJ+bgqNfEEut62tgz0x0o9Nf/KZdp7yLYXQLQwa+g+Zrqpql4sp1Xaz3u3ONtQ2BHRwOjYP7ji9v1ENfz1iSf2jY7SP0//xL+bdZtN4w4gINtgNIyK0dRpvLI+gpG+PpPTUmafL9foiv90Zb/8a7ZsgnU3T9kRXXZ+L+pbuqVx6XSlo+ZBI634HJBmCalyfmj6nsTLYk90wIoINdsOICDbYDSMimGbvMFkylxXz6UuvJcl2c611GzNaGwcZ0rqkuyURHs+52LzbAMCR/hdh05vW8FXPzBejY9ULSX2sNK896DabGH18E9/ce83Mt1TsyW4YEcEGu2FEBJvGrzJsVhKaWhemM/Nud+zFxl6o8fmn6UDrtF9NzakfMTpWQ6sLNBpLfy4ENG1n91n+XEJmP/YMbPHu8zfN6nNlhkqqXa3YV/wi9mQ3jIhgg90wIoINdsOICCZoVgBfGzs2DZGZiXU0KAy1Racv41hBUd/OGJnHEr6Zz5EZj01tZNLiY4OD4jLhuaSbzGG01hCntQXW9PWy1uHZnvKl18XprD5vd021azW9RsJrJih70Xm9+r1vdezJbhgRwQa7YUQEG+yGERFMsy+BFvdN0pi+ns0MlNU2DvdsoUK/t6lQzwrZvlknJ9IUdkrbW97v4UizczhtS6aatNbZbNP3+5ZMkSstaXDW7OULWoezD0AqEa5VlPi8RKOysF+DeDqdXZVLxUXu1TWOPdkNIyLYYDeMiLCkabyIHAOQx1x+0rpzbqeIDAL4NoBRAMcAfNw5N7k63TQM42pZjmZ/v3PuvNd+EMBTzrkvisiDzfbnVrR3HSKglE3JLNlyp9N6e1+Ykqla1pfUkZCOURiqi89v33YlrT9bfN3Zx5z0aYP8xhueD3qcw0wp7RQfO0XXoEr61l8f4HWKeErr7AYdO96rtTOH7s4WwvgBviaS0/2WAn2lyZbu63T2mw/y+nrl1hdU+1rX9Fczjf8IgIebrx8G8NGr7o1hGKvGUge7A/BDEXlJRB5o/m29c+5M8/VZAOsv90YReUBEdovI7ka+cLldDMNoA0udxt/lnDslIusAPCkir/kbnXNOOJVJuO0hAA8BQPq6zVYqxDA6xJIGu3PuVPP/cRF5FMAdAMZEZKNz7oyIbAQwvor9XFXY71tIV9dmSatx7Lfnj82x3PGehf2vhXzpnafZM4PaZl+rsn862fvJD3xgw4xqT57vCfvM9ug4xcKz3Z00O6d79o/HqaOZBKet0rIbLqfP5aee5nvT4rPPMf507uJMqP8TGbo3dOzCpLb/S5Gu2QAF/a9xFp3Gi0hORHouvgbwqwD2AXgcwK7mbrsAPLZanTQM4+pZypN9PYBHZc6NLAHgfzvn/lZEXgTwHRG5H8BxAB9fvW4ahnG1LDrYnXNHANx6mb9fAHDvanTKMIyVJ5K+8TGyR/euz6t2saw1erVGNmXSnEnfR5381aslSqNMmtJx2SV/G1dFpnZAduJERp+bP4fyw2dfdy7BTHb3CuXKyw7oXG+B57/e4ndP+etaqstSLD2vB/h++3zeUrFLtdOUg65ynv3uw5eN+CJpqGk9BX1a42eyVDW3tLbt8OYuaxgRwQa7YUSEyEzjffdPR1PW6Wk9FeS0ysmcnq5xyKs/Ta0VKY0yT4/ZFkTn8t1Ytw5NqW2H92/S782SCYvdY2varVe8FE4xkg893Xr6yxKgQdKFpZBfQbbBqaE4tTSlvOLqMg1yW/XDfktkDuNjNeqUhipDU3XPfZlDbVlicaoudiH2zXiAlkILhRd3CnuyG0ZEsMFuGBHBBrthRIS3rGZfyAWWQz85n5PrZXdN0m4LhJqyVuPwWDZD1UkXxj1z0LGxIbUtNUlauKjfWxvR53JxbcLKdIXbK2N6nWJqhq4Jm+bITFUoae2c6Q3DfGuT+lic7plTYLWk16ZHkL9GEr+g31vvJzMdh/2yG7B38BqbRSl8Fuz2y6HP3mfmfnKoSIurcwewJ7thRAQb7IYREWywG0ZE6LyQWCHib2qbZ7BO60TfBhrLLWzXDSj8k9032R3UeesDrE+rk7pfrAOH1+kw1FwqtOmfy+fUtlqKdOAQxYayeyfhp2yOU3hmo6w/c7ZXh9eWZ7XNPkduq4WzXl9J77PfgqM0VXy9OZTUecsDrNG5tBSvBwidK5YM38/nBd07dolwtN7C/gRqX1qHyNH1LBb09WwH9mQ3jIhgg90wIoINdsOICNesZucUTDKqNSSrKV+Xs2YX8s2Okb91S8pmsqE6T5Oynmff+Bj5Y0/NaHt3PhFqOS6b5Fijc70n0srJs/r9tY2hTudwWda+JS7JRJ+rlqUr7NuYC3ob+9VziLAL6Fi0niDevXNdC9+rgNYeMENfce/YSUp/VaeQYUd2dVC/Ay5z7d1rDttljc6xBQH7GqwC9mQ3jIhgg90wIoINdsOICNesZm9Jo0RwquTcSPHS6xKVJ3Kk8xzHmCfZ/1rv75dpLlNpqNxwUbULE1oL963TKbFmi+H7HduBF4mRZj/w2A2zevsC5YvYHu3IZh+jlFfVvD7WhtELl16fPTmotvk++QBQnqE4e9L4LkHPIC+ugWMN2NYdz1KKa07V7e+/SCnuFPm+V6nfqNEB/LUgOm+6T9vZmQqtz6wG9mQ3jIhgg90wIoINdsOICNeMZme7pFDpnu4ura8mp/pUu5wKNSbbwlP9+r2ZtD52oUglmlNk7/boGdbFKwuzmXn2nGOa7Oy9PaG/wCzF0TfIt+BdN5xQ7VeOb1btOuVjS54Ir0G9m+z/67WmZF/5FKXILlNOgLE3RsIG6eZUkt4b03p/403nVHt8olf3zVuLqE7Q9SQ7e4xiD6SbSlF79u+unL7vrJvZvz1G/hguSfHt3rpGlWz0VYp
nZz8RXi9YjRx29mQ3jIhgg90wIsI1M41nd8KAzEiTpfnTDwNAVy6cpubP69DRekxPqUo0g2qUF5mCeRKD0w/FExQeS/1+2zY9hZ0qhqa5Op2XTWuJGB2bzWUkN6rD4f5xSmkVp2NzHVZOq5zspwqz/uciN96utHZ/zce0+fHMeL9qs5uqn/JKSCK0uDLT9DfXpfuZPxtWsq1Ulvf1Dwp0b6lCr2+ak5L+DDmSdzNTPardkoqre+Hqv1eCPdkNIyLYYDeMiLDkwS4icRHZIyJPNNvbReR5ETkkIt8WkbVd1c4wIs5yRMtnARwAcNEu8iUAX3bOPSIifwXgfgB/uWI946pJbIrgFExxNs1pbVfIeyYb0nmsxYIkuWCSq2iDUh/BM1MFdF4uqzSV0KajN89q11LxPhaHgu687rhqn5rV5sX1m6ZUu1SllM5D4TWqF/W6xabBadU+l+xW7cGcdvs9s2eDavtnqmf1vRjp0nr1bNCv2sMj2mU4oFs9cc4zxfF953TPREvIsWfiYrfnliq5XIaKo5dj869zxIa1WW+WTbAUjgxek6LvJIdlXwlLerKLyBYAvw7gfzbbAuAeAN9t7vIwgI9edW8Mw1g1ljqN/zMAfwjg4s/REIAp59zFn5uTADZf5n0QkQdEZLeI7G7kC5fbxTCMNrDoYBeR3wAw7px76UpO4Jx7yDm30zm3M96TW/wNhmGsCkvR7O8F8Fsi8mEAGcxp9q8A6BeRRPPpvgXAqZXsWEv53Bla/yPNEyetHKPSR34opFQo7JFKH3P4rJzQduHkNj1DyXiuu/nT2n46Df0DF+/T9tMNQ1orVz0def6cdhvde2KLav/KdYdU+6kX3qnPRbb0ek94TRIUnnmhoN12C+d0u/Sm/lzBMNmB/VPRmsi+47rU9L9893Oq/Tcnb1HtXrLLT/ohsKRtu9bpe3HT8Lhq7z+zUbVVGTByCU6QO2zArrj0HYvTd7TmfW72zeBrwimuuLQU+4msBIs+2Z1zn3fObXHOjQL4BICnnXOfAvAMgI81d9sF4LEV751hGCvG1djZPwfg90XkEOY0/FdXpkuGYawGy/IXdM49C+DZ5usjAO5Y+S4ZhrEarCnfeN8/uzZJKYCSVAqZUh01qDRPbVZrfPFSPLkc6Xuy1QYUvilkymWtNtId6sbZbq3vE5TSavu6C6r9oQ37VPsrL9x76XWOUhnVKAXT0wdvUu3YoNa6dU737OlCIUlYPtCv2tmi1pjr3ndatbk0VU82tCuP01oDp95+/NgvqHaVQnG7krQe4K1zZCks9b1bjqj2sbwuc91NvvHTDW8tgtNQs66mJsdElDlGQr2XjPYJSnVG37kM3evCtLbLp711oStNYWXusoYREWywG0ZEsMFuGBFhTWl2VX6HNHqcS/WQXoqRJmop+5sJ5t9GbZCmr2n39RYb6tnp0AbNZX9qZMstDWq9daw8rA/u2ZELU1r/336D9o3f8+p21Y5RDDV6tN04fc5bt+CsSOR6XevR13OiqPuyY0Tbs/e8Nnrpdf967eteKOn1k9+5/gXV/u//8AHVzqf0vU6eCNdvSlt1P396elS1OQ0Yp57ySzbxegqv3VQ4BoK+Y/1DOlX3zEx4jVIUT1Gn9RbOz1CtcO4CsuH7eRI4Y9USK0fZk90wIoINdsOICGtqGu+nd4pTtc4Wd1lyN2zwFLaL7GVeaGSsquc9jt7quDoqZ/6k/f0pVoymhrfecFJ3K6GnqONl7YY6siF0n53eo6f4sRt1PxK92tQ2sFW7jgaP6veP7jp46fWRSW2iKr+g2/1v6Os71q9DXoOBCd3vzVOXXp97c0Bti/fpfu6f1e6zsV59TSbGKLvsNs8sxemxyC3aTevvSYHNaVPh9sagnuJzFVchF9ZYt95/akzfu9514bS+ROYxnrZzGjCWFHH6fie9DL1lOnajvoAJ0MOe7IYREWywG0ZEsMFuGBGho5qd0/4kPG3HGqfF3MCpd0njY1Jrt9RU+IbKiN43MUOmOKr0waGiAZnqal64bYzSJiW2k/ai9M/7L+j0TsNeCqcP/Prratu39uhQhOS4vn7nc9rF2L1bn2tiz/WXXmfOkUlwi74mtR69ndN8BWTv8bUzh53etVW7tP6bkWdV++m9OsSV3Vg3DofrGOem9drB9Ix223UUGir0HXOeOTJ1VJvpGpQ5ym3mKjl6uKT6tIb3SZIGZ/ftdL9ep2CXYnaJVa7kVIU4llo4Ndel/Za0l2EY1zw22A0jIthgN4yI0Fk7O+lAX7VwmGmCbLVcVomppPRHq3mmW6H0wuw6Givr7Znzus0eri4W/ma6Ad3Pw2TPjtFSxIUJ0qB7w4OfeodOFf1f3/s91f6j5/6Zan/trq+r9u8+8YBq++sW9XdoV89P7tir2o/8+D2q7Yr6eu7oGVPtH4yFKbFqp7SOfqZ2o2o/dWiH7tcFugE3aK082hva9E+/vk5ti5eo0mpKa98gw7G84TWoDlDaKQoR3rZehyOP5/W9Khb0GknD0+mcwpr9Rriqa4K+zwFdb/8rySnY/LEgC7jO2pPdMCKCDXbDiAg22A0jInRUs3PK5uVQ5RLNZE+Nk6+8b8ZMTNM2kv/pCS188qPUzwX6LRe0DXSionX3tlFdolmrQuCX7tl/6XWxro/1jdNaR68bmVHt/3bi11Q7XtGfo9YXatQUhVB+97XbVVtIvzoqR/TogdtU+9/e+veXXv/52AfVtt4faQ0/8EkdL3BoWqd7Btmkf/rczWG/9J5wFHbqSLO3lFnyfOUdbePU0kdPjqh2rk+X8mJd3UiHurxKNvlMl76eQaC/gxWynbeUP/O+wMnUwuGz82FPdsOICDbYDSMi2GA3jIiwpjR7w9NA7OvOvsOOUwbxsbopVe9YuH9KV1xCTZtPEatT3DilVZYG2+G9bVRveFJLYRw/qnVgz3pt7/7xa6FN+lO36/RNuyfept+b1r7Zh5/RaapoM0rvCO3XnAapJfaA4JLBAcV6/4+Xf+XSa6F8ABO3am08/ZIuYzV4mPo5ovsW9+Tu7Nv1h8oc07buGC2ClG4mH3RP07MvB+t7R/EZxVl9rkS3PrYq4UyLC5Wy1uSNwsLfX07L5tvtK7Re5bw06txndYp5txiG8ZbCBrthRAQb7IYREdqu2X2dzuoi8Gy/sfj8mgUAYmWKMSe9FaOyzPVseLwGmTRT02SP7tbthDavIqnN2wh8CUXOydkTWl+Vtmqdx/nE4JWp+uZz2q7OuppTRyfIxly7SfuY++sebpFSRy3wuZOsb71DrdO6et2gvmD5Z9frfmozPGLv1osqs2PhDvEL+nr59xUA6hv1Wk02p+3b1aNe3rgecrCga5KmkkzDvTpO/9RJnWO8cS4MiJchfQ0alYXzA7R+36lrpflt6Ql/fYvLTvnnmHeLYRhvKRYd7CKSEZEXRORlEdkvIl9o/n27iDwvIodE5NsiklrsWIZhdI6lTOMrAO5xzs2KSBLAT0TkbwD8PoAvO+ceEZG/AnA/gL9c6EAi2jyRzegpbQGhaYNnlWx6kxE9TeKUwrUZqg
IbeNVQaOrX6GLTmn5rjNxOhcwbsfmzE6E8QmmSaPpWy+t+vuPmE5de7z+oTVSo6fNu/ZE+1vF/TlV0qC9q6r7EKiLz7e+Chbf7nNunw1Kv/9I/qvap779DtYt0TXyzVGOQqtycJDNUniqtVrRd1fWG79+0Tdvpzoz1q3aVJNbpMW02bXHF9frZ3a0lwMwZnXba0b3sIhPs7IyuwONXqwlIEjT86kjuKkxvbo6LPUk2/zkA9wD4bvPvDwP46GLHMgyjcyxJs4tIXET2AhgH8CSAwwCmnHMXfyZPAtg8z3sfEJHdIrK7MVO43C6GYbSBJQ1251zDOXcbgC0A7gBw88LvUO99yDm30zm3M96bW/wNhmGsCssyvTnnpkTkGQDvAdAvIonm030LgFOLvr8uqE2F5onkCIljT28kKIyvUiQTFUmTJJudyP0wtqHs7Uspgip8bDpWgXThKFWU9cJr46QZYyNau6XT+r2Nhv69fXWf5xJLmnBgizZJnfplKi8bUDptZrk6/QqPxWaj7hunVLvx/nerduUNrdHvu3ePao+XQ9390qFtalttu76+OE96n9KE9faGdtTTZ3SZKnC5pwJVXqWquLx/ohC281Ndel+6Xux+PEv7s2k00RV+bwL6Por/nboa05uIjIhIf/N1FsAHARwA8AyAjzV32wXgscWOZRhG51jKk30jgIdFJI65H4fvOOeeEJFXATwiIn8CYA+Ar65iPw3DuEoWHezOuVcA3H6Zvx/BnH43DOMaoK3usrFkgJ4N+UttTqfbmA3bQVorDA7di6fJXk3HylBpnls3hUsK13WdV9temdaGhDsHj162/xc5WtS5pP/fm2FoaSVJYZBvantpbRulOj6tt2e8dM/v+81X1DZOwRzbVtTnIhtrg8obX00asOXAepPLF8/8h7xq14/T94A+x0uvhddXyA06nqc2pZZ2M7qmU34g7Aunjmb7dUBlv4VdVkkENzKeOzL1E+wOu0AoKgDEs1SOy0tzJVRyPHDe9WU3aA9zlzWMiGCD3TAigg12w4gIbQ9x9VPoVqa1nsoMemmTKBWvq+l2QNpk3bAOoxzMaj37wcFXL71u0G/c3T0H9LHi2k/56cLbVbsnqW27Wc/OWbmgNXhylkobP6cdi6r9qqlCYH+4X5cy3vK4vgalIf05+j6hXR2OnqI6VW0iRumduVzxl+/4ump/urhr4QN6tmNOefXxD/1Etb9zQNvwa/Q9SnhrPb09+jtSzlI48intV5+mMlX1Hfr9df9cLSHEFNvB9nDS9Jz2S7z1LEcpq+J+abS4hbgaRuSxwW4YEcEGu2FEhLZq9qAR07HKpFv89MYBlXOK9Wqf8lRa2yEvTGl99b4NOj9xzavLfEtGa9tf1ksHKJINdCp7TLUb5Og8mAu125TrVdsqO3ROqxKlL05Oax3opydmO/nMqN53Zgddg0MbVFuynI47fL2cePSloGzrZM9P5vS9+/f7Pqnad73tiGrvm9DloH5xx7FLr1/af53a9oNj71TtFm1MGtYvbzyd1/7ojWm9thCj70FliEqMk4+6r5djGX3t2a7uJvW5WIfHKE218/zyW/T+AjHs6phL2sswjGseG+yGERFssBtGRGivnd0Bzvc/Zunh65rkwnbHeJcWnYmE1lNTNa3HDiPMg9YT13by/zKr7dHDSe27HZAmmqTcx0cOh6mROYV18py2u7PcSmhTLUrJcAGh/xBpxg/oePZtPTrzz+++Tducv7D7N1Xb1+msIVnbtpSDaqmVrJuJVHj9OUb/X//Cj1X7Gw/dp9o/eqde52D/9zMIc79xGeqZcb1W07tO+0iU6N5VvBJOsRn99U/l9b71bvqQ/VRKitaVEp6fSC1P+VfJ9yC9Qd/46rj+vvL9CLzPLSn2s8eSsCe7YUQEG+yGERHaO40XQFLhnCOe0lPv+kw49eGpnDhtdqpQiCtXjPnZuA5bffe60Ny2t6CroaZj2kzy7IQOJd19Yqtqp6jfUg77FlD12Dpl3sJGqkLarU1z6efDVFPVPnrvbv2H//TpR1T75ZJO2XTr206q9t6j3udYxFrD1xM0VWypwOuZCVMHtXT51tO/ptql6/R7uw/q6XBhq75ofdtC+TJzpF/3g74H2wcmVPvlSZ2OW4rhvUpu0jKIU0djitI/TeipeW6UKtecCuWI9OgpP9Nyfcm0zBVk/Kov/F5OAzYf9mQ3jIhgg90wIoINdsOICG3V7BJzSHhurvXK/Kd33eRuSBoxlWQxrMkm9fuP5IcuvY6JTsE8VdIas0ippeuU8qo2rVNPdZ31wnY5qnSTNvM1yF22dFinM64PhuL4/e/7udr2o5d1yOu/+r+fVu2B7ZOqHf/ukGoP/Yuw3FEqoa/P6RN632SPXlvgdYrCeW0q6n01/Fyz27TAL1Wp4u512uxUHNP+yskZvb+fpsrR4ylB/Xr5gF63aMFz660fn780FACAUkc70tH5STKXeTqdU6EHDf3eJH1/43G6ZnR96941kASVFFtiujF7shtGRLDBbhgRwQa7YUSEtmp253RaKnbZzAyFNmcO7+SQVnbJ3Dak7aunprVNulQMdXaDw2cpBBMXtCbn9YMYpxT2CNIUqnhKrweM3nZatY811qt294bQ3XOySq625HvgqDzU1EG9FtG1Tl/fbq+s9Z0jx9S2v/17HR674d4x1T79Y22vHhjXn3PytvAart+q1w7OQS9k5Lr0ekC+j1JJN3R7U2+YcuyNhHatdce1to1tpnJQLe4C4b0PhL5/g/q9ZSr7HSfbeaOg+xnzdHqtoG3y7P5aht4+MqjTqnFJMt8lNrjCFOH2ZDeMiGCD3TAigg12w4gIbU8lrdICk9bwfXzTGa2PAk7rQzJlfFbbTJNxbcdUHuj0XhmjMr+k81JjpPEpzDLhSb3kJNmIr9e+72entOZMD+ntRS8E842f3KS2Dd9zTrUvTOrP3LdX386uj5xV7Xw5PPa+qU1q230fe061H33un6h27rYp3e+M1t3Dnh05m9T3LlbT16v6Sr9qY6suw8S+CScmw/233DCutr19QH/Gl8/rmIh4TK9rnGl4527o+1qmNOCJXioPxWHB5JcfTHk6nMs9NWgth9agWnzlCeetUbWEIy8Re7IbRkRYSn32rSLyjIi8KiL7ReSzzb8PisiTInKw+f/AYscyDKNzLOXJXgfwB865WwDcCeAzInILgAcBPOWcuxHAU822YRhrlKXUZz8D4EzzdV5EDgDYDOAjAO5u7vYwgGcBfG7R43m+ybHk/Pl0SiVth2xQGZ9UTuupUkXvz77GfgphVyb7KZX5DSijkJAbPoXWo7Az1N0BxTxnyEe6dE7bhXfs0GmtXz8SplHuulfr05mfrlNtt45Scb1H6+gE+WMPdIX93NGn7ehHC9o3PrtBp3divVqt62Nv6wtt65MV8hmn27zzA7rc1j/u1vkDuGRzsRje+zTFPLwxra8Ja/SJvE4h5sdmBOf0vQpGKH2z7jbcFKV/Zvu2vx7VRb4ZpLNrlIb6vNPrL40yfcn8TN2JJeahIpal2UVkFMDtA
J4HsL75QwAAZwGsn+99hmF0niUPdhHpBvA9AL/nnFPuPs45h9Yfwovve0BEdovI7sZM4XK7GIbRBpY02EUkibmB/k3n3Pebfx4TkY3N7RsBjF/uvc65h5xzO51zO+O9ucvtYhhGG1hUs4uIAPgqgAPOuT/1Nj0OYBeALzb/f2xJZ/RLENFcoFoKdQzHKSe6tR5lTZ6gdn5K60bnlfplO6WQRqyTdqtTmWD2UYen5VxW97tMPtKZMX3JD5Z1fjtkw88xU9Rx3qVRvU6R6dXXpHJa/5jGhvXnfPO1UGnlt+t+TZLNnuOx37VZ+/R3JXRffnLwhvC9FKtdz1KZr0Dr0Xfeely19x3TPgB+/rWulL43x6ksdZrWcqqUMyGY8bRyN2lfWpdoKatEvvE5ivkve+tMmezC/Yjl9LlrJfLl4ByL3LcrYClONe8F8DsAfi4ie5t/+yPMDfLviMj9AI4D+PhV98YwjFVjKavxP8H8uUjvXdnuGIaxWrTfXXYBUtlwmsSmHr+SBwCkaVpfou3JrJ5yqWlSn95W7aVpOoUugk1vOUolXfCr3OhjxckVN3i7Nmn1Zclc5smR3x59UW37wZl3qfapCR3GyxJiQ05XthnrCfevk1kuTdcrm9bT0MMTero8PU3mtQvhFPadv6gr6O6ZHVXtF1+5XrVBobrrN07pfp/tv/T61KwO4+VVJw5LjU3reynD4ediucHUZqmqa5ZdXPX+fvrnYl33o9U1XH9P+PvKKdskfmXmNh9zlzWMiGCD3TAigg12w4gIa0qz17yUzaxx4mSKqJKpoqdPh4rmZ3S4oo+vLwEA67Rudizl+CeRTHHOq9ApXNKK1gPSXMaVGO0L02s9dvpWtW22QjqQlk2F0mW9cpRKH3lrEfVe/aGqJ7XZrtxPenVSX+9kkcJ83x76WZ2e1WsJ6T4dslqZ0Pemq1/fuwtT2gwILxV1elDvu9j3YDatzZd+KmnWzY2WSsGUDprue3GS0ob5pc1a0j0vrLlZo8dSK296sye7YUQEG+yGERFssBtGRFhTmt2nJc00uR/WqCRTtabbbEOt+SGyQ/pYcVofCBoU8pohEU/b46XwN7OR0Lq5+7Du12xC6+4N/doW/rM3Q/fZ9ZReeDCryybdvemgah/doMNU953ZqNr186G+Lc5QyaUNWuumDmgNP/RPdfqnYpXKGXtrEbyNfSaQ0vp168CUarOWPp4I86JUyI4udKw6pRjndY1EJtThVXZlJvfXDLnm5mdpHYjCbX1avkN53e9Uvz5Xlv1G8vr+cMmnK8Ge7IYREWywG0ZEsMFuGBFhzWp2JpfROnuGtBn7KddrWjv7Ya1ulssNkd28n8pBkf1ayN7qp0pmE31pvdZamT6tzd48q329e3pD7Rwnm/ydw0dV+9Gj2lf+5mGdUqBaphJCg6HGFLoGdU6btF3388wBSok1oK/RLaNhCOzpGZ0uu69b29nPT2k9OpbvUe0c+eWruAgqfcT6n/V+QLbzmJdinP3Rha53jeIHeDufK5cLP+csa25a9+H1Asd9WQGNztiT3TAigg12w4gINtgNIyJcM5qdSx0FVUq1yy7nC5WxTdA2Ksks02RDZk1Pttv6plDfcmnpoJvtvqT7KGVwOR2ee+a8tnX/rzfuUu2erdoOf3RK29kdrTUk/NjuLdquXp+hVNyUgimgx0Lsgr5Gr0po079pq05TzaW5Rrik8+l+1Z6mOAj48pVTitF9rnM5Y9K+tWL4OWPsi0F+9tlevdbA/u0NWhOp+L4fnOGKvq8JLhPeBuzJbhgRwQa7YUSEa2Yaz/D0jaftjqbeysWVK2xWF3axjG/Qbqr+VBAA4t7xGid1uqYgRdPMjJ76Jbrnn87FZvTtiW/UU+8YSYJCWfdLSCK4zeG0lCvs8PVrFGk7mY7I8olub8o7VdZupZPntWkt2aVNa8KVgci9NuadO5jV14+zDLe4x9JU3XmVgXjaDqo2W+GMr/S94YpGvgt3QNeP3XqZBl/QVcCe7IYREWywG0ZEsMFuGBHh2tXsZFIJKvO7xwLaXNayjStmkimuQRVLwRLTd8lM62MnhrXOdmTD4lDIRj28Jbnt2rQ2O6W1cJ5dQ8nslFinz+1r1PgEVUqhfqOH0ibTugaH+c6Ohea1Ypn27SITF6doJtfd/usnVNsPLY2Tu3FA14DNpKzZ/VTTCUoNzcs8bD6r8zrHAubeFo3OYdQrkGZqudiT3TAigg12w4gINtgNIyJcs5qd4dS7whLT06uObfSLVMx0nGKYyiz1dIfaeKqmbco1CmVknZghm7Nvb02Qeyanc45tpGNRaqPq6zrUVDaG5wrIxMwlsdjWHc8tXPrI168BH5zt5pSiWehzzVL1Wv+atKRUJtdm9OrP0VLiyXt/nX0JaM3DgdaBqN+81uPf2zqtIXFq6ZVIDb1c7MluGBHBBrthRIRFB7uIfE1ExkVkn/e3QRF5UkQONv8fWOgYhmF0nqVo9q8D+HMA3/D+9iCAp5xzXxSRB5vtz61895ZBi42URKXnX53q0rqO/ZLZXs3aLE7vn57MedsoxJX0PutEtrP3emmpZqiEVdBPx57U6YlradL0WboGvq89+Rqwru4bKKj2bIHKKHHoQSE8d4x0M/uzu2kqv7VBh5KyX4PyfyeXh1qO007pz1WmFM7qc3N6MYpTYP921t31BGv8+emERmcWfbI75/4BwAT9+SMAHm6+fhjAR1e2W4ZhrDRXqtnXO+fONF+fBbB+vh1F5AER2S0iuxv5wny7GYaxylz1Ap1zzmGBGYxz7iHn3E7n3M54T26+3QzDWGWu1M4+JiIbnXNnRGQjgPFF39FmWCP5cc7VSa0/Y5wiiH662M+ZNb4fO99osKikWHmKC8/1aL1a9kondZHdnFMXF8bpx5P8B2SdPnbgpZ7ifsTZT0EW9uVm+3XDl9UUYx5wKq4M2ZwLlA5qWOcPqHjpn2L0GdN0jVjvO7Zvez7+Qv4VfGzW8PUSDReOa/DXLbLzl4bqFFf6ZH8cwK7m610AHluZ7hiGsVosxfT2LQA/BbBDRE6KyP0AvgjggyJyEMAHmm3DMNYwi07jnXOfnGfTvSvcF8MwVpG3jG/8Yvjx7y7FJYI4Fn6RWGTS4fBjvSkvGWtjR7bd/FntS58eDO3sQzmtXU+e075LiV7tV880yF8g7u0fXKDSx8NaY5Yq2hbOqburZDuHd305xrwlLTVdP15fKZFtPOFp63qe0nz36esdT9BaAvXbX3/h9ZI66312RUhTzASvCy2UvnwNYO6yhhERbLAbRkSIzDTeh8Nh2eTCUz/HaZYYb6qezumpdZXSTrNbKofX5rKhKenU+X7dD5oltoRNsslQ5p92uvTC1U/9tMgAkOLP5Sidtmdq4ik/h96WZrUbcLJXb+eSO346reyQTrVVmtFm1ESmTm0yn3mfqzhLUoZuM19PTk/O6c3WOvZkN4yIYIPdMCKCDXbDiAiR1OxMS1pf0vCpfq0pa5X5UwqzuyaXCGpxHSXzWKEU6shUWuvNcknr5BbXUK4wxJVEfc2ZWHidgkOE
G3RuPrbf7OnXJsOZya75dwYQkItxwKGnnmtumdZAeC0hWKSMku8GzKa32eks767fe41pdMae7IYREWywG0ZEsMFuGBHBNPtlYLfHeo3LP1G4p2fL7e/RduAiuZ1yeGylOH8J4mpl4fLENXovlz5OJLXNuVbzyj+Rfb+lRDPrf9KrrF/99YU8ad8YpXsOcuRCzPZsTgvmbU9Q2q86rTWw6zP7VDjv+tdqVFb5Gtfki2FPdsOICDbYDSMi2GA3jIhgmv0K4HLRfhnmqRm2KWsd6Pt5A63aueT5a7ONPpulUlGkbdl+nSK96peialCoKFpKDOsmp11OU+lkH0eljxyVbOZ0ULxGEs/Mv57APvz8mfl6csirr/ir7C/xFsee7IYREWywG0ZEsMFuGBEhWqJlldC+9YukLuIqQG7+1EYx0vuFPJVg4n6QzblQo/39dFCs0UmTc/rsNMWcV8v6q+Nr6WSPXluo5bWvAZdo4nJbjHhrD1wuK0brJ6mUtsOzr0KUsSe7YUQEG+yGERFssBtGRDDNvsq0pBemNseg+7qb/bxbKuqxyZn0K2vjhqd32R4dxBb+3WeNzusDvm28zvbr+MKfuSUvH12jRGr+Y/OaiF8qytDYk90wIoINdsOICDaN7zQ8g6XptQ9PWXlaz6ml61Qd1Z9Oc5VWTpOc7OIUzBQ6yiGyXlWclvDZRSQAT+PjVFU38NyRWaoYS8ee7IYREWywG0ZEuKrBLiL3icjrInJIRB5cqU4ZhrHyXLFmF5E4gL8A8EEAJwG8KCKPO+deXanOGZrFqoRySuxYtj7Pnpep8EphpWziSma1jq5O69JJvousr7GB1tRQi9GyNmGsCFfzZL8DwCHn3BHnXBXAIwA+sjLdMgxjpbmawb4ZwAmvfbL5N4WIPCAiu0VkdyNfuIrTGYZxNaz6Ap1z7iHn3E7n3M54T261T2cYxjxcjZ39FICtXntL82/zUj16+vyxT/3xcQDDAM5fxblXC+vX8rB+LY929GvbfBvEcdLuJSIiCQBvALgXc4P8RQC/7Zzbv4T37nbO7byiE68i1q/lYf1aHp3u1xU/2Z1zdRH5dwD+DkAcwNeWMtANw+gMV+Uu65z7awB/vUJ9MQxjFemUB91DHTrvYli/lof1a3l0tF9XrNkNw7i2MN94w4gINtgNIyK0dbCvpcAZEfmaiIyLyD7vb4Mi8qSIHGz+P9DmPm0VkWdE5FUR2S8in10L/Wr2ISMiL4jIy82+faH59+0i8nzznn5bRFKLHWuV+hcXkT0i8sRa6ZeIHBORn4vIXhHZ3fxbx+5l2wa7FzjzIQC3APikiNzSrvNfhq8DuI/+9iCAp5xzNwJ4qtluJ3UAf+CcuwXAnQA+07xGne4XAFQA3OOcuxXAbQDuE5E7AXwJwJedczcAmARwfwf6BgCfBXDAa6+Vfr3fOXebZ1/v3L10zrXlH4D3APg7r/15AJ9v1/nn6dMogH1e+3UAG5uvNwJ4vcP9ewxzUYVrrV9dAH4G4Jcw5xGWuNw9bmN/tmBu4NwD4AnM5f9ZC/06BmCY/taxe9nOafySAmc6zHrn3Jnm67MA1neqIyIyCuB2AM+vlX41p8p7AYwDeBLAYQBTzrmLsbSduqd/BuAPAVzMWTW0RvrlAPxQRF4SkQeaf+vYvbQcdPPgnHPCidrahIh0A/gegN9zzs2IVzOqk/1yzjUA3CYi/QAeBXBzJ/rhIyK/AWDcOfeSiNzd4e4wdznnTonIOgBPishr/sZ238t2PtmXHTjTAcZEZCMANP8fb3cHRCSJuYH+Tefc99dKv3ycc1MAnsHc9Li/GScBdOaevhfAb4nIMczlVLgHwFfWQL/gnDvV/H8ccz+Od6CD97Kdg/1FADc2V0lTAD4B4PE2nn8pPA5gV/P1Lsxp5rYhc4/wrwI44Jz707XSr2bfRppPdIhIFnNrCQcwN+g/1qm+Oec+75zb4pwbxdx36mnn3Kc63S8RyYlIz8XXAH4VwD508l62ecHiw5iLlDsM4I/bvWBCffkWgDMAapjTdPdjTus9BeAggB8BGGxzn+7CnM57BcDe5r8Pd7pfzb69C8CeZt/2AfjPzb9fB+AFAIcA/B8A6Q7e07sBPLEW+tU8/8vNf/svft87eS/NXdYwIoJ50BlGRLDBbhgRwQa7YUQEG+yGERFssBtGRLDBbhgRwQa7YUSE/w/VwJHaGivXHAAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('pca_median')\n", "plt.imshow(data[14, ], origin='lower')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's also have a look at the PCA basis that was stored at the *pca_basis* tag. Here we plot the second principal component." ] }, { "cell_type": "code", "execution_count": 24, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 24, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAckklEQVR4nO2dXYxcR5XH/6d7vuzxZCeOjeO1EwLCLMrD4qysbBDRCpIFZQMieUAIFq38YMkPy0pBIEGyrFZC4gFe+HhAIGuD8ANLwqcSRbDgNUa7SKuQCXEgickmMclix/GE2E7GHntmuvvsQ197bp07XdU19/bHTP1/Umu6uu5H9b19pup/z6lToqoghKx/aoNuACGkP9DYCUkEGjshiUBjJyQRaOyEJMJIP09Wn5rUka3T/TwlIUnRePUcmnMXZKW6vhr7yNZp/PkXPtHPUxKSFC//y9c71nEYT0gi0NgJSYS+DuNJtYhRZsFgSE+96ooyz3PuiMjLuEOTHsGenZBEoLETkgg0dkISgZq931ip69OzgW0LGj0go2N1+aqhRh9K2LMTkgg0dkISgcP4tUwP845EudaASofu0S5F0hXs2QlJBBo7IYlAYyckEajZhw2fPjV1fXOlrcRada/lr+Fa/Q6rhD07IYlAYyckEWjshCQCNfug8YXE9tG/XPCri63vX1ssZc5d8NEnptPzsGcnJBFo7IQkQlfDeBF5EcAcgCaAhqruEZHNAB4EcAOAFwF8RFXP9qaZhJCyxPTs71XV3aq6JyvfC+Cwqu4CcDgrr0/UvNYBIuq8IHBf9juLdv8qnCxQ30NEOr9itu1m/2GnzDD+LgAHs/cHAdxdujWEkJ7RrbErgJ+LyOMisj/7bJuqnsrevwJg20o7ish+EZkRkZnm3IWSzSWErJZuXW+3qupJEXkTgEMi8vt8paqqdJgTqaoHABwAgPG37lgng2BC1h5dGbuqnsz+zorIjwHcDOC0iGxX1VMish3AbA/bWS2D9L3acw3Jv7+CBrV+9qiD+b+Uxur2KucA5M4tJW/8Wpt3HxzGi8ikiExdfg/g/QCeAvAwgL3ZZnsBPNSrRhJCytNNz74NwI+l/W9sBMC/q+p/iMhjAL4nIvsAvATgI71rJiGkLEFjV9XjAN65wuevAbi9F40ihFRPOrHxZfTUsPpUY79Tie8RzEmXrzca2867Dz62KKPRo5alsgkC7IOKUG5u871yxWHU7wyXJSQRaOyEJML6Hcb7hlFDNF2z1GgvNB4uTFPVjnV2yFozZamZsql3hup2+NtC523LUiL8tnD5KgzlHUa3HHt2QhKBxk5IItDYCUmE9aPZexkCW+LYoamQvnqr86rUuva8BQlvNHotoNlbOV1eaKdP3wPVhsMaBvZ4xj7zgHVH9rMxbdizE5IINHZCEoHGTkgirB/NHqBfoYxl0xX52hYKWdWQQs1XW00ZKJciFC4bClt1drZa2I/ve5R+BlJlaG4fYM9OSCLQ2AlJBBo7IYmwfjR7lcsVRcpV8Wjh0LRJbfn0qf+8hX19x4L5WjZ8vW42brr9gPW7e/VuYIqrJTjl1Ukl5afMs4Z+ZgwbROw8e3ZCEoHGTkgi0NgJSYT1o9ljGIK5xZeJio03mlybAc1uv2ctV183k8ztua2GD4hK/3OLiH2xwrOIGB0e4b/2zslHdLoAf7MKJ+9/7Dx7dkISgcZOSCLQ2AlJhDWr2UN+dJ/mKc6ntge357Jx43b7zn7gYF4zn1/eVjXM/+ZFUzYyXKy/O+cr17qpG7H61T2Y1Nzt7fz2/PeomecBtUCXUvCNm+cF+fsV0rIt+yiihIYvUEgd3VnzF44VaEc/HiOxZyckEWjshCTC0A7jy04V9Y6Lyq6k0sOQzPwwVZv+YbssmWGlccUVInVzd1vtMDxAIY2VuQb1keXx88hI06kbqfndfJaWaXjLGcabulCIsFfORTWr6GqLmG4bcvP1wxXHnp2QRKCxE5IIXRu7iNRF5AkReSQrv0VEHhWR50XkQREZ610zCSFlidHs9wA4BuCqrPwlAF9R1QdE5JsA9gH4RsXt65oq0y4HXW0xxwrUF0Jgc+41tRp90bi/jGYvhMtad1peOttQ20K73HO3zLEwaoq1xvL7uqvZN4wtec9lWWq6vrdm7nu1TLuWrJ/OUPxd5EuRNzaYFsz3gMDvtitMC+5BGquuenYR2QngAwD+LSsLgNsA/CDb5CCAu0u3hhDSM7odxn8VwGewHLZxDYBzqnr53/kJADtW2lFE9ovIjIjMNOculGkrIaQEQWMXkQ8CmFXVx1dzAlU9oKp7VHVPfWpyNYcghFRAN5r93QA+JCJ3AphAW7N/DcC0iIxkvftOACfLNiZGGxf8jiE/ZMwSzgG/ekz6oqB/1UqzvJYO+ZDNv2ob8lrQ7PXuNSUaVkMaX7nR7CO5ENmrJhacuk1jbtkuB2396gtN92e50FguLxo9b69vwz5rMGUnjiEUwhqst590H9Yb+0wpnxbMm8rMQ7BnV9X7VHWnqt4A4KMAfqGqHwdwBMCHs832AnhoVS0ghPSFMn72zwL4lIg8j7aGv7+aJhFCekFUuKyq/hLAL7P3xwHcXH2TCCG9YGhj4ytfLie3e2gqY2g5Y8+hgxRj4SPSQdupnza9s9XshnzsfDGu3jbMLTbNs1Udc3eo5+Lfrxq/5NRNj8075VGj/xda7s/wktHs52U8V3Jjtwr3csnd134t9/mA/3mKauB5i/fpjTlW4eEMl38ihPQIGjshiUBjJyQRBqrZvX71ijWNo+2CIjyk6X3rKgf2tTHSBd2d07M2dVQoZt/Guy91jq2vXwz4kG3Iudl8bKLhlN+06fyV99dvPOvUbR2bc8qj5gHB+ea4t3ymvvzA4A0Td39hydXw1oe/2DAavpX3hXeeN79iOTAtP8Z3XiaNdeE306XfnT07IYlAYyckEYbX9RZLwV1ms5/m3kam
mQpmHc1nlw2szmmPZTOvyvjyMNUOSS1Nk7ZKL7kxrPU5dyw+Mr/cuJGL7rEaG83wd9KknZpyp6lev9kdqr/z6uVo6bdPvOLUTddd15vl1caUUz7T2OSU89chNNRumvDYZiEl1nJ9y96cVqDvs8l8bTZf6d6tF6Jw5/P7rzItGnt2QhKBxk5IItDYCUmEgWr2op7tft+QNo7NOBS1q8cFVtT3/pTB9ZpdPaWVe+9uu7TkavClS+7tsxp9/Ix77rxOt661lv0lTC86xTdve80p37LlD075XZPPX3l/3cg5p26jcbVdMnNzX667SU1O1q52yku5xl4cdZ9LLLbcL7JQN9NjzZTY/PW2btDCkjp2eqzZWiJ+sMHfq93elIMrC3UBe3ZCEoHGTkgi0NgJSYR142cvs6prlRTCHgNay7dUkvUhNxomJdOce/vGzrnbj581Pv1chOvin9kVXt121UZd/bphxPWz25DXUVk++FTNrdtWd8Nfm+ZmTMjrpt5tzJnmst/9tSV/HkOblsoXEmvravY3ZJ6ntJr+tNX5e12cLgtTtlNgzbF85zHlbn/a7NkJSQQaOyGJQGMnJBHWrmaPTFvlaPpAauiCzg4t++Otdo9erxu/uufYS0ajN8+7Pubxs8avfs7d38a/533rF24ylRYzbfLYiWu95fHdy5p+qvZ7p26z0fCbahNOeVrd6bJjhRxZyzRMgMCiCRCwS0ctmfkDjVy9vfbBORB9JGbF8fxvyvfsij07IYlAYyckEWjshCTC2tXslpCG9+ixoFYLHCu/xFBBMtn0Q7XOfl9bXjDz02vnXT06atbJHLlo4u4X3fLsB9xlmKrkm0f/5sr7/75ul1P3jzuOOOVbJ1y/es0Izaa5ivO5NFVvLLl6/2LDvUYLDb+Gz8cqhJ6fFOYt2PsemgeRwy6BHcT3DIrz2QkhPmjshCQCjZ2QRBisZq9wDnoBn0aPPFRxmWUT15xPTxx5Nuujb+Y0ZeuCiX2fN7Hcbrh64eS91Og+nv7jdqf8yMbdTnli82NuWdwv8sylnU75hfmtV96fmr/KqTs7v8EpX1xwU0s3G51j5ZtGs9t5CiFl3GrZ+HfpXBc4Vj+Wg2LPTkgiBI1dRCZE5Nci8qSIPC0in88+f4uIPCoiz4vIgyIyFjoWIWRwdDOMXwBwm6qeF5FRAL8SkZ8C+BSAr6jqAyLyTQD7AHyjh23tG6G0v3aIlh+jFVbnMMUm/FMwG7lUU2JXdDErvthmnrndXT11WPjpszc65bfddNq7/W/euN4pH399y5X3r190XW+Li2bFF5te266SkyuquRc2DVhhNd/AKi7530XB1RaY8mrxpbGKSd+WJ9iza5vLa/uMZi8FcBuAH2SfHwRw9+qaQAjpB11pdhGpi8hRALMADgF4AcA51SszGE4A2NFh3/0iMiMiM825CyttQgjpA10Zu6o2VXU3gJ0Abgbwjm5PoKoHVHWPqu6pT/mzjBBCekeU601Vz4nIEQDvAjAtIiNZ774TwEn/3itQpbchIjy2sGsgRZDP1QbAmQ4aWlHTpgRWoxPRyO1vMxuPuds2J3rvrukFP/jjXzlle71nz7jutcaF0c4b2+sXytmU697EuN6aBU1udjXnKrjXPCvExqZF60UatW6exm8Vkens/QYA7wNwDMARAB/ONtsL4KHqm0cIqYpuevbtAA6KSB3tfw7fU9VHROQZAA+IyBcAPAHg/h62kxBSkqCxq+pvAdy0wufH0dbvhJA1wPqZ4hpBrBxSu+pvIS2w5+ABv3tB4+d0oRqN3hDTEJv/eY1w8qVrnHLdTt2dc7/XSO4y2KWlm5vca6Kj9hqhY1mNw9pONy7cO4MvXLZqze2uBr2651Nr89dCCImGxk5IItDYCUmEgWr21cb4liZyOmFoiqtTtrHYxXV+TdnU5zS7TJiUym5YOBo19/atFa/79JMmJfbrrs6uL7nlhdxSVfPXGp09bpexCq0Dlntf6+wnB1C4Vy37OMC31FRZzd6Dm8menZBEoLETkgg0dkISYbj87D5/YUg3h7bPV0XOJe4pJt66Nr6s08fG3WWRJsbc9E0Lm9zbZ1NPDytbj7qzH0fOuUtRtcbd76Fvn7ry/uLWyJsTs3koo3hgOegyvvXYpb477etrA3t2QhKBxk5IItDYCUmE4dLsPiKXaI7CHtqeujAH3bN/SPAX1od2i/k502OjrmafHF90yuOmfnbezfkpdq73gNj1VfdZQ+0PLztlveSmvK7tcJeDhixrdrNCM7QemM9egkKsfEijD8fl7gh7dkISgcZOSCIM7zC+RJqpFQ/nScUbdJkEUgo70ybtsDLk5hsxq5KMLrveNk24w9trJ99wyhtH3GH9zqlzTvmZ0+5wuF+uuR3fd89TP/2qU268dsYp1ybd3ITNzW750tXLN8BOcdVRO4yPWM03JHMCw3ZfCrIY11n09lzFlRDig8ZOSCLQ2AlJhMFq9pI6POpUPpeMf2ZjuJ1OtUmTZHSddYfVrWbPrSQ6WnPraqYd4zV3CuymifNOGdtecYon5qavvH/1zBSqpH5ief5tY4PR1Rvdubn1aza7O1+71Smev36jU750TW7l1Ukz7dfq7tBPKp9KOuAGDWr0MqsQhzy0sb/BLmDPTkgi0NgJSQQaOyGJMFjNHuNLrzI81hAb4Vog/y/TfgcbDuvxqwPAaH25XK/ZHNYuo0azT4/MO+WpTe4Szjs3nlsubHOPVQgNNRw/v8UpP3fa1dnINaXlZobG0rXu8wG5xvWjn79ug1Oeu97tgxY2L1+Hgl89kO65cD/ycRBRz2JWcS6nzr9r1LEMhSXEO8CenZBEoLETkgg0dkISYXhj43tJQA8VXJyFpXw7P2sQs7eYWPm6WSbYlkdyZetXt7q6GdDZW0bnnPJUbVnDT9fn7eYO55qur3vBzC39v/Fppzw/sdzWi1vcPqQ16vrZraZf2Ox+j0tbzffekLtGMbHvgKvR4c5FCMWjF+5zhQwijTp7dkISoZv12a8TkSMi8oyIPC0i92SfbxaRQyLyXPb36t43lxCyWrrp2RsAPq2qNwK4BcAnRORGAPcCOKyquwAczsqEkCGlm/XZTwE4lb2fE5FjAHYAuAvAe7LNDgL4JYDPlmpNGY0U4ZcMnSV2LnLMsWo2dbTxped961ajX2q688RfX3L904tGV1udfba27N8+VZt26paMkD69cJVTPj7nLrM8Pz/ulPNNXTRjvMakWUbJTKtvTrjXpLnRE18QWAK7uJyWqY5J1eVbmhtx8y2CBH3+5UV+lGYXkRsA3ATgUQDbsn8EAPAKCmEahJBhomtjF5FNAH4I4JOq6qRMUVVFh/lGIrJfRGZEZKY5d2GlTQghfaArYxeRUbQN/Tuq+qPs49Misj2r3w5gdqV9VfWAqu5R1T31qcmVNiGE9IGgZhcRAXA/gGOq+uVc1cMA9gL4Yvb3oSobFqWPYo9tyqUPHaGnQvnu8r7dplluyGr4hqlvmiWIl9Q4tHNS2Or5801Xg7+24P5jvrDopqm2c7t1fPmLLU0ZzW23HTFxC3V
/2cGzxDUAYKSzXx3w/47UPxWh1Bzz4O+34pyLK9FNUM27AfwDgN+JyNHss39G28i/JyL7ALwE4COVt44QUhndPI3/FTp3frdX2xxCSK8YaLhsmVUviweLGIzb8NfAsULNdL6H3dgMre10xGbTHYovNpaH3tZtV6+5tyvkmptvuEPv/PbWTXd+0R3Gv37RDXG9YFxtrUuuRMg31aoHG7KqdujtW2HHYve14bAm/Njnaov+/Zn74Tt2adlZYhXiTjBclpBEoLETkgg0dkISIc0prgF9r7Ghi3m9autaZrqmuP9fG+5CrM6U13rTFb9LgTRV9txW0+fLC0331s8vuXp/Ycmtt88WCufOa+eQb9NePqt9fe61kGutFtDV+WtSCIcNhOJaBjBNtQzs2QlJBBo7IYlAYyckEZLR7D7fZMEnGtL0Rjo7Ws+GhhaEnbtz0/y/Xcpp5ZAkXDLhsjaNlSUffpv35wNFjb644Gr41qJxnjc9SyGZLqTwDMR2MQW/e2dfuk3zVfOF1gYIpZ0qTHW2kt73PCC6Lfbc/vrVwJ6dkESgsROSCDR2QhIhGc3uI1YPFbSeb9UqW1fww7vFZk4LN+s2bt7saqVuQDI2c88TCtNjA370YormzpuqvSC2XVZnB3zjeZ0enGZauDfmGUp+9adQPEVoKe81Bnt2QhKBxk5IItDYCUmEdavZvTq88rxUERQ0vCnmtHSjYdM1udva2Hf7LMG3bJXV6C1TLvh9jY4uXF5ftxGYB17wV/v82aEYiGDyAU9d7O+gQr96bP1qYM9OSCLQ2AlJhDUzjK8022zJYXtheOwc0AxRI0+W39umsBKx/5tdv11xGI+O9YVhYkTKpZXq88cu7dKybcmfJ+RqiyGwa6UZjXswLI+FPTshiUBjJyQRaOyEJMKa0eyWkIYvo7eC+qqQvWh5h+KU1thY3JyutiGtBY0e+l/d2TUXDOO1RwpECOd1evS1D2h8p7bsaqYRu5d5TjQMGt3Cnp2QRKCxE5IINHZCEmHNavZeEp0SSPJvA9M7PfsGsamhzfTYYLu9SwrFaeEoXR7rGy+z9FEfQ6F7udJwL2DPTkgi0NgJSYSgsYvIt0RkVkSeyn22WUQOichz2d+re9tMQkhZuunZvw3gDvPZvQAOq+ouAIez8kBRdV+9RKTzq7ixeVnUvDyoivtq1cxLzMutb6l0fNljw7xC19d77e3xAvX2XM6xA6/i9wjv0821t98x+hoMAUFjV9X/AnDGfHwXgIPZ+4MA7q62WYSQqlmtZt+mqqey968A2NZpQxHZLyIzIjLTnLuwytMRQspS+gGdqnoHQap6QFX3qOqe+tRk2dMRQlbJav3sp0Vku6qeEpHtAGarbFQV+HygZTVV1P6RcfZldrW+8sL+ZfzXEduHjhUdx5DPShUZD+BN1TXkfvGqWW3P/jCAvdn7vQAeqqY5hJBe0Y3r7bsA/gfAX4jICRHZB+CLAN4nIs8B+NusTAgZYoLDeFX9WIeq2ytuCyGkhyQTGx+VWjpEySnrq8Zq8lCuN8/SR+XbYsqd0/AFd+0mvqDbbWN0eOllkatMS90HGC5LSCLQ2AlJhGSG8Q5lh7M9HLY7q4zGjgWrHLZXOFyOlT2x7jXvqa3U6eXwegiH7nnYsxOSCDR2QhKBxk5IIqSp2UtqSN/qqMGlj0pQCAEupIru2amD18C/c9yxoyikEO9eo5e+XkOu0S3s2QlJBBo7IYlAYyckEdLU7JbQ0r3orNGrPpd318glhqsMEe7ls4goSmj09vbL74c1fVSvYM9OSCLQ2AlJBBo7IYlAzd4NniWaY/etkujlh7yppCL96L4prhVPAfa2pcJnIOtdw7NnJyQRaOyEJAKNnZBEoGZfDT3U4T7dHdKUMUtLF1aWDmn0Es8DCpsOKE6hfe5y+69l2LMTkgg0dkISgcZOSCJQs5OVidToVeaNKxwb/ckXsN5hz05IItDYCUkEDuOHjL65hnqZUqnsCjukJ7BnJyQRaOyEJEIpYxeRO0TkWRF5XkTurapRhJDqWbVmF5E6gK8DeB+AEwAeE5GHVfWZqhpHIqlQ7xdcaaFj57YPPXeojbZW1yhSijI9+80AnlfV46q6COABAHdV0yxCSNWUMfYdAP6YK5/IPnMQkf0iMiMiM825CyVORwgpQ88f0KnqAVXdo6p76lOTvT4dIaQDZfzsJwFclyvvzD7ryOIfXv7Tix//3EsAtgD4U4lz9wq2Kw62K45+tOvNnSpEVxnFISIjAP4XwO1oG/ljAP5eVZ/uYt8ZVd2zqhP3ELYrDrYrjkG3a9U9u6o2ROSfAPwMQB3At7oxdELIYCgVLquqPwHwk4raQgjpIYOKoDswoPOGYLviYLviGGi7Vq3ZCSFrC8bGE5IINHZCEqGvxj5ME2dE5FsiMisiT+U+2ywih0Tkuezv1X1u03UickREnhGRp0XknmFoV9aGCRH5tYg8mbXt89nnbxGRR7N7+qCIjPW7bVk76iLyhIg8MiztEpEXReR3InJURGayzwZ2L/tm7LmJM38H4EYAHxORG/t1/hX4NoA7zGf3AjisqrsAHM7K/aQB4NOqeiOAWwB8IrtGg24XACwAuE1V3wlgN4A7ROQWAF8C8BVVfRuAswD2DaBtAHAPgGO58rC0672qujvnXx/cvVTVvrwAvAvAz3Ll+wDc16/zd2jTDQCeypWfBbA9e78dwLMDbt9DaM8qHLZ2bQTwGwB/jXZE2MhK97iP7dmJtuHcBuARtHPfDEO7XgSwxXw2sHvZz2F8VxNnBsw2VT2VvX8FwLZBNUREbgBwE4BHh6Vd2VD5KIBZAIcAvADgnKo2sk0GdU+/CuAzAC7Pnb1mSNqlAH4uIo+LyP7ss4HdS+ag64Cqqgwob7GIbALwQwCfVNU3RPJzxQfXLlVtAtgtItMAfgzgHYNoRx4R+SCAWVV9XETeM+DmWG5V1ZMi8iYAh0Tk9/nKft/Lfvbs0RNnBsBpEdkOANnf2X43QERG0Tb076jqj4alXXlU9RyAI2gPj6ezeRLAYO7puwF8SEReRDunwm0AvjYE7YKqnsz+zqL9z/FmDPBe9tPYHwOwK3tKOgbgowAe7uP5u+FhAHuz93vR1sx9Q9pd+P0Ajqnql4elXVnbtmY9OkRkA9rPEo6hbfQfHlTbVPU+Vd2pqjeg/Zv6hap+fNDtEpFJEZm6/B7A+wE8hUHeyz4/sLgT7ZlyLwD4XL8fmJi2fBfAKQBLaGu6fWhrvcMAngPwnwA297lNt6Kt834L4Gj2unPQ7cra9pcAnsja9hSAf80+fyuAXwN4HsD3AYwP8J6+B8Ajw9Cu7PxPZq+nL//eB3kvGS5LSCIwgo6QRKCxE5IINHZCEoHGTkgi0NgJSQQaOyGJQGMnJBH+H5EX7MaDdgLbAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('pca_basis')\n", "plt.imshow(data[1, ], origin='lower')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Signal-to-noise and false positive fraction" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Now that we have the residuals of the PSF subtraction, we can calculate the signal-to-noise ratio (S/N) and false positive fraction (FPF) of the detected signal as function of number of principal components that have been subtracted.\n", "\n", "To do so, we will first check at which pixel coordinates the aperture should be placed such that it encompasses most of the companion flux while excluding most of the (negative) self-subtraction regions. We will read the median-collapsed residuals with the `get_data` method." ] }, { "cell_type": "code", "execution_count": 25, "metadata": {}, "outputs": [], "source": [ "data = pipeline.get_data('pca_median')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "And we use the functionalities of `matplotlib` to overlay an aperture on the residuals after subtracting 15 principal components." ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 26, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAA0DUlEQVR4nO2deZRc1X3nv7/aq3f1otaKJBA7GLAJXgAbg4kBJwGfcRhjOyEnOEwmnoyT2IlxMpMZn5Mz48zkeJlxlkNiB5zxGGMDw2Jjg1lik7AJS4CEhHahtVvqtbq6utY7f3Shd3/fUi+Suqtaer/POTqq2+/Ve7feq1vvfu9vE+ccDMM4/Yk0ugOGYdQHG+yGERJssBtGSLDBbhghwQa7YYSEWD1PFm1tdrGejnqe8jRHdCtSUW1Xod/ysveaf+aP1ygToTc4ry9iFp5GUTo8jHImK8faVtfBHuvpwLK/+Ew9T3laIXQLK2X9h1RTQbUnxhOq7ca8250uq22o0MHp2Dy4o/T+ciH49YjE9Y+OUT8O/Ke/nnKbTeMNIyTYYDeMkFDXabxxfFRy+vZIQk+debpcKs3w253y9i/SvjHS2TRtjzWV9Lmob8nW/NHX+ayWD7Gk7neFJEOlENWnps9pzA32ZDeMkGCD3TBCgg12wwgJptkbTJrMZeOZ5NHXEme7uda65VGtjSsp0rqkuyUWHM+5yJTbAMCR/hdh05vW8AXPzBehY5WycX2sJK896DabGH18E9/ke83MN1vsyW4YIcEGu2GEBJvGzzNsVhKaWmdHUlNud+zFxl6o0amn6UDttF9NzakfETpWWasLlMuzfy5UaNrO7rP8uYTMfuwZWOPd528a0+dKdeVUu5C3r/jb2JPdMEKCDXbDCAk22A0jJJigmQN8bezYNERmJtbRoDDUGp1+HMeqjOvbGSHzWMw38zky47GpjUxafGxwUFwqOJe0kDmM1hqitLbAmr40oXV4unXi6OvxkbQ+b0tRtYtFvUbCayaY8KLz2vR7T3fsyW4YIcEGu2GEBBvshhESTLPPghr3TdKYvp5NLZpQ2zjcs4Y8/d4mAj0rZPtmnRxLUtgpba95v4cjzc7htDWZapJaZ7NN3+9bPEGutKTBWbNPDGgdzj4AiViwVpHj8xLl/PR+DeLpdHZVzo3PcK9OcezJbhghwQa7YYSEWU3jRWQ3gAwm85OWnHOXi0gngO8BWA1gN4BbnXND89NNwzBOluPR7B90zh3x2ncBeMo592URuava/sKc9q5BVChlUzxNttyRpN7eHqRkKkzoS+pISEcoDNVFp7Zvu5zWnzW+7uxjTvq0TH7jZc8HPcphppR2io+doGtQIH3rrw/wOkU0oXV2mY4dbdPamUN3x7JB/ABfE2nW/ZYsfaXJlu7rdPabr2T09Wruzar2qa7pT2YafzOAe6uv7wVwy0n3xjCMeWO2g90BeEJEXhGRO6t/63XOHay+PgSg91hvFJE7RWSdiKwrZ7LH2sUwjDow22n8Vc65/SKyGMCTIrLF3+icc8KpTIJtdwO4GwCSZy63UiGG0SBmNdidc/ur//eLyEMArgDQJyJLnXMHRWQpgP557Oe8wn7fQrq6OEZajWO/PX9sjuWOtk7vfy3kS+88zZ7q1Db7YoH908neT37gi5aMqvbQkdagz2yPjlIsPNvdSbNzumf/eJw6molx2iotu+Ga9bn81NN8b2p89jnGn849Phro/1iK7g0dOzuk7f8yTtdsEQX9L3BmnMaLSLOItL79GsAvA9gI4BEAt1d3ux3Aw/PVScMwTp7ZPNl7ATwkk25kMQD/1zn3YxF5GcD9InIHgD0Abp2/bhqGcbLMONidczsBXHKMvw8AuG4+OmUYxtwTSt/4CNmj23ozqj0+oTV6oUg2ZdKccd9HnfzVCzlKo0ya0nHZJX8bV0WmdoXsxLGUPjd/DuWHz77uXIKZ7O55ypWXXqRzvVU8//Uav3vKX1dTXZZi6Xk9wPfb5/PmxptUO0k56PJH2O8+eFmOzpCGmtZT0K41fipNVXNzC9sOb+6yhhESbLAbRkgIzTTed/90NGUdGdFTQU6rHG/W0zUOefWnqcVxSqPM02O2BdG5fDfWlV3DatuOTcv0e9NkwmL32KJ26x
UvhVOE5ENri57+sgQok3RhKeRXkC1zaihOLU0pr7i6TJncVv2w3xyZw/hY5RKloUrRVN1zX+ZQW5ZYnKqLXYh9Mx6gpdB04cWNwp7shhESbLAbRkiwwW4YIeG01ezTucBy6Cfnc3Jt7K5J2m2aUFPWahwey2aoEunCqGcO2t3XpbYlhkgLj+v3Fnv0uVxUm7BSTcH2fJ9epxgepWvCpjkyU2VzWjun2oIw3+KQPhane+YUWDXptekR5K+RRAf0e0sdZKbjsF92A/YOXmSzKIXPgt1+OfTZ+8zcTw4VqXF1bgD2ZDeMkGCD3TBCgg12wwgJjRcSc0T0LW3zrCzWOtG3gUaap7frVij8k9032R3UeesDrE8LQ7pfrAO7F+sw1OZEYNM/nGlW24oJ0oFdFBvK7p2En7I5SuGZ5Qn9mdNtOrx2Ykzb7JvJbTV7yOsr6X32W3CUpoqvN4eSOm95gDU6l5bi9QChc0Xiwfv5vKB7xy4RjtZb2J9A7UvrEM10Pcez+nrWA3uyG0ZIsMFuGCHBBrthhIRTVrNzCiZZrTUkqylfl7NmF/LNjpC/dU3KZrKhOk+Tsp5n3/gI+WMPj2p7dyYWaDkum+RYo3O9J9LK8UP6/cWlgU7ncFnWvjkuyUSfq5imK+zbmLN6G/vVc4iwq9CxaD1BvHvnmqa/VxVae8AofcW9Y8cp/VWJQoYd2dVB/a5wmWvvXnPYLmt0ji2osK/BPGBPdsMICTbYDSMk2GA3jJBwymr2mjRKBKdKbu4ZP/o6R+WJHOk8xzHmcfa/1vv7ZZonqDRUc/e4amcHtRZuX6xTYo2NB+93bAeeIUaa/cAja8f09mnKF7E92pHNPkIprwoZfawlqweOvj60r1Nt833yAWBilOLsSeO7GD2DvLgGjjVgW3c0TSmuOVW3v/8MpbgT5PteoH6jSAfw14LovMl2bWdn8rQ+Mx/Yk90wQoINdsMICTbYDSMknDKane2SQqV7Wpq0vhoablftiUSgMdkWnujQ700l9bGz41SiOUH2bo/Wbl28MjuWmmLPSUbIzt7WGvgLjFEcfZl8C96xdq9qv7ZnuWqXKB9bfG9wDUotZP/v1ZqSfeUTlCJ7gnIC9G3tCRqkmxNxem9E6/2l5xxW7f7BNt03by2iMEjXk+zsEYo9kBYqRe3Zv5ua9X1n3cz+7RHyx3Bxim/31jUKZKMvUDw7+4nwesF85LCzJ7thhAQb7IYREk6ZaTy7E1bIjDSUmzr9MAA0NQfT1MwRHTpaiugpVY5mUOWJGaZgnsTg9EPRGIXHUr/PWKWnsMPjgWmuROdl01osQsdmcxnJjUJ3sH+UUlpF6dhch5XTKsc7qMKs/7nIjbcpqd1fMxFtfjzY36Ha7Kbqp7wSkgg1rsw0/W1u0v3MHAoq2ebzx/f1r2Tp3lKFXt80Jzn9GZpJ3o0Ot6p2TSqulumr/54I9mQ3jJBgg90wQsKsB7uIREVkvYg8Vm2vEZEXRWS7iHxPRBZ2VTvDCDnHI1o+C2AzgLftIn8J4KvOuftE5O8A3AHgb+esZ1w1iU0RnIIpyqY5re2yGc9kQzqPtVglTi6Y5CpaptRH8MxUFTovl1UajmnT0VuHtGupeB+LQ0EvP3OPau8f0+bF3mXDqp0rUErnruAalcb1usWyzhHVPhxvUe3OZu32e3D9EtX2z1RK63vR06T16qFKh2p392iX4Qrd6sHDnimO7zuneyZqQo49Exe7PddUyeUyVBy9HJl6nSPSrc16Y2yCpXBk8JoUfSc5LPtEmNWTXURWAPgIgH+otgXAtQB+UN3lXgC3nHRvDMOYN2Y7jf8agD8B8PbPUReAYefc2z83+wAsP8b7ICJ3isg6EVlXzmSPtYthGHVgxsEuIr8CoN8598qJnMA5d7dz7nLn3OXR1uaZ32AYxrwwG81+JYBfE5GbAKQwqdm/DqBDRGLVp/sKAPvnsmM15XNHaf2PNE+UtHKESh/5oZCSp7BHKn3M4bOyV9uF46v0DCXlue5mDmj76Qj0D1y0XdtPl3RprVzwdOSRw9ptdMPeFar9gTO3q/ZTL12kz0W29FJrcE1iFJ45kNVuu9nDup17S3+uSjfZgf1T0ZrIxj261PRvvvMF1X583wWq3UZ2+SE/BJa0bdNifS/O6e5X7U0Hl6q2KgNGLsExcoetsCsufcei9B0tep+bfTP4mnCKKy4txX4ic8GMT3bn3Bedcyucc6sBfBzA0865TwJ4BsDHqrvdDuDhOe+dYRhzxsnY2b8A4I9EZDsmNfw356ZLhmHMB8flL+icexbAs9XXOwFcMfddMgxjPlhQvvG+f3ZxiFIAxakUMqU6KlNpnuKY1vjipXhyzaTvyVZbofBNIVMua7WelkA3jrVofR+jlFZrFg+o9o1LNqr211+67ujrZkplVKQUTE9vO0e1I51a65Y43bOnC4Uk4cTmDtVOj2uNufjqA6rNpala04FduZ/WGjj19iO7L1btAoXiNsVpPcBb50hTWOqVK3aq9u6MLnPdQr7xI2VvLYLTULOupibHRExwjIR6LxntY5TqjL5zKbrX2RFtl09660InmsLK3GUNIyTYYDeMkGCD3TBCwoLS7Kr8Dmn0KJfqIb0UIU1UU/Y3VZl6G7VBmr6o3ddrbKiHRgIbNJf9KZItN9ep9dbuiW59cM+OnB3W+v+ytdo3fv0ba1Q7QjHUaNV24+Rhb92CsyKR63WxVV/PwXHdl3N7tD17/ZbVR1939Gpf92xOr5/8xlkvqfb//tmHVDuT0Pc6vjdYv8mt1P18/sBq1eY0YJx6yi/ZxOspvHaT5xgI+o51dOlU3aOjwTVKUDxFidZbOD9DIc+5C8iG7+dJ4IxVs6wcZU92wwgJNtgNIyQsqGm8n94pStU6a9xlyd2wzFPYJrKXeaGRkYKe9zh6q+PqqJz5k/b3p1gRmhpesnaf7lZMT1H7J7Qbas+SwH12ZL2e4kfO1v2ItWlT26KV2nW08pB+/+rbtx19vXNIm6gmXtLtjq36+vZ16JDXyqJB3e/lw0dfH35rkdoWbdf93DSm3WcjbfqaDPZRdtlVnlmK02ORW7Qb0d+TLJvThoPt5U49xecqrkIurJEWvf9wn753bYuDaX2OzGM8bec0YCwpovT9jnsZeifo2OXSNCZAD3uyG0ZIsMFuGCHBBrthhISGanZO+xPztB1rnBpzA6feJY2PIa3dEsPBG/I9et/YKJniqNIHh4pWyFRX9MJtI5Q2KbaGtBelf940oNM7dXspnD70kTfVtu+u16EI8X59/Y40axdj9059rsH1Zx19nTpMJsEV+poUW/V2TvNVIXuPr5057PSqldql9d/3PKvaT2/QIa7sxrq0O1jHODyi1w5GRrXbrqPQUKHvmPPMkYld2kxXpsxRbjlXydHDJdGuNbxPnDQ4u28nO/Q6BbsUs0usciWnKsSRxPSpuY7uN6u9DMM45bHBbhghwQa7YYSExtrZSQf6qoXDTGNkq+WySkw+oT9a0TPdCqUXZtfRyITenjqi2+zh6iLBb6ZbpPu5g+zZEVqKGBgkDbohO
Pj+C3Wq6P9+5QOq/acvfFS1v3XVPar924/dqdr+ukXpQu3qedu5G1T7vp+/V7XduL6e57b2qfajfUFKrOJ+raOfKZ6t2k9tP1f3a4BuwFqtlVe3BTb9A28uVtuiOaq0mtDat5LiWN7gGhQWUdopChFe1avDkfsz+l6NZ/UaSdnT6ZzCmv1GuKprjL7PFbre/leSU7D5Y0GmcZ21J7thhAQb7IYREmywG0ZIaKhm55TNx0OBSzSTPTVKvvK+GTM2QttI/icHtfDJrKZ+TtNvGdA20MG81t2rVusSzVoVAu++dtPR1+MlfaxvH9A6enHPqGr/1d4Pq3Y0rz9HsT3QqAkKofzBlstUW0i/OipH9NDmS1X79y7556Ovv9F3vdrW9lOt4RfdpuMFto/odM8gm/TzL5wX9EvvCUdhp440e02ZJc9X3tE2Ti29a1+Paje361JerKvLyUCXF8gmn2rS17NS0d/BPNnOa8qfeV/geGL68NmpsCe7YYQEG+yGERJssBtGSFhQmr3saSD2dWffYccpg/hYLZSqty/YP6ErLqGozaeIlChunNIqS5nt8N42qjc8pKUw9uzSOrC1V9u7f74lsEl/8jKdvmnd4Bn6vUntm73jGZ2mijYjd2Fgv+Y0SDWxBwSXDK5QrPffvPqBo6+F8gEMXqK18cgruoxV5w7qZ4/uW9STu2Pn6w+V2q1t3RFaBMmdRz7onqZnXw7W947iM8bH9LliLfrYqoQzLS7kJ7QmL2en//5yWjbfbp+n9SrnpVHnPqtTTLnFMIzTChvshhESbLAbRkiou2b3dTqri4pn+41Ep9YsABCZoBhz0lsRKstcSgfHK5NJMzFC9ugWwS+dtQJdLU3YsPsA8uNaV8e1eRsVX0KRc3J6r9ZXuZVa53E+MXhlqr7zgrars67m1NExsjEXz9E+5v66h5uh1FENfO4461vvUIu1rl7cqS9Y5tle3U9thkfknXpRZawv2CE6oK+Xf18BoLRUr9Wkm7V9u7DLyxvXSg4WdE2SVJKpu03H6e/fp3OMlw8HAfHSpa9BOT99foDa7zt1LTe1LT3mr29x2Sn/HFNuCQm9LS34ym0fwYP/8VNqnH7qqsvwV5/6CC4+I3D4uGh5Lz57/ftwRk9H/TtqGCfJjINdRFIi8pKIvCoim0TkS9W/rxGRF0Vku4h8T0QSMx1rodCWDFZVD2ezuGhFL85b2oNVXUFW1Be2vYUfb3gTB4eCp9KVZ6/Cv/vgu3HnDe+ua38NYy6YzTQ+D+Ba59yYiMQBPCcijwP4IwBfdc7dJyJ/B+AOAH873YFEtHkindJT2iyCQcizSja9SY+eJnFK4eIoVYGtRJGIRvHH77sKH73gArzvnruRL09Of37viUdRLJexLTGI8lIHKQPf3L0e39y9HgAQaZvszbMHdmPZa+144OXXUKqa67pbmrAonsaOQ+z4OslED6VJoulbMaP7eeF5e4++3rRNm6hQ1Fdl5U/1sfb8G6qiQ31RU/dZVhGZan9XmX67z+GNOiz1rL/8V9Xe/+CFqj1O18Q3S5U7qcrNPjJDZajSal7bVV1b8P5lq/Q9O9jXodoFklgH+rTZtMYV1+tnS4uWAKMHddppR/eyiUywY6O6Ao9fraZCkqDsV0dyJ2F6c5O83ZN49Z8DcC2AH1T/fi+AW2Y6VqOpOId3r1iJjmQKFy8OdOPr/X3YMnAEZRZKxOuH+vDFHz2B1/cFsdx//tHrcP/nPoEPX3rONO80jMYzqwU6EYkCeAXAWgB/DWAHgGHn3Ns/k/sALJ/ivXcCuBMAYt3tx9qlbpQqFXzuJ48jlYrhtf5DJ328WCSCgbFxTBRL2LD7wMxvMIwGMqvB7pwrA7hURDoAPATgvOnfod57N4C7ASB11vITD3M7QX7vkncjFY3jaz97HgCwbXCgJlLqRClVKvjSQ0/hbx57HgNj43NyTMOYL47L9OacGxaRZwC8F0CHiMSqT/cVAPbP+P6SoDgcmCfiPWT68PRGjML48uNkoiJpEmezU8Th7I4ufP5dVwMAfty3EVtHDlf3pRRBeT42HStLunC1XmvYlQnaNy+7AJ94xyW446EHkSuVEOnR2i2Z1O8tl7WSemOj5xJLmnDRCm2S2v9+Ki9boXTazPHq9BM8FquhlrOHVbv8wXeqdn6r1ug3XLdetfsnAt39yvZValtxjb6+OEJ6n9KEtbUFYaoHDuoyVeByT1mqvEpVcXn/WDZoZ4ab9L50vdj9eIz2Z9NorCn43lTo+yj+d+pkTG8i0lN9okNE0gCuB7AZwDMAPlbd7XYAD890rHqzbXgAv//Pj+Kuf/nx0YE+nySjUfzxVVfjPStX4taLL5738xnG8TCbJ/tSAPdWdXsEwP3OucdE5A0A94nIXwBYD+Cb89jPE+aHu7YAAGJNM+w4B+TLZfzO/3sIF/cuwQNvbJr5DYZRR2Yc7M651wBcdoy/7wRwRe07Gs/H17wLz+zYhz2Z4bqfe+vAALYOHNsMZxiNpK7uspF4Ba1LMkfbnE63PBa0K0mtMDh0L5oke3X1WGe1deG/XHYT/vjiAq59/OsYKU5qukuWBUsKZzYdUe99bUQbEt7TuWvaz7FrXOeS/pe3gtDSfDzQjO2JFC6orMXL+4JzF1dRquMD2p6a8tI9X/2rr6ltnII5skovCsbIxlqm8sYnkwbseGC9yeWLRz+fUe3SHvoe0Od4ZUtwfYXcoKMZalNqaTeqazplFgV94dTRbL+uUNlvYZdVEsHllOeOTP0Eu8NOE4oKANE0lePy0lwJlRyvOO/6shu0x8Kqzz4H5Msl3L9jA/Iuf3SgN4IVLW146pZPI1cs4cq//XvkSzMsnhnGPHPaDfZ92RH86cuP16y4170fY6N4c/gIhkcn0JlO4WBmbOY3GcY8ctoN9oXErz/+HZQPhj7WyFgg1H2w+yl08yNaT6U6vbRJlIrXFXW7Qtpkcfcoruu9CLlyEeuHdiGdGFbbr+984+jrMomta1o362NF9VP46ez5qt0a1/Ig7dk58wOBBs/DIT1GpY1f0PGchQ7VVCGwT2zSpYxXPKKvQa5Lf472j2tXh137qU5VnYiQ0xKXK/7qFfeo9qfHb5/+gJ7tmFNe3Xrjc6p9/2Ztwy/S9yjmrfW0teo1j4k0hSPv1371SSpTVTpXv7/kn6smhJhiO9geTpqe036Jt57lKGVV1C+NFj0JO/upxO+fdxO+dvlvYWm6o9FdUSzvaENnc3rmHQ1jHjltBnsEgmf7NuHFI9uwe2z+HWhmyxc+/H489Ud34KOXXTjzzoYxj5w2mr0Ch69t+WGju1HDpoP9ODA8ip6WOnj1GMY01HWwV8oRHatMusVPb1yhck6RNu1Tnkjq1faBYa2vrl6i8xMXvbrMF6S0tn2/XjrAONlAh9O7VbtMjs6dzYF2G3ZtatsD+fV44IdVX+9zgRylL46PaB3opydmO/noar3v6Ll0DbYvUW1Jczru4PXxxKPPBmVbJ3t+vFnfu9/feJtqX3XGTtXeOKjLQb3r3N1HX7+y6Uy17dHdF6l2jTYmDeuX
Nx7J6B/g8oheW4jQ9yDfRSXGyUfd18uRlL72bFd3Q/pcrMMjlKbaeX75NXp/mhh2n9PmyR6PRNAaT6HsKhgpNM6+bhgLldNGs9+48ny8/NE/xH9914dn3tkwQshpM9gnSiWMFfM4NJ6Zeec68v6la/DEr9yJP3/X9TPvbBjzSH2n8Q5wvv8xSw9f18SntztGm7To/NehTXjPjzbCwSGVBoaLWo/tQJAHrTWqp/n/bUzbo7vj+gejQppoiHIf79wRpLjiFNaXRVbh7PZu/OyN3YjvTNeEG8co50UuHiwgdGwnzfghHc++qlWnNv7tM7TN+UvrflW1fZ3OGpK1bU05qJpayboZSwR6lmP0f/fin6v2t+++QbV/epFe52D/94MIcr9xGerRfr1W07ZY+0jk6N7lvRJOkVH99U9k9L6lFvqQHVRKitaVYp6fSDFD+VfJ9yC5RN/4Qr/+vvL9qHifWxLsZ49Zcdpo9tJsP3Gd+bvnXsJPNm8z33ij4Zw2g91H5jQly8lRcQ47jgw2uhuGUefBLoAkgidwNKFNGaXRYOrDUzlx2uyUpxBX5wS3rL4In3/HB/DsgR34n1sfUNvfuTgwt23I6mqoyYh+6j47qENJ1+1dqdoJ6rdMBH2reNVjBUCJMm9hKVUhbcmpdvLFINVUgfNzrtN/+M+fvk+1X83plE2XnLFPtTfs8j7HDL+HXIEHNHGqqcDrmQkT27S34Hef1oumuTP1e1u26elwdqW+aO2rAvkyurND94O+B2sW6R/WV4d0Om4ZD+5VfJmWQZw6GsOU/mlQT82bV1Plmv2BHJFWPeVnaq4vmZa5goxf9YXfO0NS5KOcNgt0APD64EEsa27HxZ1LF8TT/X9cfQP+4ZZbcHZXV6O7Yhin12DfMTqAW37yj7j5iX+Em6ng+DyTjMbw4VXn4Jo1Z5peNxYEp51mf23wYKO7AGAyicY1P/h7XN2xBm+NjMz8BsOYZ+o62CXiEPPcXEv5qU/vWsjdkDRiIs5iWLOkKYH39ZyHxw/8AgCwMxNMpSOiUzAP57TGHKfU0iVKn1Uc0emKmw55YbueFW9ktIDHoq8Cy4K/lcldNrdDpzMudQbi+INXv662/fRVHfL6Oz/8tGovWjOk2tEfaPnQ9etBbrxETF/fA3v1vvFWvbbA6xTZI9pU1PZG8LnGVmmBnytQxd0ztdlpvE/7K8dH9f5+mipHc9EY9evVzXrdogbPrbe0Z+rSUAAASh3tSEdnhshc5ul0ToVeKev3xun7G43SNaPrW/KugcSopNgs042ddk92AEhFY7j3vZ/B4lQ7DuWGsX5o58xvmiMS0ShuPPtsPLxlS93OaRiz4bTS7G8zUS7h0f0v4ZXBHTg0MTTzG+aQP3v/NfjqjR/BX1z3obqe1zBm4rR8sgPAPTufRsU9VfeFuid3bMdN55yD72/aWNfzGsZM1HWwO6fTUrHLZqorsDlzeCeHtLJL5qoubV/dP6Jt0r+x6gN4ct9W7MoMoszhsxSCiQGtyXn9IEIphUUCW+fP+nbj6v/z9xgvFYEkENmv1wNWX6oLQO4u96p2y5LA3XOooN/LvgeOykMNb9NrEU2L9fVt8cpav6dnt9r243/W4bFLrutT7QM/1/bqRf36R3To0uAa9q7Us6nD0O7IzU16PSDTTqmky7q9rG306OutMe1a6/ZobRtZThGPNe4Cwb2vCH3/OvV7J6jsd5Rs5+Ws7mfE0+nFrLbJs/vrBPT2ns5R1eaSZL6DaOUEU4SfltN45qNnXIa7Lr0W9133KSQiXLX85LioZzEe/MyncOkZQQz2eGl6hwrDaAShGOw/ObAJD+56HV95/WcoVCafbnPlcnPD2rNx3tIe/O41C7I4jmEc5bTV7D7jpQI+/8Kj6m+/eeFluGXtBfirdc/h+eHtsz7WRV296FzUguf27gEA/K+XXkB+sIR/en79DO80jMZS/8HupwUmreH7+CZTeipc4bQ+JFP6x7TNNB7Vdkzlge6Am886H+/sXYb2eBLSN6nNLlq8GGu7uvDyxH6MF4o4u6sL/dkx7D006RRzTk83fnjzb2LXwBBu/MY91T44/OPOlwFMXsz4ENmIz9K+74eGteZMdunt414I5tbnzlHbuq/ViTQHhvRnbt+gb2fTzYdUOzMRHHvj8DK17YaPvaDaD73wS6rdfOmw7ndK6+5uz46cjut7Fynqe1d4rUO1sVKXYcIyrZ33DgX7r1jbr7adv0h/xleP6FJe0Yhe1zhY9s5d1rp4YkCvkcTaqDwUhwWTX35l2NPhXO6pTGmnaA2qxleecN4aVU048iwJxTT+WHzyh9/H5575EZ7YEzzVf+288/GVG2/Ch89aiytWLMd9t/5b/OF7rzy6fefAIA5lxvDMmzuRjIViUmScRsz4jRWRlQC+DaAXkykL7nbOfV1EOgF8D8BqALsB3Oqcq69R+yTIlYr4wdbJssrR6m/epv4+PL51K/aMjGAol8OL+/YiUwieYKVKBVd/425E8o0PsjGM42U2j6cSgM85534hIq0AXhGRJwH8FoCnnHNfFpG7ANwF4Avz19X55+EtWyY936pj+bbv3w8AiC6ACDrDOFlmU5/9IICD1dcZEdkMYDmAmwFcU93tXgDPYhaD3U85HIlPnV0ml9N2yDKV8Uk0az2Vy+v92dfYTyHsJsh+SmV+K5RRSMgNn0Lrkb080N0VinlOkY907rC2C597rk5r/ebOwITXdJ3Wp6PPL1Ztt1h3bPi9WkfHyB97UVPQz3PbtR19V1b7xqeX6PROrFcLJX3sVe3BpG4oTz7jdJsv/5Aut/Wv63T+AC7ZPD4e3PtkXF/PrSP6mrBGH8zoFGJ+bEblsL5XlR5K36y7DTdM6Z/Zvu2vRzWRbwbp7CKloT7i9PpLeYK+ZH6m7tiJZWU6Ls0uIqsBXAbgRQC91R8CADiEyWm+YRgLlFkPdhFpAfAAgD9wzil3H+ecQ+0P4dvvu1NE1onIuvJo9li7GIZRB2Y12EUkjsmB/h3n3IPVP/eJyNLq9qUA+o/1Xufc3c65y51zl0fbmo+1i2EYdWA2q/EC4JsANjvnvuJtegTA7QC+XP3/4Vmd0S9BRHOBQi7QMRynHGvRepQ1eYzamWGtG51X6pftlEIasUTarURlgtlHHZ6Wc2nd7wnykU716Uu+bULnt0M6+Byj4zrOO7dar1Ok2vQ1yR/QP6aRbv0539oSKK3MGt2vIbLZczz2O5Zrn/6mmO7Lc9vWBu+lWO1Smsp8VbQeveiSPaq9cbf2AfDzrzUl9L3ZQ2Wpk7SWU6CcCZVRTyu3kPaldYmaskrkG99MMf8T3jpTKj19PyLN+tzFHMVrcI5F7tsJMJvV+CsB/AaA10VkQ/Vvf4rJQX6/iNwBYA+AW0+6N4ZhzBuzWY1/DlO7kl83t90xDGO+WFBuYIl0ME1iU49fyQMAkjStz9H2eFpPudQ0qV1vK7TRNJ1CF8Gmt2ZKJZ31q9zoY0X7dL8q52uTVnuazGWeHPnE6pfVtkcPvkO19w/qMF6WEEuadWWbvtZg/xKZ5ZJ0vdJ
JPQ3dMainyyMjZF4bCKawF71LV9BdP7ZatV9+7SzVBoXq9i4d1v0+1HH09f4xHcbLq04clhoZ0fdSuoPPxXKDKY5RVdc0u7jq/f30z+Ml3Y9a13D9PeHvK6dsk+iJmdt8Qusuaxhhwwa7YYQEG+yGERIWlGYveimbWeNEyRRRIFNFa7sOFc2M6nBFH19fAgAWa93sWMrxTyKZ4pxXoVO4pBWtByS5jCuxuj1Ir/XwgUvUtrE86UBaNhVKl/XaLip95K1FlNr0hyrs02a7iQ7Sq0P6esfH9clj5wd+VgfG9FpCsl2HrOYH9b1p6tD3bmBYmwHhpaJOdup9Z/oejCW1+dJPJc26uVxTKZjSQdN9Hx+itGF+abOadM/Ta27W6JHE3Jve7MluGCHBBrthhAQb7IYREhaUZvepSTNN7odFKslUKOo221CLfohslz5WlNYHKmUKeU2RiKft0Vzwm1mOad3cskP3ayymdfeSDm0L/8VbgftsL6UX7kzrsknXLNum2ruW6DDVjQeXqnbpSKBvx0ep5NISrXUTm7WG73qfTv80XqByxt5aBG9jnwkktH5duWhYtVlL74kFJbLyZEcXOlaJUozzukYsFejwArsyk/trilxzM2O0DhSfumBnzXcoo/ud6NDnSrPfSEbfHy75dCLYk90wQoINdsMICTbYDSMkLFjNzjSntM4eJW3GfsqlIpVo8sJa3RiXGyK7eQdVdOFyT2Rv9VMls4k+16u1Vqpda7O3Dmlf79a2QDtHySb/nu5dqv3QLu0rf163TilQmKASQp2BxhS6BiVOm7RG9/PgZkqJtUhfowtWByGwB0Z1uuz2Fm1nPzKs9WhfplW1m8kvX8VFUOkj1v+s9ytkO494KcbZH13oehfLXOZrev/25ubgc46x5qZ1H14vcNyXOdDojD3ZDSMk2GA3jJBgg90wQsIpo9m51FGlQKl22eV8ujK2MdpGJZllhGzIrOnJdltaFuhbLi1daWG7L+k+Shk8kQzOPXpE27r/aetVqt26Utvhdw1rO7ujtYaYH9u9QtvVS6OUiptSMFXosRAZ0NfoDQls+ues1GmquTRXD5d0PtCh2iMUBwFfvnJKMbrPJS5nTNq3OB58zgj7YpCffbpNrzWwf3uZ1kTyvu8HZ7ii72uMy4TXAXuyG0ZIsMFuGCHhlJnGMzx942m7o6m3cnHlCpuF6V0so0u0m6o/FQSAqHe88j6drqmSoGlmSk/9Yi1TT+cio/r2RJfqqXeEJEF2QvdLSCK45cG0lCvs8PUrj9N2Mh2R5RMt3pR3eEK7lQ4d0aa1eJM2rQlXBiL32oh37sqYvn6cZbjGPZam6s6rDMTTdlC12TxnfKXvDVc08l24K3T92K2XKfMFnQfsyW4YIcEGu2GEBBvshhESTl3NTiaVSn5q91hAm8tqtnHFTDLFlaliKVhi+i6ZSX3sWLfW2Y5sWBwKWS4Ft6R5jTatjQ1rLZxh11AyO8UW63P7GjU6SJVSqN9opbTJtK7BYb5jfYF5bXyC9m0iExenaCbX3Y6zBlXbDy2Nkrtxha4Bm0lZs/uppmOUGpqXedh8VuJ1jmnMvTUancOo5yDN1PFiT3bDCAk22A0jJNhgN4yQcMpqdoZT7wpLTE+vOrbRz1Ax03GKYSqz1NoSaOPhorYpFymUkXViimzOvr01Ru6ZnM45spSORamNCm/qUFNZGpyrQiZmLonFtu5o8/Slj3z9WuGDs92cUjQLfa4xql7rX5OalMrk2ow2/TlqSjx57y+xLwGteTjQOhD1m9d6/HtbojUkTi09F6mhjxd7shtGSLDBbhghYcbBLiLfEpF+Edno/a1TRJ4UkW3V/xdNdwzDMBrPbDT7PQC+AeDb3t/uAvCUc+7LInJXtf2Fue/ecVBjIyVR6flXJ5q0rmO/ZLZXszaL0vtHhpq9bRTiSnqfdSLb2du8tFSjVMKq0kHHHtLpiYtJ0vRpuga+rz35GrCubl+UVe2xLJVR4tCDbHDuCOlm9md3I1R+a4kOJWW/BuX/Ti4PxWZOO6U/1wSlcFafm9OLUZwC+7ez7i7FWONPTSM0OjPjk9059zMAg/TnmwHcW319L4Bb5rZbhmHMNSeq2Xudcwerrw8B6J1qRxG5U0TWici6ciY71W6GYcwzJ71A55xzmGYG45y72zl3uXPu8mhr81S7GYYxz5yonb1PRJY65w6KyFIA/TO+o86wRvLjnAtDWn9GOEUQ/XSxnzNrfD92vlxmUUmx8hQX3tyq9eqEVzqpiezmnLo4208/nuQ/IIv1sSte6inuR5T9FGR6X262X5d9WU0x5hVOxZUim3OW0kF16/wBeS/9U4Q+Y5KuEet9x/Ztz8dfyL+Cj80avpSj4cJxDf66RXrq0lCN4kSf7I8AuL36+nYAD89NdwzDmC9mY3r7LoDnAZwrIvtE5A4AXwZwvYhsA/ChatswjAXMjNN459xtU2y6bo77YhjGPHLa+MbPhB//7hJcIohj4WeIRSYdDj/Wm/KSsTZ2ZNvNHNK+9MnOwM7e1ay1677D2ncp1qb96pky+QtEvf0rA1T6uFtrzFxe28I5dXeBbOfwri/HmNekpabrx+srObKNxzxtXcpQmu92fb2jMVpLoH776y+8XlJivc+uCEmKmeB1oenSly8AzF3WMEKCDXbDCAmhmcb7cDgsm1x46uc4zRLjTdWTzXpqXaC00+yWyuG1zenAlLT/SIfuB80Sa8Im2WQoU087XXL66qd+WmQASPDncpRO2zM18ZSfQ29zY9oNON6mt3PJHT+dVrpLp9rKjWozaixVojaZz7zPNT5GUoZuM19PTk/O6c0WOvZkN4yQYIPdMEKCDXbDCAmh1OxMTVpf0vCJDq0pi/mpUwqzuyaXCKpxHSXzWDYX6MhEUuvNiZzWyTWuoVxhiCuJ+pozNv06BYcIl+ncfGy/2dqhTYajQ01T7wygQi7GFQ499VxzJ2gNhNcSKjOUUfLdgNn0NjaS5t31e08xjc7Yk90wQoINdsMICTbYDSMkmGY/Buz2WCpy+ScK9/RsuR2t2g48Tm6nHB6bH5+6BHEhP3154iK9l0sfx+La5lwseuWfyL5fU6KZ9T/pVdav/vpChrRvhNI9V5rJhZjt2ZwWzNseo7RfJVprYNdn9qlw3vUvFqms8imuyWfCnuyGERJssBtGSLDBbhghwTT7CcDlov0yzMOjbFPWOtD38wZqtXPO89dmG306TaWiSNuy/TpBetUvRVWmUFHUlBjWTU67nKTSyT6OSh85KtnM6aB4jSSamno9gX34+TPz9eSQV1/xF9hf4jTHnuyGERJssBtGSLDBbhghIVyiZZ7QvvUzpC7iKkBu6tRGEdL72QyVYOJ+kM05W6T9/XRQrNFJk3P67CTFnBcm9FfH19LxVr22UMxoXwMu0cTlthjx1h64XFaE1k8SCW2HZ1+FMGNPdsMICTbYDSMk2GA3jJBgmn2eqUkvTG2OQfd1N/t511TUY5Mz6VfWxmVP77I9uhKZ/nefNTqvD/i28RLbr6PTf+aavHx0jWKJqY/NayJ+qS
hDY092wwgJNtgNIyTYNL7R8AyWptc+PGXlaT2nli5RdVR/Os1VWjlNcryJUzBT6CiHyHpVcWrCZ2eQADyNj1JV3YrnjsxSxZg99mQ3jJBgg90wQsJJDXYRuUFE3hSR7SJy11x1yjCMueeENbuIRAH8NYDrAewD8LKIPOKce2OuOmdoZqoSyimxI+nSFHseo8IrhZWyiSue1jq6MKJLJ/kusr7GBmpTQ81EzdqEMSeczJP9CgDbnXM7nXMFAPcBuHluumUYxlxzMoN9OYC9Xntf9W8KEblTRNaJyLpyJnsSpzMM42SY9wU659zdzrnLnXOXR1ub5/t0hmFMwcnY2fcDWOm1V1T/NiWFXQeO7P7kn+0B0A3gyEmce76wfh0f1q/jox79WjXVBnGctHuWiEgMwFYA12FykL8M4BPOuU2zeO8659zlJ3TiecT6dXxYv46PRvfrhJ/szrmSiPwHAD8BEAXwrdkMdMMwGsNJucs6534E4Edz1BfDMOaRRnnQ3d2g886E9ev4sH4dHw3t1wlrdsMwTi3MN94wQoINdsMICXUd7AspcEZEviUi/SKy0ftbp4g8KSLbqv8vqnOfVorIMyLyhohsEpHPLoR+VfuQEpGXROTVat++VP37GhF5sXpPvyciiZmONU/9i4rIehF5bKH0S0R2i8jrIrJBRNZV/9awe1m3we4FztwI4AIAt4nIBfU6/zG4B8AN9Le7ADzlnDsbwFPVdj0pAficc+4CAO8B8JnqNWp0vwAgD+Ba59wlAC4FcIOIvAfAXwL4qnNuLYAhAHc0oG8A8FkAm732QunXB51zl3r29cbdS+dcXf4BeC+An3jtLwL4Yr3OP0WfVgPY6LXfBLC0+nopgDcb3L+HMRlVuND61QTgFwDejUmPsNix7nEd+7MCkwPnWgCPYTL/z0Lo124A3fS3ht3Lek7jZxU402B6nXMHq68PAehtVEdEZDWAywC8uFD6VZ0qbwDQD+BJADsADDvn3o6lbdQ9/RqAPwHwds6qrgXSLwfgCRF5RUTurP6tYffSctBNgXPOCSdqqxMi0gLgAQB/4JwbFa9mVCP75ZwrA7hURDoAPATgvEb0w0dEfgVAv3PuFRG5psHdYa5yzu0XkcUAnhSRLf7Get/Lej7ZjztwpgH0ichSAKj+31/vDohIHJMD/TvOuQcXSr98nHPDAJ7B5PS4oxonATTmnl4J4NdEZDcmcypcC+DrC6BfcM7tr/7fj8kfxyvQwHtZz8H+MoCzq6ukCQAfB/BIHc8/Gx4BcHv19e2Y1Mx1QyYf4d8EsNk595WF0q9q33qqT3SISBqTawmbMTnoP9aovjnnvuicW+GcW43J79TTzrlPNrpfItIsIq1vvwbwywA2opH3ss4LFjdhMlJuB4A/q/eCCfXluwAOAihiUtPdgUmt9xSAbQB+CqCzzn26CpM67zUAG6r/bmp0v6p9eweA9dW+bQTw59W/nwngJQDbAXwfQLKB9/QaAI8thH5Vz/9q9d+mt7/vjbyX5i5rGCHBPOgMIyTYYDeMkGCD3TBCgg12wwgJNtgNIyTYYDeMkGCD3TBCwv8HI5hr3hHflv0AAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "fig, ax = plt.subplots()\n", "ax.imshow(data[14, ], origin='lower')\n", "aperture = Circle((11, 26), radius=5, fill=False, ls=':', lw=2., color='white')\n", "ax.add_artist(aperture)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Next, we use the [FalsePositiveModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.fluxposition.FalsePositiveModule) to calculate both the S/N and FPF. We set position of the `aperture` to the coordinates that we tested and the radius of the aperture to 5 pixels. For the reference apertures, we will ignore the neighboring apertures to the companion (i.e. `ignore=True`) such that the self-subtraction regions will not bias the noise estimate." ] }, { "cell_type": "code", "execution_count": 27, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-------------------\n", "FalsePositiveModule\n", "-------------------\n", "\n", "Module name: snr\n", "Input port: pca_median (30, 57, 57)\n", "Input parameters:\n", " - Aperture position = (11.0, 26.0)\n", " - Aperture radius (pixels) = 5.00\n", " - Optimize aperture position = False\n", " - Ignore neighboring apertures = True\n", " - Minimization tolerance = 0.01\n", "Calculating the S/N and FPF...\n", "Image 001/30 -> (x, y) = (11.00, 26.00), S/N = 5.54, FPF = 7.28e-04\n", "Image 002/30 -> (x, y) = (11.00, 26.00), S/N = 4.85, FPF = 1.43e-03\n", "Image 003/30 -> (x, y) = (11.00, 26.00), S/N = 5.75, FPF = 6.01e-04\n", "Image 004/30 -> (x, y) = (11.00, 26.00), S/N = 7.43, FPF = 1.53e-04\n", "Image 005/30 -> (x, y) = (11.00, 26.00), S/N = 10.16, FPF = 2.64e-05\n", "Image 006/30 -> (x, y) = (11.00, 26.00), S/N = 8.92, FPF = 5.54e-05\n", "Image 007/30 -> (x, y) = (11.00, 26.00), S/N = 8.35, FPF = 8.02e-05\n", "Image 008/30 -> (x, y) = (11.00, 26.00), S/N = 5.59, FPF = 6.99e-04\n", "Image 009/30 -> (x, y) = (11.00, 26.00), S/N = 7.81, FPF = 1.16e-04\n", "Image 010/30 -> (x, y) = (11.00, 26.00), S/N = 6.46, FPF = 3.25e-04\n", "Image 011/30 -> (x, y) = (11.00, 26.00), S/N = 7.34, FPF = 1.63e-04\n", "Image 012/30 -> (x, y) = (11.00, 26.00), S/N = 7.17, FPF = 1.86e-04\n", "Image 013/30 -> (x, y) = (11.00, 26.00), S/N = 6.97, FPF = 2.16e-04\n", "Image 014/30 -> (x, y) = (11.00, 26.00), S/N = 6.32, FPF = 3.66e-04\n", "Image 015/30 -> (x, y) = (11.00, 26.00), S/N = 8.25, FPF = 8.55e-05\n", "Image 016/30 -> (x, y) = (11.00, 26.00), S/N = 9.85, FPF = 3.16e-05\n", "Image 017/30 -> (x, y) = (11.00, 26.00), S/N = 9.98, FPF = 2.94e-05\n", "Image 018/30 -> (x, y) = (11.00, 26.00), S/N = 8.71, FPF = 6.31e-05\n", "Image 019/30 -> (x, y) = (11.00, 26.00), S/N = 11.85, FPF = 1.09e-05\n", "Image 020/30 -> (x, y) = (11.00, 26.00), S/N = 9.01, FPF = 5.23e-05\n", "Image 021/30 -> (x, y) = (11.00, 26.00), S/N = 6.85, FPF = 2.37e-04\n", "Image 022/30 -> (x, y) = (11.00, 26.00), S/N = 6.41, FPF = 3.39e-04\n", "Image 023/30 -> (x, y) = (11.00, 26.00), S/N = 7.57, FPF = 1.38e-04\n", "Image 024/30 -> (x, y) = (11.00, 26.00), S/N = 6.88, FPF = 2.33e-04\n", "Image 025/30 -> (x, y) = (11.00, 26.00), S/N = 5.45, FPF = 7.91e-04\n", "Image 026/30 -> (x, y) = (11.00, 26.00), S/N = 5.32, FPF = 8.99e-04\n", "Image 027/30 -> (x, y) = (11.00, 26.00), S/N = 4.38, FPF = 2.33e-03\n", "Image 028/30 -> (x, y) = (11.00, 26.00), S/N = 3.06, FPF = 1.11e-02\n", "Image 029/30 -> (x, y) = (11.00, 26.00), S/N = 4.45, FPF = 2.18e-03\n", "Image 030/30 -> (x, y) = (11.00, 26.00), S/N = 
4.89, FPF = 1.37e-03\n", "Output port: snr (30, 6)\n" ] } ], "source": [ "module = FalsePositiveModule(name_in='snr',\n", " image_in_tag='pca_median',\n", " snr_out_tag='snr',\n", " position=(11., 26.),\n", " aperture=5.*0.0036,\n", " ignore=True,\n", " optimize=False)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('snr')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The results have been stored in the dataset with the tag *snr*. Let's plot the S/N as function of principal components that have been extracted. As expected, for a large number of components the S/N goes towards zero due to increased self-subtraction." ] }, { "cell_type": "code", "execution_count": 28, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Text(0, 0.5, 'Signal-to-noise ratio')" ] }, "execution_count": 28, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAfS0lEQVR4nO3de5wcdZnv8c+XEJYBgcElIhnMJioGMQGiI64GEUVO8MiBnKgoiwoqZt3VFY9r2ERcuS0SCHqUVVFEBBW5LMaAsHKRiyyKHANBg2AAkUsmQIIYl8gAITznj6qGTjOX6nTVVHfX9/169Svdv+rLU9NQT//uigjMzKy6Nis7ADMzK5cTgZlZxTkRmJlVnBOBmVnFORGYmVXc5mUHsCl22GGHmDx5ctlhmJl1lFtuueXRiJjQWN6RiWDy5MksXbq07DDMzDqKpPuHKnfTkJlZxTkRmJlV3JglAklnS1ot6fa6skWSfifpN5J+JKl3rOIxM7PEWNYIzgEOaCi7GpgWEbsDdwELxjAeMzNjDBNBRNwAPNZQdlVEPJM+/CWw81jFY2ZmiXYaNfRh4MLhDkqaC8wFmDRp0ljFZNaxliwbYNGVK1i1dpCJvT3MmzWV2TP6yg7L2lBbdBZLOgZ4BjhvuOdExJkR0R8R/RMmvGAYrJnVWbJsgAWLlzOwdpAABtYOsmDxcpYsGyg7NGtDpScCSUcABwKHhdfENsvFoitXMLh+w0Zlg+s3sOjKFSVFZO2s1KYhSQcARwNviYgnyozFrJusWjvYVLlV21gOHz0fuAmYKmmlpI8AXwW2Aa6WdJukb4xVPGbdbGJvT1PlVm1jViOIiEOHKP72WH2+WZXMmzWVBYuXb9Q81DN+HPNmTS0xKmtX7TRqyMxyUhsd5FFDloUTgVmXmj2jzxd+y6T0UUNmZlYuJwIzs4pzIjAzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIzs4pzIjAzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIzs4pzIjAzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIzs4pzIjAzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIzs4rbPOsTJe0IfBzYDQjgDuDrEfFIQbGZmdkYyFQjkDQTuAf4O2AQeBI4DLhb0huLC8/MzIqWtUZwGnA+8LGIeBZA0mbAN4AvAm8qJjwzMyta1kSwJ3BELQkARMSzkr4ELCsiMDMzGxtZE8GfgSnAiobyKcDaPAOquiXLBlh05QpWrR1kYm8P82ZNZfaMvrLDMrMuljURXAB8W9LRwC/SspnAKSRNRpaDJcsGWLB4OYPrNwAwsHaQBYuXAzgZmFlhsiaCowEBZ9e9Zj1wBjC/gLgqadGVK55LAjWD6zew6MoVTgRmVphMiSAingaOkrQAeEVa/PuIeKKwyCpo1drBpsrNzPKQeR4BQHrhX74pHyTpbOBAYHVETEvLXgxcCEwG7gMOiYg/bcr7d4OJvT0MDHHRn9jbU0I0NpbcN2RlGnYegaRLJW1bd3/YW8bPOgc4oKFsPnBNROwCXEPFm5nmzZpKz/hxG5X1jB/HvFlTS4rIxkKtb2hg7SDB831DS5YNlB2aVcRIE8r+SDKDGOCx9PFwt1FFxA3p+9Q7GDg3vX8uMDvLe3Wr2TP6OHnOdPp6exDQ19vDyXOm+5dhlxupb8hsLAzbNBQRH6q7f0RBn79jRDyU3n8Y2HG4J0qaC8wFmDRpUkHhlG/2jD5f+LtIliYf9w1Z2bIuMXG2pG2GKN86bftvWUQEz9dAhjp+ZkT0R0T/hAkT8vhIs0JlbfIZrg/IfUM2VrKuPno4MNR/lT3AB1v4/Eck7QSQ/ru6hfcyaytZm3zcN2RlG3HUUDqqR+lte0nP1B0eB7wTaGX10UtJkszC9N9LWngvs7aStcmn1lTkUUNWltGGjz5K0lxTW3a6UQDHZvkgSecD+wI7SFqZvm4hcJGkjwD3A4dkC9us/TUzHNh9Q1am0RLBW0lqA9cC72LjUT9PA/dHxKosHxQRhw5zaL8srzfrNPNmTd1oyRBwk4+1pxETQUT8DEDSFODB+tVHzWxkbvKxTpF1iYn7ASRNBCYBWzQcvyH/0Mw6n5t8rBNkSgRpAvgBsA9Jv4DYeKjnuKFeZ2Zm7S/r8NEvAxtI9it+Angz8B7gTl64bISZmXWQrIvOvQV4Z0T8TlIAayLi55KeAk4Eri4sQjMzK1TWGkEPyVBSSEYOvSS9fwewe95BmZnZ2MmaCH4H7Jrevw34mKS/AT4OeIlEM7MOlrVp6CvAS9P7JwBXAIcCT5HMCDYzsw6VdfjoeXX3b5U0maSG8EBEPDrsC83MrO2N2jQkabykhyW9plYWEU9ExK1OAmZmnW/URBAR60k2qh92iWgzM+tcWTuL/x1YIKmpPY7NzKz9Zb2wv5lkLsGApNuBv9QfjIiD8g7MzMzGRtZE8CjwwyIDMbPyZNlS07pX1lFDHxr9WWbWiWpbataWy65tqQk4GVSE2/w7mH/FWR5G2lLT/z1VgxNBh/KvOMtL1i01rXtlHTVkbSbrxuhmoxlq68yRyq37OBF0KP+Ks7zMmzWVnvEbbyniLTWrxYmgQ/lXnOVl9ow+Tp4znb7eHg
T09fZw8pzpbmKskMx9BJLeQbLa6MuBWRHxoKQjgT9ExDVFBWhD88bo1VXEIAFvqVltmWoEkg4DLgLuBqYA49ND44CjiwnNRuJfcdVUGyQwsHaQ4PlBAkuWeTV423SKGH0JIUm/Bk6OiAskPQ7sERH3StoDuCoidiw60Hr9/f2xdOnSsfxIs7Ywc+G1DAzRD9TX28PP57+thIisk0i6JSL6G8uz9hHsAtw0RPk6YNtWAjOz7DxIwIqQNRGsAl41RPk+wO/zC8fMRuJBAlaErIngTOB0STPTxy+TdDhwKnBGIZGZ2Qt4qKcVIetaQ6dK2g64GtgSuI5km8rTIuJrBcZnZnVqgwG8tIjlKVNn8XNPlrYCdiOpSdwREeuKCmwk7iw2M2vecJ3FTa01FBFPAEsl9QAzJd0dEffnFaQVw4vTmdlIss4jOEfSP6b3twBuBq4CVqQTzaxNedy5mY0ma2fxLOCX6f2DgO2AlwLHpTdrU16cLn9Llg0wc+G1TJl/OTMXXuukah0vayLYHlid3j8AuDgiVgMXkPQZWJvyuPN8uYZl3ShrIngYmCZpHEnt4Kdp+YuA9UUEZvnwuPN8uYZl3ShrIjgbuBC4HdgA1BaZewPwuwLispx43Hm+XMOybpR1HsEJkn4LTAL+IyKeTg89A5xSVHDWOo87z9fE3p4h1/pxDcs6WebhoxHxwyHKzs03HCuClxjOj5f/tm40bCKQNAf4cUSsT+8PKyIWtxKEpP8DHAkEsBz4UEQ82cp7mhXBNSzrRsPOLJb0LPDSiFid3h9ORMS4EY6PHIDUB9wI7BYRg5IuAv4zIs4Z7jWeWWxm1rymZxZHxGZD3S/I5kCPpPXAViSrnZqZ2Rgofc/iiBgATgMeAB4C/hwRVzU+T9JcSUslLV2zZs1Yh2lm1rUyJwJJu0v6bnox/pWkcyVNazUASdsDB5NsgTkR2FrS+xufFxFnRkR/RPRPmDCh1Y+1Fnl2rVn3yLrW0EHArcDLgJ8AV5AMJV0m6X+1GMPbgT9ExJqIWA8sBt7U4ntagTy71qy7ZB0++m/ASRFxbH2hpBPSYz9uIYYHgL9Nl7geBPYD3BPcxkaaXevRM2adJ2vT0KuA7w1R/j2gpQHUEXEzcDFJjWN5GtOZrbynFcuza826S9ZEsBp43RDlrwMeaTWIiDg2InaNiGkR8YGIeKrV97TieP0is+6SNRF8C/impGMkvTW9fQ74Bv71Xjlev8isuzTTR7AO+GfgxLRsFXAscHoBcVkb8+xay5N30CtfU3sWA0jaBiAiHi8kogw8s9isO9RGoDWu3XTynOlOBgUYbmZx0xPKIuLxMpOAmXUP7+/QHjI1DUl6MXASydDOl9CQQCJi2/xD6y6u/pq9kEegtYesfQTfBmaQdAyvIlkl1DJqrP7WJmABTgZWad7foT1kTQT7AfunY/6tSZ6AlY1rTdXj/R3aQ9ZEsJpk1JBtAld/R+daUzV5BFp7yJoIjgFOkHR4RDghNMnV39G51lRd3kGvfFkTweeAycBqSfcD6+sPRsTuOcfVVVz9HV2ztSY3I5nlJ2siuLjQKLqcq7+ja6bW5GYks3xlSgQRcXzRgXQ7V39H1kytyc1IZvlqekKZpK9L2qGIYKy6Zs/o4+Q50+nr7UFAX2/PsLNL3flulq+sTUP13k+yteSjOcdiFZe11uTOd7N8bcqexco9CrMmNLP6qbfUNBvdptQIzEqVtfPdncpm2TSdCCJimyICMWtGlmYkdyp3Hw8bLkZTiUDSy4HdSNYaujMi7i0kKrMcuFM5f2VeiF3DK06mPgJJ20r6D+AeYAlwCXC3pItq+xOYtRtvqZmv2oV4YO0gwfMX4rHqd/GS1cXJ2ln8FWB34K1AT3rbLy37ciGRmbXIW2rmq+wLsWt4xcmaCA4CjoyIn0XE+vR2PTAXmF1UcGataGZugo2u7Auxa3jFydpH0AP8cYjyx4At8wvHLF+e0Z2fsudveM2u4mStEfwcOFHSVrUCSVsDxwO/KCIwM2svZTe1uYZXnKw1gk8DVwADkn6Tlk0HngBmFRGYmbWXdlg80TW8Yigi266TaW3gMGDXtOhO4LyIGPOemv7+/li6dOlYf6yZWUeTdEtE9DeWZ928fh/gFxHxrYbyzSXtExE35BSndSFPAjJrb1mbhq4DdiLZsrLedumxcS94hRmeBFRl/gHQObJ2FotkNnGjvwb+kl841m3KHntu5Sh78pk1Z8QagaRL07sBfF/SU3WHxwHT8KghG0HZY8+tHF7nqbOM1jRUmzsg4E9A/f+9TwM3At9qfJFZTdljz5vhpoz8+AdAZxkxEUTEhwAk3QecFhFuBrKmdMokIPdl5KuTfgBYxj6CiDi+lgQkzZfUW2hU1jU6ZRKQ+zLyVfbkM2vOpmxM81ngImBtvqFYt+qESUBuyshXO0w+s+w2JRF4q0rrOm7KyF8n/ACwxKbsWWzWddyUYVW2KTWC3YBVeQdiViY3ZViVbcqexQ/mHUTa+XwWybyEAD4cETfl/TlmI3FThlXVsIlA0uMMPZv4BSJi2xbj+ApwRUS8W9IWwFajvcDMzPIxUo3gE2MRgKTtgH2AIwAi4mmSyWpmZjYGhk0EEXHuGMUwBVgDfEfSHsAtwFGNk9ckzSXZGpNJkyaNUWhmZt2vHUYNbQ68FjgjImaQLGI3v/FJEXFmRPRHRP+ECRPGOkYzs66VdT+CLYBjgEOBScD4+uMR0coy1CuBlRFxc/r4YoZIBO3Ia9OYWTfIWiM4ETgc+CLwLDAP+BrJonT/2EoAEfEw8KCk2oDt/YA7WnnPseBlds2sW2RNBIcAH4uIbwIbgEsi4pPAscD+OcTxT8B56X7IewJfyOE9C+W1acysW2SdR7Ajz/9KXwf0pvevAE5pNYiIuA14wT6a7cxr05hZt8iaCB4AJqb/3gPMIhnd80Y23qOgMrp1bRr3e5hVT9amoR+RtN1DMvnreEl/AM4hmRHc9pYsG2DmwmuZMv9yZi68tuW2/G5cm8b9HmbVlKlGEBEL6u5fLGkl8Cbgroi4rKjg8lLEpiPduDaNtxc0q6ZNWXSOiPgl8MucYylMURe4blubxv0e1i3cxNmczIlA0s4kS0G8hIYmpYj4Us5x5coXuGy6td/DqsXbjjYvUx+BpMOA35NsVP8pkuGetduYrEnUiuEuZL7Abawb+z2sejy0u3lZawQnkEwm+9eI2DDak9tNp2ygXrZu7Pew6mm2BcDNSM3NIzirE5MA+ALXjG7r97DqaaaJ081IiayJ4D+BNwD3FhhLoXyBM6uGZloAPFIukTURXA2cIuk1wHJgff3BiFicd2BmZpuimRYADyRJZE0E30z//ewQxwJoZfVRM7NcZW0B8Ei5RKZRQxGx2Qg3JwEz60geKZfYpAllZmbdwANJElk3pvn8MIcCeJJkIborIqJaDWtm1vE8kCR7jeA9JDuTbQ2sSssmkmwruQZ4GbBa0lsiomNHFpmZVVHW1Ue/CPwKmBwRkyJiEjAZuJlkstlE4C6grZeaMDOzF8paIzgWODgiVtYKImKlp
KOBJRHxXUnHAJcUEaSZWdUVOQO6mZnFWw5R/lcki9ABPAJslUdQZfOUczNrJ0XPgM7aNPRT4JuSXi9ps/T2euAMkslmANOBP7QcUcm8OYuZtZuiF9LLmgiOJPnFfzPwVHr7ZVr20fQ5jwOfySWqEnnlQjNrN0XPgM66Q9lq4ABJU4HaTIvfRcRddc+5LpeISuYp52bWboqeAZ21RgBARKyIiEvT212jv6LzeO8CM2s3Rc+AHrZGIOl0YEFE/CW9P6yI+GQu0bQB711gZu2m6BnQIzUNTQfG190fTuQSSZvwlHMza0dFzoBWROddx/v7+2Pp0qVlh2Fm1lEk3RIR/Y3lTfUR1L3Z5pJe1HpYZmZWthETgaT9JB3SUDYfWAeslXSFpN4C4zMz61pLlg0wc+G1TJl/OTMXXlvafKXRho/OB35SeyBpL+ALwLeBO4F5wDHpv2ZmXSvvFQfaab/k0ZqGpgM/q3v8HuAXEfHRiPgS8EngoKKCMzNrB0WsONBOk1dHSwS9wOq6xzOBK+oe/wrwcBoz62pFXLTbafLqaIngIeAVAJL+CpgB3FR3fBuS5SbMzLpWERftdpq8Oloi+AlwqqS3AaeQbETzX3XHdyfZnczMrGsVcdFup/2SR0sEnyfZivKnwIeBj0bE03XHP8zzq4+amXWlIi7as2f0cfKc6fT19iCgr7eHk+dML2XyaqYJZZK2A9ZFxIaG8hen5U8P/cpieEKZmY21btinZLgJZVlXH/3zMOWPtRqYmVkn6OZN7jdpZrGZmXWPtkkEksZJWibpsrJjMTOrkrZJBMBRJLOVzcxsDLVFIpC0M/BO4KyyYzEzq5q2SATAl4GjgWeHe4KkuZKWSlq6Zs2aMQvMzKzblZ4IJB0IrI6IW0Z6XkScGRH9EdE/YcKEMYrOzKz7lZ4ISNYvOkjSfcAFwNskfb/ckMzMqqP0RBARCyJi54iYDLwPuDYi3l9yWGZmlVF6IjAzs3Jlmlk8ViLieuD6ksMwM6sU1wjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzi2mrROTOzbrBk2QCLrlzBqrWDTOztYd6sqcye0Vd2WMNyIjAzy9GSZQMsWLycwfUbABhYO8iCxcsB2jYZuGnIzCxHi65c8VwSqBlcv4FFV64oKaLRORGYmeVo1drBpsrbgROBmVmOJvb2NFXeDpwIzMxyNG/WVHrGj9uorGf8OObNmlpSRKNzZ7GZWY5qHcIeNWRmVmGzZ/S19YW/kZuGzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKk4RUXYMTZO0Bri/oXgH4NESwilKt50PdN85ddv5QPedU7edD7R2Tn8TERMaCzsyEQxF0tKI6C87jrx02/lA951Tt50PdN85ddv5QDHn5KYhM7OKcyIwM6u4bkoEZ5YdQM667Xyg+86p284Huu+cuu18oIBz6po+AjMz2zTdVCMwM7NN4ERgZlZxHZ8IJB0gaYWkeyTNLzuePEi6T9JySbdJWlp2PM2SdLak1ZJuryt7saSrJd2d/rt9mTE2a5hzOk7SQPo93Sbpf5YZYzMkvUzSdZLukPRbSUel5R35PY1wPp38HW0p6f9J+nV6Tsen5VMk3Zxe8y6UtEXLn9XJfQSSxgF3AfsDK4FfAYdGxB2lBtYiSfcB/RHRkRNhJO0DrAO+GxHT0rJTgcciYmGasLePiH8pM85mDHNOxwHrIuK0MmPbFJJ2AnaKiFslbQPcAswGjqADv6cRzucQOvc7ErB1RKyTNB64ETgK+DSwOCIukPQN4NcRcUYrn9XpNYK9gHsi4t6IeBq4ADi45JgqLyJuAB5rKD4YODe9fy7J/6QdY5hz6lgR8VBE3Jrefxy4E+ijQ7+nEc6nY0ViXfpwfHoL4G3AxWl5Lt9RpyeCPuDBuscr6fAvPxXAVZJukTS37GBysmNEPJTefxjYscxgcvQJSb9Jm446ohmlkaTJwAzgZrrge2o4H+jg70jSOEm3AauBq4HfA2sj4pn0Kblc8zo9EXSrvSPitcA7gI+nzRJdI5L2yM5tk3zeGcArgD2Bh4AvlhrNJpD0IuCHwKci4r/rj3Xi9zTE+XT0dxQRGyJiT2BnkhaQXYv4nE5PBAPAy+oe75yWdbSIGEj/XQ38iOQ/gE73SNqOW2vPXV1yPC2LiEfS/1GfBb5Fh31PabvzD4HzImJxWtyx39NQ59Pp31FNRKwFrgPeCPRKqm0znMs1r9MTwa+AXdJe9C2A9wGXlhxTSyRtnXZ2IWlr4H8At4/8qo5wKXB4ev9w4JISY8lF7YKZ+t900PeUdkR+G7gzIr5Ud6gjv6fhzqfDv6MJknrT+z0kg2LuJEkI706flst31NGjhgDS4WBfBsYBZ0fESeVG1BpJLyepBQBsDvyg085J0vnAviTL5T4CHAssAS4CJpEsIX5IRHRM5+sw57QvSZNDAPcBf1/Xvt7WJO0N/BewHHg2Lf4sSbt6x31PI5zPoXTud7Q7SWfwOJIf7RdFxAnpNeIC4MXAMuD9EfFUS5/V6YnAzMxa0+lNQ2Zm1iInAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIrlKRzJF2W4/tNlhSSct28O+84zTqJE4Flkl4oI72tl3SvpNPSSW8jOQp4f46hPAjsBNyW43taTiRdL+mrZcdhzdl89KeYPeenwAdIVkF8M3AWsDXwD41PTKfAb4iIP+cZQERsIFkMzcxy4hqBNeOpiHg4Ih6MiB8A55EugZtuAHK7pCMk/R54Cti6sckl/cX4dUlfkPRoutnLaZI2q3vOFunx+yU9ldY+Ppke26hpSNK+6eMD041HnkxXbX1d3fv9taTzJa2UNJhu8vGhZk9e0q6SLpX0Z0nrJN0kaXp6bDNJ/yrpwTTm5ZIOrnttLe73SfpZGscySbtLmibpF5L+IulGSVPqXlf7ux4p6YH0dUsk7VD3nKyf/S4lm808oWQDl/0bzm83SZdLejz9Xs6X9NK64+dIukzSUUo2e/mTpO9I2qp2HHgLyUKJtdrjZEnjJZ0uaVUa34OSFjb797fiOBFYKwZJagc1U4C/A94D7AE8OczrDgOeAd4EfAL4FPDeuuPnAh8k2YDj1cBHgLWjxHIa8C9AP3AvcFntAgVsCdwKHAi8BvgK8E1J+43yns+RNJFkY5AgWfPltcDXSKb/Q9IENi+NYTrJMiGLJe3Z8FbHA6eQLJO8Fjgf+HfgGJIF0bYETm94zWSS5rWDgbcDuwBn1x3P+tknpe+9B8k6XRcoWa2ztibPDSRr8eyVfs6LgEvqkzRJTXBaevy9JOv3HFUXx03Ad0ia73Yiacr7ZPq896WxvxdYgbWPiPDNt1FvwDnAZXWP9wIeBS5MHx8HrCdZz36k110P3NTwnKuBs9L7u5BcbA8YJo7J6fH+9PG+6ePD6p7zIpKL7JEjnM8Ftc8cKs4hnn8Sydo7WwxzfAD4fEPZ9cD3G+L++7rjB6Zlc+rKjiDZUav2+DhgAzCprmzv
9HW7tPDZfWnZ3unjE4BrGt5j+/Q5e9X9jR4ExtU951vATxs+96sN73M6cA3pkja+td/NNQJrxgFpk8iTJL/8bgD+qe74yoh4JMP7/Kbh8SrgJen9GSSLhl3XZGw31e5EsqvTcmA3eG5zj2OUbE7yR0nrgDkkC6tlNQO4MZKd8DYiaVtgIvDzhkM31mKoU3/utb/V8oayretqMwADEfFA3eObSf5Gr27hs1el/9b+7q8D9km/33Xp36i26dMr6l53RyT9NPXv8xJGdg7Jwm93SfqapHc21DKsZO4stmbcAMwl+eW/KiLWNxz/S8b3aXxdUGwz5WeAfyZpulhOsvfwFxj9ApaHxlUd1w9xbKiyPP4ew352RISk+s/ZDLic5G/VqD65N/3dRbKP8GRgFrAfSdPfryXtH8k+AVYyZ2VrxhMRcU9E3D9EEsjLbST/Xb61ydf9be2OkiGt00jWboekKeXHEfG9iLiNZLu/VzX5/suAvZXse7GRSHbCWgXMbDi0N3BHk58zlD5J9Rsw7UXyN7ozx8++laT/5P70O66/Pd7E+zzN8/0mz4mIxyPi4oj4B+CdJPvuvrKJ97UCORFYW4mIu0jWwz8rHeUyRdKbJX1glJd+TtL+kl5D0pH6NPCD9NhdwH6S9pa0K/BVko7tZnydpO/hIkmvl/RKSYfWdcguAj6Tlr1K0gkkHaunNfk5QxkEzpW0p6Q3At8ALo+Iu3P87K8B2wEXSnqDpJdLerukM5VulJTRfcBe6WihHdIRTZ9OY3u1pFeSDCj4b5L9dq0NuGnI2tEHgRNJOhl3ILlg/N9RXjOfZD/aqcBvgQMjotZU9W8kF/6fkFxUzyEZ+trYhj6siBhQsnf0IpL+iyBpZpqbPuV0YBvgVJIN31cA74qIX2f9jBHcR9K5/WOSv8dVwJF1x1v+7IhYJWkmcDJwBcnopQfSz2pm05PTSJp+7gB6SP7uj5OMaqoNBFgGvCMinmjifa1A3pjGOpqkfUkuzBMi4tFyo8mfpOOAd0fEtLJjse7lpiEzs4pzIjAzqzg3DZmZVZxrBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhX3/wFSiZHBERvzoAAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('snr')\n", "plt.plot(range(1, 31), data[:, 4], 'o')\n", "plt.xlabel('Principal components', fontsize=14)\n", "plt.ylabel('Signal-to-noise ratio', fontsize=14)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Relative photometric and astrometric calibration" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "With the next analysis, we will measure the relative brightness and position of the companion. We will use the [SimplexMinimizationModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.fluxposition.SimplexMinimizationModule) to minimize the flux within a large aperture at the position of the companion while iterative injecting negative copies of the PSF. This procedure will be repeated for principal components in the range of 1 to 10. We need to specify two database tags as input, namely the stack of centered images and the PSF templates (i.e. the stack of masked images) that will be injected to remove the companion flux. Apart from an approximate position of the companion, the downhill simplex method of the minimization algorithm also requires an estimate (e.g. within ${\\sim} 1$ magnitude from the actual value) of the flux contrast." ] }, { "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-------------------------\n", "SimplexMinimizationModule\n", "-------------------------\n", "\n", "Module name: simplex\n", "Input ports: centered (70, 57, 57), psf (70, 57, 57)\n", "Input parameters:\n", " - Number of principal components = range(1, 11)\n", " - Figure of merit = gaussian\n", " - Residuals type = median\n", " - Absolute tolerance (pixels/mag) = 0.01\n", " - Maximum offset = None\n", " - Guessed position (x, y) = (11.00, 26.00)\n", " - Aperture position (x, y) = (11, 26)\n", " - Aperture radius (pixels) = 10\n", " - Inner mask radius (pixels) = 5\n", " - Outer mask radius (pixels) = 55\n", "Image center (y, x) = (28.0, 28.0)\n", "Simplex minimization... 1 PC - chi^2 = 3.64e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (12.91, 26.00)\n", " - Separation (mas) = 54.81\n", " - Position angle (deg) = 97.56\n", " - Contrast (mag) = 5.69\n", "Simplex minimization... 2 PC - chi^2 = 1.87e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (13.49, 26.75)\n", " - Separation (mas) = 52.42\n", " - Position angle (deg) = 94.92\n", " - Contrast (mag) = 5.45\n", "Simplex minimization... 3 PC - chi^2 = 2.88e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (13.31, 26.66)\n", " - Separation (mas) = 53.12\n", " - Position angle (deg) = 95.23\n", " - Contrast (mag) = 5.49\n", "Simplex minimization... 4 PC - chi^2 = 4.44e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (13.13, 26.27)\n", " - Separation (mas) = 53.89\n", " - Position angle (deg) = 96.62\n", " - Contrast (mag) = 5.55\n", "Simplex minimization... 5 PC - chi^2 = 3.00e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (12.76, 26.43)\n", " - Separation (mas) = 55.16\n", " - Position angle (deg) = 95.88\n", " - Contrast (mag) = 5.63\n", "Simplex minimization... 6 PC - chi^2 = 2.78e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (12.60, 26.44)\n", " - Separation (mas) = 55.71\n", " - Position angle (deg) = 95.80\n", " - Contrast (mag) = 5.62\n", "Simplex minimization... 
7 PC - chi^2 = 3.61e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (12.02, 26.26)\n", " - Separation (mas) = 57.87\n", " - Position angle (deg) = 96.22\n", " - Contrast (mag) = 5.82\n", "Simplex minimization... 8 PC - chi^2 = 4.30e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (12.21, 26.25)\n", " - Separation (mas) = 57.17\n", " - Position angle (deg) = 96.32\n", " - Contrast (mag) = 5.73\n", "Simplex minimization... 9 PC - chi^2 = 2.96e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (11.33, 26.18)\n", " - Separation (mas) = 60.37\n", " - Position angle (deg) = 96.22\n", " - Contrast (mag) = 5.98\n", "Simplex minimization... 10 PC - chi^2 = 2.97e+02 [DONE]\n", "Best-fit parameters:\n", " - Position (x, y) = (11.59, 26.26)\n", " - Separation (mas) = 59.42\n", " - Position angle (deg) = 96.07\n", " - Contrast (mag) = 5.82\n", "Output ports: simplex001 (89, 57, 57), fluxpos001 (89, 6), simplex002 (70, 57, 57), fluxpos002 (70, 6), simplex003 (75, 57, 57), fluxpos003 (75, 6), simplex004 (73, 57, 57), fluxpos004 (73, 6), simplex005 (63, 57, 57), fluxpos005 (63, 6), simplex006 (79, 57, 57), fluxpos006 (79, 6), simplex007 (71, 57, 57), fluxpos007 (71, 6), simplex008 (66, 57, 57), fluxpos008 (66, 6), simplex009 (60, 57, 57), fluxpos009 (60, 6), simplex010 (78, 57, 57), fluxpos010 (78, 6)\n" ] } ], "source": [ "module = SimplexMinimizationModule(name_in='simplex',\n", " image_in_tag='centered',\n", " psf_in_tag='psf',\n", " res_out_tag='simplex',\n", " flux_position_tag='fluxpos',\n", " position=(11, 26),\n", " magnitude=6.,\n", " psf_scaling=-1.,\n", " merit='gaussian',\n", " aperture=10.*0.0036,\n", " sigma=0.,\n", " tolerance=0.01,\n", " pca_number=range(1, 11),\n", " cent_size=0.02,\n", " edge_size=0.2,\n", " extra_rot=-133.,\n", " residuals='median',\n", " reference_in_tag=None,\n", " offset=None)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('simplex')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "When running the `SimplexMinimizationModule`, we see the $\\chi^2$ value changing until the tolerance threshold has been reached. The best-fit position and contrast is then printed and also stored in the database at the `flux_position_tag`. If the argument of `pca_number` is a list or range (instead of a single value), then the names of the `flux_position_tag` and `res_out_tag` are appended with the number of principal components in 3 digits (e.g. 003 for 3 principal components).\n", "\n", "The `res_out_tag` contains the PSF subtraction residuals for each iteration so the last image in the dataset shows the best-fit result. Let's have a look at the residuals after subtracting 10 principal components with the best-fit negative PSF injected (i.e. which has fully cancelled the companion flux)." 
] }, { "cell_type": "code", "execution_count": 30, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 30, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAAqZUlEQVR4nO2de6xlV33fv7/zus+5c+c9dx54jG2MhwaMMzgOoATsQglFYKXUgtLKkSxZrahEEqRgJ6USUis5qhQSKVEit6C4Eg04CWDHgYDrGCpaYnuMH/g9Y2N7Zjwzd1535r7Pa/WPezz79/uue/Y+93XOHe/fRxrNWXfvvfba++x19vqu32NJCAGO47z1KfS6AY7jdAfv7I6TE7yzO05O8M7uODnBO7vj5IRSN09WHBoK5dHN3TzlWwuhMhlSAm0X2i7N5Z+a647I2u50hdrEWTSmpxf9Nrra2cujm7H387/TzVO+teDO3LDlRsWWC3VbLk8mFQQe02X9kPD+VG5UkgP4RyY61lkzjvzZV9tu86/BcXKCd3bHyQldHcY7HaCG04V5HrdTkTR4s5ThDanrJglQG7LlQo3qLlNVdHxRtbVZse0oVDOug+VIv3t1rgX+ZnecnOCd3XFygnd2x8kJrtm7DenV2gYrvPvOFJNdScvyT7OQaa1IGp81vTaBsZkuaiYdy2a8ULTlutLZ2sQHxKa4+kD7di1WLs4l9XG7XN93jr/ZHScneGd3nJzgw/i1hkeZVNbDdsAOYSXLq62Yvr3BJrCGqpCGwywZoqE1nYslREUNtVkixHKC/XipyNcxmPyhOGN37jubbtab3+TD/DfxN7vj5ATv7I6TE7yzO05OcM2+GmidSBKxSK6iTbrj7OIqzfaRaVq7AtYkBcQ6OvopV7qc6xJyjy1PUbupqsgE1pd8Ls3abWxqy4qwK9Rph6o6lPad227/wPesRNGeBVVXfShfet7f7I6TE7yzO05O8M7uODnBNXsHNDkDTI3L2sZMGpLs06GYrrtNKCn9FEchr2x352+TQ2DLKpsM6fvSTLqLaxyGirZUN7Sfh1j4gy3yPYjq25jUV7lA7eTvgs7F4ba1Ddqt195g1vBa378V8De74+QE7+yOkxM6GsaLyKsAJrFgvKmHEA6IyGYA3wKwD8CrAG4JIZxbm2Y6jrNSlqLZPxxCOK3KdwB4KIRwl4jc0Sp/aVVb1ytIQhbmbbk0a3eoDyitx7Ka5WhmFtfQblNk+9a2bSC2fZdJC2udzVqX9X6d0lDx3EOR7on2H9BzGECshTkUl8NUOY2VsduzTb6WXlcUP6D3HbD7ViZsXTWee+CQ40uMlQzjPwXgntbnewDcvOLWOI6zZnTa2QOAH4rI4yJye+tvO0IIx1ufTwDYsdiBInK7iBwUkYON6ekVNtdxnOXS6TD+gyGEYyKyHcCDIvKC3hhCCCJsrLm47W4AdwNA/+69+fJPdJx1REedPYRwrPX/uIh8B8D1AE6KyFgI4biIjAEYX8N2rims+yLoJ4r927XeZb3JdvXSVLrNuax8uasj9ljW6NGqLGWyKVN6aG1L51TSTU55lRHvHqWpUtcR63uOD1jab76ur9GX7qfA7aqO2AspXUguNIpbIH8K9qvnuYdLTcNnDuNFZEhENrz5GcBHATwD4H4At7Z2uxXAfWvVSMdxVk4nb/YdAL4jC2lTSgD+VwjhH0TkMQD3ishtAF4DcMvaNdNxnJWS2dlDCK8AeM8ifz8D4Ka1aJTjOKuP+8YDqG60uk4aVqtVzpMtl+yz2s+cfbFZ97EuLM7ZstGFWSutEjxfUGB/AdXOOsfGz6ZXzvEA9UFqWkNvI/s13b/onnCMP81N6PmAyG0hJa4eACpn7U3U8e7REtc8T8HlrHwB63z62d1lHScneGd3nJyQm2G8NjvVB9KH2hySyeaeyB1U1cdupNkrltC51DA/MmFlhJ1G6Z5p6K2HpVJPlxeRuZHNfpynSr022J2YPTB4RdgobXXK8LpAEotNiNH9TwlPrg/Zi+B0WNE9oWvm50Z/13xN64F12CTHcdYC7+yOkxO8sztOTnjLanY2Q81vSQQXLyHEoaM6DRKwSBglL6uk0hdxeqcm6WjW8FFIpnIl5fP2UbaAuW322PKFdM2pl0Lia0aW6yfPD9CTo+8Jr+LKZrtIs1NTOBV1s6RWceX0WJX0Mmt6/XpjN97IPZbmdvjVyPdXXxebH/me9AJ/sztOTvDO7jg5wTu74+SEt4xmT03JDOsOynb0IqUMznJxZb1r3FBpqaOsMEh2FU07NkoVzXXRNUfnVs3mNMmcGprtxKy7ea5Ba1IOM53fnD4HEtn0bdG4J3OIcHTNkYsr7a98JuqDKc4CyF5KKva/SD7z88jhsVxXN/A3u+PkBO/sjpMTvLM7Tk64ZDU7h6HGO7QP4WQf8lmyV7MvNy+7FKWl0iGYJAOzUjQFStFUmkl+f8tT9lieD2BdWJqhc1M7yyrUtDq6NBt9fTh9iWetb+NwWNardntmaun59t8d+7czvFxUVWn8wjxpdE47Tc+B9tUA4vkD/f1EPvpks2/wd5kRYrwa+JvdcXKCd3bHyQne2R0nJ1yymp1hmzLbV/XPGqeOYr/lGvnGNylFM2tMXTf7r8+P2nJ5kg6ldmrtW6PUT+wzXhu2ZU4VxcsXaU3KdnbWq5E/Oz0pfedsu2d2KT1LBmq2KffRMkscixChquN02VEMOs2BsP+ATh/dIDt7aLS3yQPZy1obfwua4wgD9lyNCvl6zKasU7VK+JvdcXKCd3bHyQne2R0nJ1wymj3KB4Z0u2X/KauJ5rYq/2qOWx5O14yc94zzGev5grScZ0Bsh2d77OTbVWXkwz90xP42s/0661xaK0/tsw7spUmrGdm/PcrbR/qV8wdoAvmvc7pn9hsvX7DXqVM4txYrSeqm54B9C/QS2ICdx2j2pfvGSy3dv6LJ8e4K9oEIRVt3cdqW+f7xvNJq4G92x8kJ3tkdJydcMsN4HqKyeyGHI6aFPvIQKXJV5JTNnEKY3FK1ROAcSywR6oMcokltmdbusnbf6cusfbGwmexnr1g7Ew+P9XXJoK2rSa6jxTkq06n4nmw8lHye3wyC3HrZhMjusrxSq7qnbA6LVmmhds1vo3s2q66LvnZ+Dmqb7LGVM1bqBF5yR7eLntdCxv1sDGZc1yrgb3bHyQne2R0nJ3Tc2UWkKCJPiMgDrfLlIvKIiBwWkW+JSDRf7jjO+mEpmv0LAJ4HMNIq/yGAr4YQvikifwHgNgB/vloNy0ovxCar+pAtR0shafnFUjYlZBWINSa7pbIZSsMmFa6L3Wcb/cn+lQnb0Nm
3k4Z8wdobZ99mheLgZju5EJ7cePFzldwzw0YrErf+X7v91Hvte2HwJM1FqOkCNlFF4bFkUqxttefuf8NOuOg5Eg5tjlZx5fDY2fYmrlBi85itq3zO/oHnEqIVe6eSc/F8CbtYsytz31meM7FtyXQp7oCO3uwisgfAvwTwP1plAXAjgL9p7XIPgJtX3BrHcdaMTofxfwzg95DMdW4BMBFCePMn+SiA3YsdKCK3i8hBETnYmJ5ebBfHcbpAZmcXkU8AGA8hPL6cE4QQ7g4hHAghHCgODWUf4DjOmtCJZv8AgE+KyMcB9GNBs/8JgFERKbXe7nsAHFu7ZsbaLC0FM7BIKmSl1dgVNEo7TTZQPhcfX1OadNPzVltNj5ELJkmvygVy+1Va7cwv24vuf9XOgUbpnM/YhtZPjpgylObs32onD8ple1Fnr95kyuwPMDhu95/bnOjbmTGrT7c+ZY+duIrmMYbtXEP/ODtJJB95voS1cWOzbZfMWN3d1Ms/kZ6PUlrTNUfLMEdhwSpdOftq1NPnGqLrWIPp7sw3ewjhzhDCnhDCPgCfAfCPIYTPAXgYwKdbu90K4L7Vb57jOKvFSuzsXwLwuyJyGAsa/mur0yTHcdaCJbnLhhB+BOBHrc+vALh+9ZvkOM5asL5845WsYVs32zhZE7HurlDqI627OYUVhyqy3Xx+C9mUKX1xUaV/Ls3ZfQdO2/Loi9b2Pf4+O2l54eqkcaVJSu9EPvnNWrrttrrN6tfyueQm9D+8wWybeDftSxqSQ4Zf/4y9icUTSd1RO0nb9p8iG/M5K8T5u9RxEX0Tdtv5a6gdfbYczttHXKfuZv//6hZ7rJCPRBSGyuNi9VBGS0v3pz9jtQ30TEV+JSqdVt/ybO7uLus4OcE7u+PkBO/sjpMT1pVmr6jliVizs46LfKBJT0V2dp3tiWyYvEQQL7PEaZeL9BtZUimI6v22ruoGWz78H6yA3TBic0+HE4mW5vPO0TJVYz8lfUopm2pD9lxVJdM5xbL027oGTtlHw8TsA3j/la+Y8tNP7r/4eerdNnDhXIn8A2iOpEBxDpw2TPs1sI9DccpeY+UYtXsn3aNyoo3rQ2TvnyI/evJnr24mTZ+Stor95vn5ZI1eOZ/x3lXV8ZLX7A/QDn+zO05O8M7uODlhXQ3jtYsgD4Mil1bK3pmFHtZzWCmbSeY3kaltox1yDRyloaOq78I++/s5u9uOO7dtsSev1W1dG59LvhJthgOAMoVBDh6zLq/H32/NaexaqkM4i788Ybb9q8ueN+Vvl6415f/6vu+a8jOze0z5pZtOXfxce2ybPTGHodIwvkYmwv4z9sseGE/2n9pjv3dezac6ak9WoOekqExxnJ23TsNh4Qyv0Qow7Z9Bfl5DMWMVIgoD5tVqtGTgZ59db9vhb3bHyQne2R0nJ3hnd5yc0FPNHrkMKt1SjFwV7bGcapdXHeEUQ5rI7ER1sSsur97BzKto0LkdVmcPHrW3ePKE1bNz++yF7X4jOf78L1kx1kdpkmZ2s43QFi/76KumvLV/6uLnbZUps+3+Q79kyv3P2rq/fPQzprzj2pOmXCwkbWVT2uxu9h213+3oU/bLq9qpB5y+Truhpq/i2thA56J7og8fPmzPy269s3u43bbIq8001RxJFMo8Qav5cJpvTsNG5jV9HVx3p/ib3XFygnd2x8kJ3tkdJyf0VLOzbtGukJE7LLkucnpi1ui8XJTZnmWX5FTS5L5ZG2l/7hKtQMopgOvbrUYfftYuaXr8/UnjSpRmanK/PXbXJ46b8ol/epstT1rx++KTyfbhV207eUHSC1eQb8FJu/+xo3aNp9963/+7+Pn7R+y8xOxOe/9GXqZzn7bnmtvS/h1UJpdWfk5A9myZsw9GWWlndsme38xpqKiuYTu505y0mt/YwjOWJ4vmnGg7p57WbeW5hXb7Mf5md5yc4J3dcXKCd3bHyQk91eyRrtG+27T8TZO0GB/LGp3DWHUqnzQbPBDbMbnu6ihpO+VDHSgt0sCovZD6eWvkn9ll9erY/vHkPA1agunYqCkfJpv90El7T4Yf2WjK1WuS3/aqrSq6xtjnnFJJv2Jv8I8uu+ri51nS3Ffea/NUHf6snacoVO3+/WdsW6rqMua3UsgqheayRtcpwwDrRz47ljF502e3hxqFNtP8gb5jHIqblUoqK9KjqeYmeP5KX1NaPf5md5yc4J3dcXKCd3bHyQm91ezk767jr1mjR77vgTQQ+UjXB/j49ponywbKKZqjZYDUuSvnbEPnp+wt/tANz9q6yWj6s+8l6Z3Ynl8miRlo2aR5a/rG+GZKo6zuabRMdSFdNbK/O9/fNx7ddfFzhb6rX3zS+tlXzqTf/zrFLgy/1n6uoT6QvrxWbYe9Rw2dxoq/R5bVlGq6MJ/+btT3pMQGbypyfHukw9kPv5hsL9F8VVOlx46uQZ+j/SbHcd5KeGd3nJzgnd1xckJXNXsoWPs353ozPuik+1iLsCSa20o5u2j/hlqyqVmyv3HlKbIpj7HRnvOYWZ3dfyqpj1Muj15x1pR//MI7bF3j1l697XDSTs4tNrPdtnt+0iaZ43gB1n1mG/tmF9PtwFmGYO27wDb5y79r7ezNfnv/Xr7FPoYjL9jy7I6kvqGjtDTXJtL/9ETXqu3fZ3zNxY00iXTcTh5EeRGpPq2l2bed5zgi3/eU5Z8BoFBVx/Pcjb5E9413HCezs4tIv4g8KiJPicizIvKV1t8vF5FHROSwiHxLRNZg+XjHcVaLTobx8wBuDCFMiUgZwE9E5PsAfhfAV0MI3xSRvwBwG4A/T6tIGkBZrb7CIa66nLXKhdDqJ5zGKgr103YrXp2DwlBBoY3FCXubGsOUrvh48pvJbr7nXttk9+U0wDTUPnWdWhWHhpmNTXaYWRq3WkevqAPE16VTe0Xmw6VCt8zIAtp2/korNzZ/9xlT3r73n5nyqV+h1F5HkjEurzYrbA4rsZRht2plwtpp5UW5bM9bnbMmQx5aR9Y1dfjcdtvOylkap7PMpGG+kJJsKA9jTtGmVzRKk26ZX3lY4M2EZeXWvwDgRgB/0/r7PQBuzqrLcZze0dHvu4gUReRJAOMAHgTwMoCJEMKbP19HAexuc+ztInJQRA42ZqZXocmO4yyHjjp7CKERQrgWwB4A1wN4Z6cnCCHcHUI4EEI4UBwcWl4rHcdZMUsyvYUQJkTkYQC/CmBUREqtt/seAMeWevIo9RSnz1Ww9gqs6XllSw4pVOGJoZKu6/pfoxBMTo1Mdc9uT8r1jVY09R9Pv8Ws1bR5p0yry7Jd6a7f/IYp3/ntz5lyVuqjFZFi4mGz5/iNVmQOnrLvioa93dj2T7ah47+WCNgdu+2qt+OHttpzZ+hq7ZY6P2lP3KBlpwoZ1sgiPRd6+bIKpf3mtOlsduYlnaI5KDV/w+HI88rUmZayqpPZ+G0iMtr6PADgIwCeB/AwgE+3drsVwH1ZdTmO0zs6ebOPAbhHRIpY+HG4N4TwgIg8B+CbIvJfADwB4Gtr2E7HcVZIZmcPITwN4L2L/P0VLOh3x3EuAb
ob4ipWp3NYn9aBkUYn+2mTWl4k+yLr3eZsImbYDbWRksIKiJfTHXijvc20MUTabDulUeLQxiESYOrw+X6rIcsT9rxfvuffmnJ9u70wXnIo0yV2jRh8yWrjyoRdeqr2722ZzMjY8nejFz83f5Oeiw12YqL8Bulu0vBzO9X+5ErbpGesRCmtajwPRM9gWbt7s0trMWVfxPNC1UjToy3a9TZtaSh3l3WcnOCd3XFygnd2x8kJPQ1xrZxLWdKJfa8pRVBWat4o1dFg+/03HrI7F+fsvhfezvZuKqq7yPMQOvwVAGYoffHQIet/PXW50viUJrlCPvrlSduOIvmJ18mHKSuF9mrB935uh73ml37Lho5es/G0Kb9yaospT/1aImivG7F29uu3v27Kf9+0S0/z8tA61XRaKnMAqG2w7R58g0KMabmomort4FTckR89pycnjc5zTtofg5cc73QNZ3+zO05O8M7uODnBO7vj5ISuanZpUkw1m6v18k/kS8zx7by8DtfF/tY61rtJKa9mdti6I21Leov92bVfM/s4z21Jj5Xf+IrV5eXJ5Pe3WbENqY7YqsbuPWTKL/7+FabMS01rLc2+BkuGNahO5U0aMopjIF44stOUhzbQkln15D48fmif2VammP4omxmnCVe3m2MzOFUU6/3oeaXddVw5b+Nzsa88xzGkzknR/TVLnXlaKsdxvLM7Tk7wzu44OaHryz9prcipeTWRLzxpcPZjZl961sZaC3MOL70kMAA0Su01+WLn1jZ81skNWva3fMH+vh67yW4vqrRol3/XZvZhDX/sv9tY7i+/89um/N/u+bQpa20X3Xv+Klj78XbOoaY0KedB2/t9u/PkHnsdfXRPpnaRIfm65Kb0HUnPa8rfLb/OqhuT+913xm7ssyZ8zI9yOX1up6py/vGcCMeg8z1iDc9Lf+nj2c5u/ObdN95xHO/sjpMTuh7iqoeSkelDDZd5lRY2TfA4k91h2W3VrBBLKZZrG+2Yq++UHZ/xkKtOxxvJkZH7qUqrihbP269g2+NJXaeutf6uYw+fMuVd/8m28yu/80m7/ddPmPKpJ3Zc/ByZ3lLSIgOLmLB4mKq+ny1P2/sz9JMXTbl8rTURzm2lsFT6rvt+nnx5G47YE0/upZVWabg8t43knjIRcipzvVIqYIf8C3XTM0lDZu1+m7VKK5tv2VYXhSOrNFUcDttpujF/sztOTvDO7jg5wTu74+SErpvetGkgCuPTep78Hlnfp5ntACDQyqsmtRSZ5Tj9cIPDYdnjlVNeKz3Fq4gaN1IAI1utOW3m7KgpF2uJTrzwDqsZp95mTW3bH6c0VCfsb/e2K+25Tpa1iZC0bob78fwua9MqXLAXuvOnSd3Hf90ee/KGa0z5ir+eNeWRHx62B3zU7l+eSq5zeqc9L+vVKs2n1DfZCyvMJBfGzx+HtPK8BOvo8nkKu1bPKLu7Fvh5zljuqTTVfi4i1vvoCH+zO05O8M7uODnBO7vj5ISeana2c2otXN1kBRNrM3ZhrdMyyk3SNQWVFphdVjlVNNeFERJUROUXif8iLztV35Z+bGPQnuvs1Ymm3PiirWviXXbfkzdbHd3/tE1x9eyxMVO+/O8SA+2Rz1stu/eWn9t2ffg62653Wn/l6T2miKnPnb/4eeBRu0z14AkOeaUUzlftNWV2p61uTMoDJ21dfROgfUmHU2qv4tnkkednilNvs2bn54TTmev5mTIvR8Z2c05nPkQpyGhORPcV9iVwO7vjOAbv7I6TE7yzO05O6L5mV7C/sLbtsvYtkl2YU0sVKI1yY6PVpE3lG1/L+I0TWi63dMTq1TrbOZWTdIOXg56yt/hCYdCUS5uto/Osil9k/Tl4xGrZjT+24Z6DJ6z9+o3rrbh77V8kea2ar5lN+MEbT5ryu/70/aZc20DxBJtICz+W6PSh43bf6gj5nI/YLy+M2nJ11Lat76xuB83VULpsDnGVs/Ye1dUcyRDdz7qd8oiI0lZFqbmSz2wL52ddAtvZKQ04P2N6X/IhCVlrS7/Zho72chznkqeT9dn3isjDIvKciDwrIl9o/X2ziDwoIoda/2/KqstxnN7RyZu9DuCLIYT9AG4A8HkR2Q/gDgAPhRCuAvBQq+w4zjqlk/XZjwM43vo8KSLPA9gN4FMAPtTa7R4APwLwpfTKrO2SU0nVle92kZbL5djhwOl0ScPLbPuY9ChWmGO1Oa0vZUIq0bJBZkkrstVyrLyM28rY5x+qbRLFONtdp3cWqWxF5+ykbUvYmWj4Xf9gv/rr9/9rU5650orfYh+lvH7d5kbSaZTKL9N8S5V8D/o5TTjFD9DxZ96TlDkmYugo2+TT9evA8WR/fmai+AueN6Lvne3w9eH2xxaop/FzwjZ7fkZ1ujP2T+mUJWl2EdkH4L0AHgGwo/VDAAAnAOxod5zjOL2n484uIsMA/hbAb4cQLuhtIYSANrE3InK7iBwUkYON6enFdnEcpwt01NlFpIyFjv6NEMKbKUxPishYa/sYgPHFjg0h3B1COBBCOFAcGlpsF8dxukCmZhcRAfA1AM+HEP5IbbofwK0A7mr9f99ST866RJcj3/cNbJdk/ZRu19Q2UfYtro2Q+KJcZDxoCXRureXmt5G2neAJAVvkuPL5rcnxUTwA5R6rbaCq6TLKA/ZCyz9LROXJ99E1nR025eFNM6a8a8QM5nBiyJ586hdJPu7pHemx8uUp8vm34esYOG5vkl66i5dVqlNaZbZPs84uq8HlLOWni/LsUS6CKH02pTe3uQjtNv7uinaFKwjNHzQr7VNLc37ATunEqeYDAP4dgJ+LyJOtv/0+Fjr5vSJyG4DXANyyvCY4jtMNOpmN/wnaLxd30+o2x3GctaKn7rKMWU0lxRURiIcybMbjoaM2q9Ro6MeutrwaTeTWS3etqSQEpyqqTNpjp/bZhg29bisrbk3Ge42zdpw4eQWNDSt23D74sjXrNX9h50im35GY0/pfs/vWChTCSud+aYN1892w2U626ns0vde2a+Akrbxy3l7Hrh/bL3PiSjum7T+l0z21TxEOxCnIC+T6bIbuNGwv02o+/Iw1eTEaMikWZtWqQzRsZ3NZlVd84RWNCC09Ow1pjepY3mGO41xqeGd3nJzgnd1xcsK60uxtpwGxyCqtvC9bSdiMoo4v20hQ1EZppdUJ1vB2/yaFFGozCZv1pq6kP5C5cfpqK+52jE5d/Hz+BStIK+dNERf227pnr7H2nMJJq7vLJxMtPLfLzh2Uz1m302Y/3ZNB6z5bP0hxT8o0Ovy6vX9s4tpx0NZVHbE3uDRj95/fpF1F7WnZ1MYuxQOTtL+6pZx+LIhtN6/IyzocZB4rqeeqSqZihjU6183zA8vV6eYcK6/CcZxLAe/sjpMTvLM7Tk5YX5pdwdqrPkrL+KSEsAIAmu0nAAKHw5JbZOSaS3X3naYwVmUzZbtu6Szp0SumTHnurDX6Tzy2PamX0mnP7SJ/WHa9PWY1OreldkUiKoeeseGwfefsNc9ut/e38Lp1p+W0ytqvYXq33TZ4wrbjzLvsNc+MkQ2aU
l6NHGr/mPIcCei7mtti624MJfdQh7sCse2bbePRMtecHbqUXCeHQUeuzjT3wBo9Otcq4G92x8kJ3tkdJyd4Z3ecnLBuNXvk+16n3yXWS2X7hxLpVRMayRGtpK/Y/srL51Y3cUqspMxzDdzO2mtWrFXm2O9e+dmTX321ZMv9YzYMdW7O1l0sUqjpsUQrz+6019joI59+sulzOGf/aVrmWk0BcPhmhXzhZ3aSnqV7UL6QMR+j9yVb+NzWDLGr5nKi74p35XTlND/QN9H+GWtQ/AWfi9Ossb/AcsNY0/A3u+PkBO/sjpMTvLM7Tk5Yt5qdqZympXoGKf0Q6T62MXOMuoa12eCx9PTE7DMdCkmZ5w4a1E5Op9V3luLdr0jEWoHmEirnKP3w2RFTDmNWVJbO2uswS1PxksGU+rhJ8wO1EfZBt9vnNycV8v2MbM6ckon9GOieVDeqfSlPQY1t4eQzwXkN9Ln5misX6JqH0/3uebtelqnJORHY54FSofH2tcDf7I6TE7yzO05OuGSG8ZyJlt0PeYjFK4foYRIPtXkYyW6SHOLa4FUz1YiM0yLxkLRuvU4xT+6c/cqFs2/C7ssultVRe+zwYTt+LlIo78zu5DObftg9s0ErmrIZsGyTzaJZTt4bJTovh2dyhlhuS9rwmOuuU4Zd/t55xVO9UgubVKNsvjz0HkqXjtrdNkqLRqa1Mq3Ww8/kWuBvdsfJCd7ZHScneGd3nJxwyWh2Jit1NGugoHQ269PITEf6n7VvIM2utR27VHKqY9arkRlPnbtKejRKr82riJLO5pVB9fzBPGWVilIw0XWwi3Ck4VXkbtY94O1sqotTMCXnnt9it7AprjJnD+aVWbVO53kfdgnmY9lkGK3Aq07NzyevWNQNjc74m91xcoJ3dsfJCd7ZHScnXLKaPVoBliUQ25F1mWyvkZ03csW12zkNsLavloXsumzT53NZj1djs2dN3n8qPTRUu5UCi/gLqJ/2MrmGsi/B/Ajbq6lu0uGS0m7WwhVOycyvnGgR3eQP0kh3Xeb0TgWbtdrY4eNVctPzk7N7bZHq1s8kz0NE8y1rEMKahb/ZHScneGd3nJyQ2dlF5OsiMi4iz6i/bRaRB0XkUOv/TWl1OI7TezrR7H8J4E8B/E/1tzsAPBRCuEtE7miVv7T6zVsCGRGCDaXlCjUKQyVNybo6rouX221/LId38lwCzz3oEE3WvjXyq2ffgiitMoV7ahs/69Xo2Iw5kLT0W2zvL9nsWZn+AMWZtC8z/YuOYiaE/RiU/zrNvdQ5PTZp+BLNkQjds4K6jkiTr30EayaZb/YQwv8BcJb+/CkA97Q+3wPg5tVtluM4q81yNfuOEMLx1ucTAHa021FEbheRgyJysDE9vczTOY6zUlY8QRdCCIgHfXr73SGEAyGEA8WhoXa7OY6zxizXzn5SRMZCCMdFZAzA+Go2aq0p1NMFFKcUYr3Fcc7aNs529WKd7e58MlvU6bOEzjO30wrBEsVEZ9m3tS5n33bW7HyNHA/A8ez6tTE/ajexZi+xbzzZxlnDc9nURXELfM2M1tKsq9lvIbKr0/xLND+j5hoiv/l1wHLf7PcDuLX1+VYA961OcxzHWSs6Mb39FYCfArhaRI6KyG0A7gLwERE5BOCft8qO46xjMofxIYTPttl00yq3xXGcNeSS9Y1fKsYmHflepx/LWi6yMavxUaTreJVljqVnTa80O88dcM60yB+AyqzLtV94FENOTwJr9EKNr8turw1rI76ti33IWVdzjD8fbzQ/jUU5HoBh33jtx8CxA2zfz/Jfj+ca1p9O17i7rOPkBO/sjpMTcjOM1/BwK3LX5KEfDeciV1O1P6/e2ezjoTjHOlLjVN3RKqLkCsoSgE1cnGpam/nSVkoBgEYfuZLSkHdmjFfgUXVRO6sUxsv3u0TnTgtPFvpuCnSP2OTF34dOLc2Si49lmcSrCsXps9Y3l1hzHcdZLt7ZHScneGd3nJyQS83ORGmoebknWsWV95/fmojhaEkh9qytpM8X2I22yOaxJpnHmlV2gaV0z5PJZw4z5boL7EpK4Z6cXluv8lqnEAheAivLPZbR4clNMts1KUSYX1+sy/V8AF8zuwjH6cjT27neucSb7zhOp3hnd5yc4J3dcXKCa/ZFyHJxjZaH0ksO8fLEnPJa0l0y6xvUksIpS0MB2ctWsT+Bdi2N3Hp5ieGU8NjFytp+rZeCAhZZHprOxe3WcwuA1fSs2dnXgOvm+QHtAxDNp7C76/r2fl0y/mZ3nJzgnd1xcoJ3dsfJCa7Zl0EU+pii7UoUNsnhsjVKX1yaZGd5fWI6bcb8QLSstdLZUQor8iHna2qSrg5kS9cpnHg+gNvJ566O8Mmo3ep+R0sdczgtHcuxCfoeXup286WSs8t1nPzind1xcoJ3dsfJCa7ZVwGtQSMdSMZc9gOPUkup3VlzR0sEsxZmm3NK2iqOT2ebPcek8xLNoHkL7RPAcfhZcxzsh89zEzXVVvb353TPXI5s+jl+veX40h0nX3hnd5yc4J3dcXKCa/ZVJvajJ994TkNN++u4cV7aiLVwtJQUk5bLrdk+zhtYbFllu0NlmmO/dd3UDjpVg/3VOVU3z0WoU3P8ela6ZyfB3+yOkxO8sztOTvBhfK/hIa5KZxynQaYymeY4zVKaDIiGwxmhtyVOu8xprZQ8iVZh4aE2v2IyTG9mWP8WCzvtJv5md5yc4J3dcXLCijq7iHxMRF4UkcMicsdqNcpxnNVn2ZpdRIoA/gzARwAcBfCYiNwfQnhutRrnWKKQ1kK6gK0NcwXJx8j0RnC6bNbdvF2nseJllJho1VY2R6Y1Lb3ZTgorebNfD+BwCOGVEEIVwDcBfGp1muU4zmqzks6+G8ARVT7a+ptBRG4XkYMicrAxPb2C0zmOsxLWfIIuhHB3COFACOFAcWgo+wDHcdaEldjZjwHYq8p7Wn9ry/wbR08f/oMvvgZgK4DTKzj3WuHtWhrerqXRjXZd1m6DhLA8LwURKQF4CcBNWOjkjwH4NyGEZzs49mAI4cCyTryGeLuWhrdrafS6Xct+s4cQ6iLyHwH8AAvzqV/vpKM7jtMbVuQuG0L4HoDvrVJbHMdZQ3rlQXd3j86bhbdraXi7lkZP27Vsze44zqWF+8Y7Tk7wzu44OaGrnX09Bc6IyNdFZFxEnlF/2ywiD4rIodb/m7rcpr0i8rCIPCciz4rIF9ZDu1pt6BeRR0XkqVbbvtL6++Ui8kjrO/2WiFSy6lqj9hVF5AkReWC9tEtEXhWRn4vIkyJysPW3nn2XXevsKnDmNwDsB/BZEdnfrfMvwl8C+Bj97Q4AD4UQrgLwUKvcTeoAvhhC2A/gBgCfb92jXrcLAOYB3BhCeA+AawF8TERuAPCHAL4aQrgSwDkAt/WgbQDwBQDPq/J6adeHQwjXKvt6777LEEJX/gH4VQA/UOU7AdzZrfO3adM+AM+o8osAxlqfxwC82OP23YeFqML11q5BAD8D8CtY8AgrLfYdd7E9e7DQcW4E8AAWYuPWQ7teBbCV/taz77Kbw/iOAmd6
zI4QwvHW5xMAdvSqISKyD8B7ATyyXtrVGio/CWAcwIMAXgYwEUJ4M+C1V9/pHwP4PSS5eresk3YFAD8UkcdF5PbW33r2XXoOujaEEIJItIBSVxCRYQB/C+C3QwgXdH64XrYrhNAAcK2IjAL4DoB39qIdGhH5BIDxEMLjIvKhHjeH+WAI4ZiIbAfwoIi8oDd2+7vs5pt9yYEzPeCkiIwBQOv/8W43QETKWOjo3wghfHu9tEsTQpgA8DAWhsejrTgJoDff6QcAfFJEXsVCToUbAfzJOmgXQgjHWv+PY+HH8Xr08LvsZmd/DMBVrVnSCoDPALi/i+fvhPsB3Nr6fCsWNHPXkIVX+NcAPB9C+KP10q5W27a13ugQkQEszCU8j4VO/+letS2EcGcIYU8IYR8Wnql/DCF8rtftEpEhEdnw5mcAHwXwDHr5XXZ5wuLjWIiUexnAH3R7woTa8lcAjgOoYUHT3YYFrfcQgEMA/jeAzV1u0wexoPOeBvBk69/He92uVtveDeCJVtueAfCfW39/O4BHARwG8NcA+nr4nX4IwAProV2t8z/V+vfsm897L79Ld5d1nJzgHnSOkxO8sztOTvDO7jg5wTu74+QE7+yOkxO8sztOTvDO7jg54f8DAvK7yjKagDoAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "data = pipeline.get_data('simplex010')\n", "plt.imshow(data[-1, ], origin='lower')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's also plot the measured separation, position angle, and contrast as function of principal components that have been subtracted." ] }, { "cell_type": "code", "execution_count": 31, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Text(0, 0.5, 'Contrast (mag)')" ] }, "execution_count": 31, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgIAAAHoCAYAAAA7coe1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAABMeElEQVR4nO3deXhkZZn38e+vu0UIi2wNsiXVCCiIssUWZJFFfBF9YWScEYwKbhkUBIZxRpjM6ygaBQVUBkYMizBSgAzLiIAsgoKOCKShhWZvmlToBu1mFYgsTd/vH+cEqqsryUlXnVRS9ftcV1059ZxTz7mr0K67nvOc+1FEYGZmZq1pWqMDMDMzs8ZxImBmZtbCnAiYmZm1MCcCZmZmLcyJgJmZWQtzImBmZtbCnAiYmZm1MCcCZmZmLWzGWAdIWhs4CHg/UABWA5YAdwK/jIjf5xifmZmZ5WjEEQFJG0s6G3gC6AFWAfqB64ESSWJwg6T7JH18IoI1MzOz+hptRGAucD7QGRH3VjtA0mrA3wDHStosIk6ue4Q5WH/99aNQKDQ6DDMzswkxZ86cJyNiZrV9GmmtAUkzI2JJ1pOM9/hG6uzsjP7+/kaHYWZmNiEkzYmIzmr7Rrw0MN4v9amSBJiZmdkbMt01IKlX0uFV2g+X9M36h2VmZlabYrFIoVBg2rRpFAoFisVio0OalLLePvgp4K4q7XOAT9cvHDMzs9oVi0W6u7splUpEBKVSie7ubicDVWRNBDYguWWw0lPAhvULx8zMrHY9PT0MDQ0t1zY0NERPT0+DIpq8siYCg8DuVdr3ABbWLxwzM7PaDQ4Ojqu9lY1ZUCj1Y+D7klYBbkrb9gG+A5yUR2BmZmYrq729nVKpVLXdlpdpRCAiTiFJBk4DHkofPwTOiojv5heemZnZ+PX29tLW1rZcW1tbG729vQ2KaPLKvNZARBwPrA/snD5mRsRx4zmZpP0kPShpvqQVXivpzZJ+lu6/TVKhbN+7Jd0q6V5J90haNW3fKX0+X9JpkjSemMzMrPl0dXXR19dHR0cHkujo6KCvr4+urq5GhzbpjFhQqOrB0vrA24C5EfHyuE4kTScZSdiXZF7BHcAhEXFf2TFfAt4dEYdLOhj4aER8XNIMkrUNPhURf5S0HvBsRLwm6XbgKOA24BrgtIj45WixuKCQmZm1kpUqKFTRwZqS/htYDPwe2CRtP1PS1zPGMRuYHxELIuIV4GLgwIpjDiQpawxwKbBP+gv/g8DdEfFHgIh4Kk0CNgLWiog/RJLR/BdJyWMzMzPLIOulgZOAjYEdgb+WtV8FfDRjH5sAj5U9X5i2VT0mIpYCzwHrAVsBIek6SXdK+pey48vvWqjWJwCSuiX1S+pfssRFEM3MzCD7XQMHkAzTz5VUfi3hfmDz+oe1ghnAbsB7gCHgRklzSBKFTCKiD+iD5NJAHkGamZlNNVlHBNYhKR5UaU3gtYx9LAI2K3u+adpW9Zh0XsBb0vMuBG6JiCcjYohkLsCO6fGbjtGnmZmZjSBrInAHyajAsOFf1P9AMmcgax9bSpqV1iM4GLiy4pgrgUPT7Y8BN6XX/q8D3iWpLU0Q3g/cFxFPAH+RtHM6l+DTwM8zxmNmZtbysl4a+FfgOknvTF9zbLo9m6S64JgiYqmkI0m+1KcD50bEvZJOAPoj4krgHOCnkuYDT5MkC0TEM5JOJUkmArgmIq5Ou/4ScB6wGvDL9GFmZmYZZL59UNK7gK8AO5GMJNwJnBQR9+QXXj58+6CZmU1GxWKRnp4eBgcHaW9vp7e3ty61D0a7fTDriADpF/6hYx5oZmZm4za8YuLwYknDKyYCuRZCylpHYBtJby97vq+kCyQdnxYKMjMzsxo0asXErJMFzwV2AJC0GcmEvHWBI4Bv5ROamZlZ62jUiolZE4F3kMwJgGQ2/20RsT/wKeCQPAIzMzNrJSOtjJj3iolZE4HpwCvp9j4k9/EDPAJsWO+gzMzMWk2jVkzMmgjMA74oaXeSRODatH0T4Mk8AjMzM2sljVoxMdPtg5L2AP6HpNLf+RHx2bT9O8BWEfG3eQZZb7590MzMWknNtw9GxC2SZpKs9PdM2a4fk9T+NzMzsykoUyIg6a3AjIhYWLFrKaC6R2VmZmYTIuscgQuAD1Vp/z/AT+sXjpmZmU2krIlAJ3BLlfbfpvvMzMxsCsqaCMwA3lylfdUR2s3MzGwKyJoI3AZ8sUr7ESQrApqZmdkUlHXRoR7gJknvBm5K2/YmKTv8gTwCMzMzs/xlGhGIiD8AOwOPAgelj0eBXSLi9/mFZ2ZmZnkac0RA0ptI7hr414j4ZP4hmZmZ2UQZc0QgIl4FPgiMXYLQzMzMppSskwUvJ7kcYGZmZk0k62TBQeDf0kWH+oEXy3dGxKn1DszMzMzylzUROAx4Bnh3+igXgBMBMzOzKSjrXQOzRnlsnvVkkvaT9KCk+ZKOq7L/zZJ+lu6/TVIhbS9I+qukuenjzLLXHCLpHkl3S7pW0vpZ4zEzM2t1WecI1EzSdOAMkjULtgEOkbRNxWGfA56JiC2A7wMnle17JCK2Tx+Hp33OAH4I7BUR7wbuBo7M+a2YmZk1jcyJgKStJP2rpDMlnVv+yNjFbGB+RCyIiFeAi4EDK445EDg/3b4U2EfSaKsbKn2snh63FvB41vdkZtYIxWKRQqHAtGnTKBQKFIvFRodkLSzrMsQfBi4D7gJ2Iikr/DaSdQZ+m/FcmwCPlT1fCLx3pGMiYqmk54D10n2zJN0F/AX4t4j4bUS8KumLwD0kExgfJil7XO09dAPdAO3t7RlDNjOrr2KxSHd3N0NDQwCUSiW6u7sB6OrqamRo1qKyjgicAHwjInYBXgY+BRSAXwG/ySWy5T0BtEfEDsCxwIWS1kqLHX2RpNTxxiSXBo6v1kFE9EVEZ0R0zpw5cw
JCNjNbUU9Pz+tJwLChoSF6enoaFJG1uqyJwNuBn6XbrwJtEfESSYJwTMY+FgGblT3fNG2rekx6/f8twFMR8XJEPAUQEXOAR4CtgO3TtkciIoBLgPdljMfMbMINDg6Oq90sb1kTgedJlhyG5Nf5Fun2DGCdjH3cAWwpaZakVYCDgSsrjrkSODTd/hhwU0SEpJnpZEMkbQ5sCSwgSRy2kTT8E39f4P6M8ZiZTbiRLk36kqU1yniWId4t3b4aOEXSvwM/AW7N0kFELCWZ0X8dyZf1JRFxr6QTJB2QHnYOsJ6k+SSXAIZvMdwDuFvSXJJJhIdHxNMR8TjwDeAWSXeTjBB8O+N7MjObcL29vbS1tS3X1tbWRm9vb4MislanZER9jIOSX+FrRMTdktqAU4BdgYeAYyNiSo1pdXZ2Rn9/f6PDMLMWVSwW6enpYXBwkPb2dnp7ez1R0HIlaU5EdFbdlyURaDZOBMzMrJWMlgiMeGlgjPv3az7ezMzMGm+0OQIPSPqkpDeP1oGkrSWdxRvX883MrMm4CFLzGq2gUDfwXeB0STeSrDr4OPASyZ0C25BMINwKOA04Pd9QzcysEVwEqbmNOUdA0vuAQ4DdgQ5gNeBJkiqD1wEXRMSz+YZZX54jYGaWXaFQoFQqrdDe0dHBwMDAxAdk4zbaHIExSwxHxO+B39c9KjMzmxJcBKm5Tdjqg2ZmNjW5CFJzcyJgZmajchGk5uZEwMzMRtXV1UVfXx8dHR1IoqOjg76+Pk8UbBIuKGRmZtbkVqqgkJmZmTW/Me8aqCRpbSoSiIh4ul4BmZmZ2cTJlAhI6gDOBPYEVinfBQQwve6RmZmZWe6yjgj8BFgb+BxJdcHWm1hgZmbWhLImArOBnSNiXp7BmJmZ2cTKOlnwUWDUxYfMzMxs6smaCBwNfEfSFnkGY2ZmZhMr66WBn5OMCDwo6WVgafnOiFir3oGZmZlZ/rImAkfmGoWZmZk1RKZEICLOzzsQMzMzm3iZKwtKerOkz0o6WdL3JB0maVwTCCXtJ+lBSfMlHTfCOX6W7r9NUiFtL0j6q6S56ePMstesIqlP0kOSHpD0t+OJyczMrJVlLSi0DXAtsBZwT9r8BeAbkvaLiPsz9DEdOAPYF1gI3CHpyoi4r+ywzwHPRMQWkg4GTgI+nu57JCK2r9J1D7A4IraSNA1YN8t7MjMzs+wjAj8E7gLaI2L3iNgdaAf+CPwgYx+zgfkRsSAiXgEuBg6sOOZAYPgyxKXAPpI0Rr+fBb4DEBHLIuLJjPGYmZm1vKyJwK7Av0bEX4Yb0u0eYLeMfWwCPFb2fGHaVvWYiFgKPAesl+6bJekuSTdL2h1eX/cA4JuS7pT035I2rHZySd2S+iX1L1myJGPIZmZmzS1rIvASSYnhSm9J9+XtCZLRiB2AY4ELJa1FcmljU+D3EbEjcCtwcrUOIqIvIjojonPmzJkTELKZ5a1YLFIoFJg2bRqFQoFisdjokMymnKyJwC+AsyTtKml6+tgN+DFwZcY+FgGblT3fNG2reoykGSSJxlMR8XJEPAUQEXOAR4CtgKeAIeDy9PX/DeyYMR4zm8KKxSLd3d2USiUiglKpRHd3t5MBs3EaT2XBh4HfkowAvATcDDwEHJOxjzuALSXNkrQKcDArJhFXAoem2x8DboqIkDQznWyIpM2BLYEFEREkScqe6Wv2Ae7DzJpeT08PQ0NDy7UNDQ3R09PToIjMpqasdQSeBQ6UtCXwjrT5/oiYn/VEEbFU0pHAdSTLFp8bEfdKOgHoj4grgXOAn0qaDzxNkiwA7AGcIOlVYBlweEQ8ne77avqaHwBLgM9kjcnMpq7BwcFxtZtZdUp+VLeWzs7O6O/vb3QYZlaDQqFAqVRaob2jo4OBgYGJD8hsEpM0JyI6q+0bcURA0mnA8RHxYro9oog4qsYYzczGpbe3l+7u7uUuD7S1tdHb29vAqMymntEuDbwLeFPZtpnZpNHV1QUkcwUGBwdpb2+nt7f39XYzy8aXBszMzJrcaJcGMt01IOlrktqqtK8m6Wu1Bmhmjed78s1aU9bbB/8dWKNKe1u6z8ymMN+Tb9a6siYCAqpdQ9iB5DY/M5vCfE++WesatY6ApOdJEoAAFkgqTwamA6sCZ1Z7rZlNHb4n36x1jVVQ6EiS0YBzSRYYeq5s3yvAQETcmlNsZjZB2tvbq96T397e3oBozGwijXppICLOj4jzgL2AH6XPhx8XtXoS4MlV1ix6e3tpa1t+PrDvyTdrDVlLDN88vC3prcAqFftbbvxweHLV8HXV4clVgO9jtinH9+Sbta5MdQTSJX//A/h7KpIAgIiYXv/Q8lOPOgIub2pmZlNFzXUEgFOA7YC/IVl58BPAPwMLgY/XIcYpx5OrzMysGWRNBD4EfDkirgNeA+ZExKnAccA/5BXcZDbSJCpPrjIzs6kkayKwNjA8Dv4csF66fSvwvjrHNCV4cpWZmTWDrInAI8Dm6fb9wMGSBBxEixYU6urqoq+vj46ODiTR0dFBX1+fJ1eZmdmUknWy4D8Cr0XEaZL2Bq4iWZlwGnB0RJyeb5j15UWHzMyslYw2WTDr7YPfL9u+SdI7gE7g4Yi4pz5hmpmZ2UQbMxGQ9Cbgd8CnI+JBeL1ugKfHm5mZTXFjzhGIiFeBWVRfdMjMzMymsKyTBc8HvpBnIGZmZjbxMs0RAFYHuiTtC8wBXizfGRFHZelE0n7AD0lWLjw7Ik6s2P9m4L+AnYCngI9HxICkAsndCg+mh/4hIg6veO2VwOYRsW3G92RmZtbysiYCWwN3ptubV+zLdMlA0nTgDGBfkoqEd0i6MiLuKzvsc8AzEbGFpIOBk3ijcuEjEbH9CH0fBLyQJQ4zMzN7Q6ZLAxGx1yiPvTOeazYwPyIWRMQrwMXAgRXHHEhyGQLgUmCftF7BiCStARwLfCtjHGa588qUZjZVZJ0jUA+bAI+VPV+YtlU9JiKWsnwVw1mS7pJ0s6Tdy17zTZK1EIZGO7mkbkn9kvqXLFlSw9swG93wypSlUomIeH1lSicDZjYZZU4EJO0lqU/StZJuKn/kGWDqCaA9InYg+fV/oaS1JG0PvC0irhirg4joi4jOiOicOXNmzuFaK+vp6Xl9eephQ0ND9PT0NCgiM7ORZUoEJB0G/BJYE9gTWAKsA+wI3DfiC5e3CNis7PmmaVvVYyTNAN4CPBURL0fEUwARMYek5PFWwC5Ap6QBkloHW0n6TcZ4zHLhlSnNbCrJOiLwFeDIiDgEeBU4Pv11fgHZJ+ndAWwpaZakVYCDgSsrjrkSODTd/hhwU0SEpJnpZEMkbQ5sCSyIiB9FxMYRUQB2Ax6KiD0zxmOWC69MaWZTSdZEYHPgV+n2y8Aa6fbpwGFZOkiv+R8JXEdyK+AlEXGvpBMkHZAedg6wnqT5JJcAjkvb9wDuljSXZBLh4RHRkosd2eTnlSnNbCrJevvgUySXBSAZvt8WuJtkIt9qWU8WEdcA11S0fa1s+yXg76q87jLgsjH6HkjjMmuo4RUoe3p6GBwcpL29nd7eXq9MaWaTUtZE4LfAB
4F7gEuA09LiQvsAN+QUm9mU1dXV5S9+M5sSsiYCRwKrptvfAZYCu5IkBb5/38zMbIrKugzx02Xby0gq/pmZmdkUl3VEAEmrAp8Atkmb7gMuioi/5hGYmZmZ5S9rHYEdSe7dP4WkVPBs4GRgQbrPLDcu12tmlp+sIwJ9wP8Cn4mIFwEkrQ6cm+7rzCc8a3XD5XqHK/UNl+sFPBnPzKwOFDH24oGS/grsVLFSIJLeCfRHROZbCCeDzs7O6O/vb3QYlkGhUKBUKq3Q3tHRwcDAwMQHZGY2BUmaExFVf7RnLSj0ALBxlfaNgIdWNjCzsbhcr5lZvrImAv9GUjvgYEmF9HEw8AOgR9K6w4/cIrWW5HK9Zmb5ypoI/AJ4B3AhyaTBR9LtbYCfkyxC9GT616xuXK7XzCxfWScL7pVrFGYjcLleM7N8ZZos2Gw8WdDMzFpJPSYLIuldkk6X9EtJG6VtfyNph3oFamZmZhMra0GhDwJ3AJsAe/PGioNvA/49n9DMzMwsb1lHBL4JHBsRHwVeKWv/DUmVQZsiXKXPzMzKZZ0suC1wTZX2pwHfMjhFuEqfmZlVyjoi8DTJZYFKOwIL6xeO5amnp+f1JGDY0NAQPT09DYrIzMwaLWsicCHwPUmbAgHMkPR+koWH/iuv4Ky+XKXPzMwqjaey4KNACViDZAnim4DfAa7sMkW4Sp+ZmVXKlAhExKsR0QVsCfw98AngHRHxqYh4Lc8ArX5cpc/MzCplnSwIQEQsABZImgGsmk9IlhdX6TMzs0qjVhaUtA+wXkRcUtZ2HPB1kiTiV8DBEfFsvmHWlysLmplZK6mlsuBxwKZlHc0Gvg38FPgXYDvAU87NzMymqLFGBP4EfDgi5qTPvwfsEhG7pc//DvhWRLx9IoKtF0lLSCY+trL1SVaMtHz5c54Y/pwnjj/riVHvz7kjImZW2zHWHIG1gcVlz3dl+cJCw2WHp5SRPoxWIql/pGEiqx9/zhPDn/PE8Wc9MSbycx7r0sATJOsJIOnNwA7ArWX71wRezic0MzMzy9tYicAvge9K2hs4CXgR+G3Z/ncD83OKzczMzHI21qWBrwGXk9wd8AJwaESULzr0WeCGnGKzfPU1OoAW4c95Yvhznjj+rCfGhH3Oo04WfP0g6S3AC5XFgyStm7a/Uv2VZmZmNpllSgTMzMysOWVda8DMzMyakBOBFiNpM0m/lnSfpHslHd3omJqZpOmS7pJ0VaNjaVaS1pZ0qaQHJN0vaZdGx9SMJP1j+m/GPEkXSXKZ+TqRdK6kxZLmlbWtK+kGSQ+nf9fJ6/xOBFrPUuCfImIbYGfgCEnbNDimZnY0cH+jg2hyPwSujYh3kFQ79eddZ5I2AY4COiNiW2A6cHBjo2oq5wH7VbQdB9wYEVsCN6bPc+FEoMVExBMRcWe6/TzJP5pTrijUVCBpU+DDwNmNjqVZpROZ9wDOAYiIV6ba2idTyAxgtXTRuTbg8QbH0zQi4hbg6YrmA4Hz0+3zgb/J6/xOBFqYpAJJkajbGhxKs/oByZocyxocRzObBSwBfpJegjlb0uqNDqrZRMQi4GRgkKTQ3HMRcX1jo2p6G0bEE+n2n4AN8zqRE4EWJWkN4DLgmIj4S6PjaTaSPgIsHl6nw3IzA9gR+FFE7EBS9Cy3IdRWlV6fPpAk8doYWF3SJxsbVeuI5Pa+3G7xcyLQgiS9iSQJKEbE5Y2Op0ntChwgaQC4GNhb0gWNDakpLQQWRsTwqNalJImB1dcHgEcjYklEvEpSaO59DY6p2f1Z0kYA6d/FYxy/0pwItBhJIrmeen9EnNroeJpVRBwfEZtGRIFkUtVNEeFfUHUWEX8CHpM0vALqPsB9DQypWQ0CO0tqS/8N2QdPyszblcCh6fahwM/zOpETgdazK/Apkl+oc9PH/o0OyqwGXwaKku4Gtge+3dhwmk864nIpcCdwD8l3h0sN14mki0gW9Hu7pIWSPgecCOwr6WGSEZkTczu/KwuamZm1Lo8ImJmZtTAnAmZmZi3MiYCZmVkLcyJgZmbWwpwImJmZtbAZK/tCSWsDBwHvBwrAaiSlPu8EfhkRv69DfGZmZpajcY8ISNpY0tkk9aZ7gFWAfuB6oESSGNyQLnP78XoGa2ZmZvW1MiMCc0lWQuqMiHurHSBpNZKVko6VtFlEnLzSEZqZmVluxl1QSNLMiFiS1/ETYf31149CodDoMMzMzCbEnDlznoyImdX2jXtEYLxf6pMtCQAoFAr09/c3OgwzM7MJIak00r6VniyYdvzpEXYF8BIwPyLuquUcZmZmlp9abx88AzgLOA84N32cB5wNXADMkTRHUtXhiKmuWCxSKBSYNm0ahUKBYrHY6JDMzMzGpdZE4O+Bu0hWtFs1fewKzAE+CuwACGi65W6LxSLd3d2USiUiglKpRHd3t5MBMzObUmpafVDS/cBh6RKV5e07Az+JiK0l7QX8NCI2HaWfo4EvkCQNZ0XEDyT9DBheY3xt4NmI2L7KaweA54HXgKUR0TlW3J2dnVHrHIFCoUCptOIll46ODgYGBmrq28zMrJ4kzRnp+7GmOQIkhYSGqrQPpfsAHgXWGSW4bUmSgNnAK8C1kq6KiI+XHXMK8NwocewVEU+OK/IaDQ4OjqvdzMxsMqr10sDtwKmS3jrckG6fDAyPEmwJLBylj62B2yJiKCKWAjeTVCwc7k8klyAuqjHWumpvbx9Xu5mZ2WRUayLweWBjYFDSQDpMP5i2fT49ZnXgW6P0MQ/YXdJ6ktqA/YHNyvbvDvw5Ih4e4fUBXJ9OSuxe+bcyPr29vbS1tS3X1tbWRm9v70SFYGZmVrOaLg1ExMPp0P4HeeN6/gPADZFOPoiI/xmjj/slnURSovhFksqFr5UdcgijjwbsFhGLJG1AUtr4gYi4pfKgNEnohvr8au/q6gKgp6eHwcFB2tvb6e3tfb3dzMxsKqhpsmAeJH0bWBgR/ylpBrAI2CkiRru8MPzarwMvjFXSuB6TBc3MzKaK0SYL1rwMsaQvSbpX0pCkzdO24yT9/Tj62CD9204yP+DCdNcHgAdGSgIkrS5pzeFtkpGJeSv/bszMzFpLTYmApGOAfwP6SG79G7YIOHIcXV0m6T7gF8AREfFs2n4wFZcF0tUPr0mfbgj8TtIfSSYuXh0R1473fZiZmbWqWm8fPBz4QkRcLal8QuCdwDuzdhIRu4/QfliVtsdJJhQSEQuA7cYTsJmZmb2h1ksDHVQfin8VWK3Gvs3MzCxntSYCC4Adq7TvD9xXY99mZmaWs1ovDZwMnJ7e/y9gF0mfAv4F+GytwZmZmVm+aq0j8JP0Fr9vA23AT4HHgaMi4md1iM/MzMxyVOuIABFxFnCWpPWBaRGxuPawzMzMbCLUnAgMm+hFf8zMzKx2404EJD1KUt9/TBGx+bgjMjMzswmzMiMCp5dtrwEcS1LM59a0bReSJYVPqS00MzMzy9u4E4GIeP0LXtJ5wEkR8e3yYyQdzzgKCpmZmVlj1FpH4CDgkirt/w0cUGPfZmZmlrNaE4EXgT2rtO8JDNXYt5mZmeWs1rsG
vg+cIakT+EPatjNwKPD1Gvs2MzOznNVaUOi7kgaAo4HhZYfvBw6NiGqXDMzMzGwSqUdBoUuoPk/AzMzMJrlxzxGQpHofL+loSfMk3SvpmLTtZ5Lmpo8BSXNHeO1+kh6UNF/SceOJzczMrNWtzGTBByR9UtKbRztI0taSzgJG/XKWtC3wBZLaA9sBH5G0RUR8PCK2j4jtgcuAy6u8djpwBvAhYBvgEEnbrMR7MjMza0krc2mgG/guyaqDNwL9JAsNvQSsQ/KFvBuwFXAayxcgqmZr4LaIGAKQdDPJbYnfTZ+LZP7B3lVeOxuYHxEL0mMvBg7ESyCbmZllsjIFhW4G3ivpfcAhwMeBDmA14EngLuBc4IKIeDZDl/OAXknrAX8F9idJLobtDvw5Ih6u8tpNgMfKni8E3lvtJJK6SZIY2tvbM4RlZmbW/FZ6smBE/B74fa0BRMT9kk4CriepSzAXeK3skEOAi+pwnj6gD6CzszPTWglmZmbNrtaCQnUREedExE4RsQfwDPAQgKQZJJcJfjbCSxcBm5U93zRtMzMzswwmRSIgaYP0bzvJF/+F6a4PAA9ExMIRXnoHsKWkWZJWAQ4Grsw7XjMzs2ZRcx2BOrksnSPwKnBE2dyCg6m4LCBpY+DsiNg/IpZKOhK4DpgOnBsR905g3GZmZlPapEgEImL3EdoPq9L2OMmEwuHn1wDX5BacmZlZE5sUlwbMzMysMWpOBCStKuljkr4qae207W2S1q05OjMzM8tVTZcGJG0B3ACsCawN/DfwLPDF9Pnna4rOzMzMclXriMAPSBKBDUmKAQ27Etirxr7NzMwsZ7VOFnwfsHNEvFaxttAgsHGNfZuZmVnO6jFZ8E1V2tqB5+rQt5mZmeWo1kTgeuDYsuchaS3gG8DVNfZtZmZmOas1ETgW2E3Sg8CqJKWAB4C3Msbyw2bNrFgsUigUmDZtGoVCgWKx2OiQzMyqqmmOQEQ8Lml7koWBdiRJLPqAYkT8dbTXmjWrYrFId3c3Q0NDAJRKJbq7uwHo6upqZGhmZitQROstxNfZ2Rn9/f1jH2i2EgqFAqVSaYX2jo4OBgYGJj4gM2t5kuZERGe1feMeEZB0UNZjI+Ly8fZvNtUNDg6Oq93MrJFW5tLApRmPC5KFgMxaSnt7e9URgfb29gZEY2Y2unFPFoyIaRkfTgKsJfX29tLW1rZcW1tbG729vQ2KyMxsZJNi0SFJR0uaJ+leSceUtX9Z0gNp+3dHeO2ApHskzZXkC//WcF1dXfT19dHR0YEkOjo66Ovr80RBM5uUal1r4NMj7ArgJWB+RNw1Rh/bAl8AZgOvANdKugrYDDgQ2C4iXpa0wSjd7BURT477DZjlpKury1/8ZjYl1Fpi+AxgFZLqgsvStmnAq+n2myTdBewXEUtG6GNr4LaIGAKQdDNwENAJnBgRLwNExOIaYzUzM7MKtV4a+HvgLmBXkoJCq6bbc4CPAjsAAk4dpY95wO6S1pPUBuxPMhqwVdp+m6SbJb1nhNcHcL2kOZK6RzqJpG5J/ZL6lywZKScxMzNrLbWOCJwKHBYRt5W13SrpWOAnEbG1pH8CfjpSBxFxv6STSMoVvwjMBV5LY1sX2Bl4D3CJpM1jxcIHu0XEovTSwQ2SHoiIW6qcp4+k2BGdnZ2tVzzBzMysilpHBArAUJX2oXQfwKPAOqN1EhHnRMROEbEH8AzwELAQuDwSt5Nceli/ymsXpX8XA1eQzDUwMzOzDGpNBG4HTpX01uGGdPtkYHiUYEuSL/URDU8ElNROMj/gQuB/gL3S9q1I5iI8WfG61SWtObwNfJDkUoOZmZllUGsi8HlgY2AwvY1vABhM2z6fHrM68K0x+rlM0n3AL4AjIuJZ4Fxgc0nzgIuBQyMiJG0s6Zr0dRsCv5P0R5Kk5OqIuLbG92RmZhW8kFbzqnmtAUki+SX+9rTpAeCGKtfyJw2vNWBmll3lQlqQFMlyfYypY7S1BrzokJmZjcoLaU19dV10qErn7wX2ATag4lJDRBxVa/9mZtZYXkirudVaWfArwHeB+cDjJPf0D2u9oQYzsybkhbSaW62TBY8GjoqIrSJiz4jYq+yxdz0CNDNrNlNt4t1UXUhrqn3OjVLrpYG1gGvGPMrMzIAVJ96VSiW6u5OiqJN14t1wXD09PQwODtLe3k5vb++kjRem5ufcKDVNFpR0JnB3RPxn/ULKnycLmlmjeOLdxPDnvLw8Jws+BnxD0q7A3byx2BAAETHaGgNmZi3HE+8mhj/n7GpNBD4PvAC8L32UC0ZfbMjMrOV44t3E8OecXU2TBSNi1iiPzesVpJlZs5iqE++mGn/O2dV614CZmY1DV1cXfX19dHR0IImOjg5X6MuBP+fs6lFieCvgY0A7ycJAr4uIz9bUeU48WdDMzCajYrGYy90ZuU0WlPRh4DLgLmAn4A7gbcCbgd/W0reZmVkradQtj7VeGjgB+EZE7AK8DHwKKAC/An5TY99mZmYto6enZ7mFnQCGhobo6enJ9by1JgJvB36Wbr8KtEXESyQJwjFZO5F0tKR5ku6VdExZ+5clPZC2f3eE1+4n6UFJ8yUdt9LvxMzMrIEadctjrbcPPg+smm4/AWwBzEv7XSdLB5K2Bb4AzAZeAa6VdBWwGXAgsF1EvCxpgyqvnQ6cAewLLATukHRlRNxX07syMzObYI265bHWEYHbgN3S7auBUyT9O/AT4NaMfWwN3BYRQxGxFLgZOAj4InBiRLwMEBGLq7x2NjA/IhZExCvAxSTJg5mZ2ZTSqFsea00EjgX+kG5/Hbge+FuS1Qg/n7GPecDuktaT1AbsTzIasFXafpukmyW9p8prNyGpbjhsYdq2Akndkvol9S9ZsiRjaGZmZhOjUbc81nRpICIWlG0PkfyKH28f90s6iSSJeBGYC7yWxrYusDPwHuASSZvHSt7vGBF9QB8ktw+uTB9mZmZ56urqmvBaB5OioFBEnBMRO0XEHsAzwEMkv+4vj8TtwDJg/YqXLiIZPRi2adpmZuPkJVvNWlOtkwXrQtIGEbFYUjvJ/ICdSb749wJ+nRYtWgV4suKldwBbSppFkgAcDHxi4iI3aw5estWsdU2KEQHgMkn3Ab8AjoiIZ4Fzgc0lzSOZBHhoRISkjSVdA5BOLjwSuA64H7gkIu5tyDswm8Iadf+ymTVezSWGpyKXGDZb3rRp06j2b4Ekli1b1oCIzKyeRisxPFlGBMysgUa6T3myL9nqeQ1mtat5joCk9wL7ABtQkVhExFG19m9m+evt7V1ujgBM/iVbPa/BrD5qujQg6SvAd0nqBjwOlHcWEbF3beHlw5cGzFaU16pneSkUClWrsHV0dDAwMDDxAZlNYqNdGqg1EXgMOCkiTl/pThrAiYDZ1Od5DWbZ5TlHYC3gmhr7MDMbt6k6r8Fssqk1EbgI2K8egZiZjUej6rKbNZtaJws+BnxD0q7A3SRLEb8uIk6tsX8zs6qG5y9MpXkNZpNRrXMEHh1ld0TE5ivdeY48R8DMzFrJaHMEal10aFYtrzczM7PGqltBIUlrSFq9Xv2
ZmZlZ/mpOBCQdIWkQeA74i6SSpC/VHpqZmZnlraZLA5L+FTgeOBn4Xdq8O3CipLUi4sQa4zMzM7Mc1XrXwOFAd0RcVNZ2o6SHgW8DTgTMzMwmsVovDWwA3FGl/XZgw6ydSDpa0jxJ90o6Jm37uqRFkuamj/1HeO2ApHvSY3wrgJmZ2TjUOiLwEPAJ4ISK9k8AD2bpQNK2wBeA2cArwLWSrkp3fz8iTs7QzV4R8WS2kM3MzGxYrYnA14FLJO0B/G/ativwfuDvMvaxNXBbRAwBSLoZOKjGuMzMzCyDmi4NRMTlwHuBPwEfSR9/AmZHxP9k7GYesLuk9SS1AfsDm6X7jpR0t6RzJa0zUhjA9ZLmSOoe6SSSuiX1S+pfsmRJxtDMzMyaW02VBesWhPQ54EvAi8C9wMvAd4AnSb7ovwlsFBGfrfLaTSJikaQNgBuAL0fELaOdz5UFzcysldR19UFJ65Zvj/bI2mdEnBMRO0XEHsAzwEMR8eeIeC0ilgFnkcwhqPbaRenfxcAVIx1nZmZmK1qZOQJLJG2UfvEO/2KvpLR9epYOJW0QEYsltZPMD9g5PccT6SEfJbmEUPm61YFpEfF8uv1BVpy4aGZmZiNYmURgb+Dpsu16XFu4TNJ6JKsXHhERz0r6D0nbp/0PAP8AIGlj4OyI2J/kFsUrJEHyXi6MiGvrEI+ZmVlLmBRzBCaa5wiYmVkrqescgYqOX0sn6VW2ryfptVr6NjMzs/zVWllQI7S/maQ4kJmZmU1iK1VQSNKx6WYAh0t6oWz3dJKFhx6oMTYzMzPL2cqOCHw5fQj4fNnzL6fP30yyIJFZzYrFIoVCgWnTplEoFCgWi40OycysaazUiEBEzAKQ9GvgoIh4pq5RmaWKxSLd3d0MDQ0BUCqV6O5OCkh2dXU1MjQzs6bguwZsUisUCpRKpRXaOzo6GBgYmPiAzMymoNHuGhj3iICk04DjI+LFdHtEEXHUePs3Kzc4ODiudjMzG5+VuTTwLuBNZdsjab2hBqu79vb2qiMC7e3tDYjGzKz5jDsRiIi9qm2b5aG3t3e5OQIAbW1t9Pb2NjAqM7PmUWsdgRVI2kLSqvXu11pTV1cXfX19dHR0IImOjg76+vo8UdDMrE5qmiwo6dvAgxFxvpKC/9cD+wDPAR+KiD/UJ8z68mRBMzNrJbmVGAa6gAfT7Q8B2wM7A/8FfKfGvs3MzCxnK1VHoMyGwMJ0e3/gkoi4XdLTgH9ym5mZTXK1jgg8BXSk2x8Ebky3ZzDyOgQrkHS0pHmS7pV0TNr2dUmLJM1NH/uP8Nr9JD0oab6k41b+rZiZmbWeWkcELgMulPQQsC5wXdq+PTA/SweStgW+AMwmWajoWklXpbu/HxEnj/La6cAZwL4kIxN3SLoyIu5bifdiZmbWcmpNBI4FSkA78C8R8WLavhHwo4x9bA3cFhFDAJJuBg7K+NrZwPyIWJC+9mLgQMCJgJmZWQY1XRqIiKURcUpEHB0Rd5W1fz8izs7YzTxgd0nrSWojmWuwWbrvSEl3SzpX0jpVXrsJ8FjZ84Vp2wokdUvql9S/ZMmSjKE1Hy/gY2Zm5WquIyBpQ0knSLpU0n9L+oakDbK+PiLuB04iufXwWmAu8BrJiMLbSC4zPAGcUkucEdEXEZ0R0Tlz5sxaupqyhhfwKZVKRMTrC/g4GTAza101JQKSdiWZC/AJ4K/ASyS3FM6XtEvWfiLinIjYKSL2AJ4BHoqIP0fEaxGxDDiL5DJApUW8MXoAsGnaZlX09PQsV6EPYGhoiJ6engZFZGZmjVbrHIGTgYuAw9MvbCRNA84k+QX/viydSNogIhZLaieZH7CzpI0i4on0kI+SXEKodAewpaRZJAnAwSRJiVXhBXzMzKxSrYnA9sBhw0kAQEQsk3QqcNeIr1rRZZLWA14FjoiIZyX9h6TtSRYvGgD+AUDSxsDZEbF/RCyVdCTJ3QrTgXMj4t4a31PT8gI+ZmZWqdZE4DlgFm9UFxw2C3g2aycRsXuVtk+NcOzjJBMKh59fA1yT9VytzAv4mJlZpVonC14MnCOpS9Ks9PFJ4GySSwY2iXgBHzMzq1TrokOrAN8DDueN0YVXSWb8fzUiXqk5whx40SEzM2sloy06VNOlgfSL/mhJx5Pc6gfwyHBxIDMzM5vcVmpEIC388z3gb4A3Ab8CjoqIJ+saXU4kLSGpiNjK1gemxH+vKc6f88Tw5zxx/FlPjHp/zh0RUbWIzsomAt8DvgQUSWoHHAL8JiL+rpYobeJI6h9pmMjqx5/zxPDnPHH8WU+MifycV/bSwEHA5yLiYgBJFwD/K2l6RLxWt+jMzMwsVyt718BmwG+Hn0TE7cBSYON6BGVmZmYTY2UTgekkSwaXW0rtdQls4vQ1OoAW4c95Yvhznjj+rCfGhH3OKztHYBlwA/ByWfOHgJuB1+8YiIgDag3QzMzM8rOyv+DPr9J2QS2BmJmZ2cSrqaCQmZmZTW21lhi2KUbSZpJ+Lek+SfdKOrrRMTUzSdMl3SXpqkbH0qwkrS3pUkkPSLp/PEugW3aS/jH9N2OepIskrdromJqFpHMlLZY0r6xtXUk3SHo4/btOXud3ItB6lgL/FBHbADsDR0japsExNbOjgfsbHUST+yFwbUS8A9gOf951J2kT4CigMyK2JZkwfnBjo2oq5wH7VbQdB9wYEVsCN6bPc+FEoMVExBMRcWe6/TzJP5qbNDaq5iRpU+DDJItwWQ4kvQXYAzgHkrLnEfFsQ4NqXjOA1STNANqAxxscT9OIiFuApyuaD+SN+Xjnk1TyzYUTgRYmqQDsANzW4FCa1Q+AfwGWNTiOZjYLWAL8JL0Ec7ak1RsdVLOJiEXAycAg8ATwXERc39iomt6GEfFEuv0nYMO8TuREoEVJWgO4DDgmIv7S6HiajaSPAIsjYk6jY2lyM4AdgR9FxA7Ai+Q4hNqq0uvTB5IkXhsDq6dLztsEiGRWf24z+50ItCBJbyJJAooRcXmj42lSuwIHSBoALgb2TktxW30tBBZGxPCo1qUkiYHV1weARyNiSUS8ClwOvK/BMTW7P0vaCCD9uzivEzkRaDGSRHI99f6IOLXR8TSriDg+IjaNiALJpKqbIsK/oOosIv4EPCbp7WnTPsB9DQypWQ0CO0tqS/8N2QdPyszblcCh6fahwM/zOpETgdazK/Apkl+oc9PH/o0OyqwGXwaKku4Gtge+3dhwmk864nIpcCdwD8l3h0sN14mki4BbgbdLWijpc8CJwL6SHiYZkTkxt/O7oJCZmVnr8oiAmZlZC3MiYGZm1sKcCJiZmbUwJwJmZmYtzImAmZlZC3MiYGZm1sKcCJiZmbWwSZ8IjLXWuBKnSZov6W5JLi9qZmaW0YxGB5DB8FrjH5O0Csnyl+U+BGyZPt4L/Cj9a2ZmZmOY1IlA2Vrjh0Gy1jjwSsVhBwL/la7O9Id0BGGjsuUbV7D++utHoVDIJ2gzM7NJZs6cOU9GxMxq+yZ1IsDya41vB8wBjo6IF8uO2QR4rOz5wrRtxESgUCjQ39+fQ7hmZmaTj6TSSPsm+x
yBuq01LqlbUr+k/iVLltQzRjMzsylrsicCWdYaXwRsVvZ807RtORHRFxGdEdE5c2bV0REzM2sixWKRQqHAtGnTKBQKFIvFRoc0KU3qRCDjWuNXAp9O7x7YGXhutPkBZmbW/IrFIt3d3ZRKJSKCUqlEd3e3k4EqJv0yxJK2B84GVgEWAJ8BPg4QEWdKEnA6sB8wBHwmIkadANDZ2RmeI2Bm1rwKhQKl0oqXxTs6OhgYGJj4gBpM0pyI6Ky6b7InAnlwImBm1tymTZtGte83SSxbtqwBETXWaInApL40YGZmtjLa29vH1d7KnAiYmVnT6e3tpa1t+fpzbW1t9Pb2NiiiycuJgJmZNZ2uri76+vro6OhAEh0dHfT19dHV1dXo0CYdzxEwMzNrcp4jYGZmZlU5ETAzM2thua41IOnNwMbAasCSiHBtXzMzs0mk7iMCktaU9EVJtwDPAfOBecCfJA1KOkvSe+p9XjMzMxu/uiYCko4FBoDPAjeQLBG8PbAVsAvwdZJRiBskXStpy3qe38zMzMan3pcGdgbeHxHzRth/O3CupMOBzwHvBx6ucwxmZmaWUV0TgYj4+4zHvQz8Zz3PbWZmZuPnuwbMzMxaWG53DUj6NVCtWlEAL5FMIjw/Iu7MKwYzMzMbXZ4jAvcDO5LcPrgwfWyUti0Gdgduk7TPaJ1IGpB0j6S5klYoByhpHUlXSLpb0u2Stq37OzEzM2tSedYReAk4LyKOKW+UdAoQEbGjpB8C3wJuHKOvvSLiyRH2/SswNyI+KukdwBnAqMmFmZmZJfIcETiU5Eu50o+Bz6TbZwHb1HiebYCbACLiAaAgacMa+zQzM2sJeSYCAt5ZpX2bdB/AK8CyMfoJ4HpJcyR1V9n/R+AgAEmzgQ5g0xWCkbol9UvqX7LEBQ7NzMwg30sD5wPnpEWD7kjb3gN8FTgvff5+kqqDo9ktIhZJ2oCkENEDEXFL2f4TgR9KmgvcA9wFvFbZSUT0AX2QrD64Uu/IzMysyeSZCHwF+DPwj8Bb07Y/Ad8DTk6fXwf8crROImJR+nexpCuA2cAtZfv/QnqpQZKAR4EFdXsXZmZmTSy3RCAiXiP5tX6ipLXStr9UHDM4Wh+SVgemRcTz6fYHgRMqjlkbGIqIV4DPA7dUnsfMzMyqy3X1wWE1fDFvCFyR/NBnBnBhRFybligmIs4EtgbOlxTAvSSli83MzCyDvJch/gxwCNAOrFK+LyI2H+v1EbEA2K5K+5ll27eSLGpkZmZm45TbXQOS/hk4BZgDFID/IZkYuC5wbl7nNTMzs+zyvH3wC0B3RBwPvAqcHhEHkCQHHTme18zMzDLKMxHYlGTZYYC/Amul2xcBf5vjec3MzCyjPBOBPwHrp9slYJd0ewuqL0ZkZmZmEyzPROAm4IB0+xzg1HRFwp8Bl+d4XjMzM8soz7sGukkTjYg4U9IzwK7AZSTrDZiZmVmD5VlQaBll6whExM9IRgPMzMxsksi7jsAqwLbABlRchoiIa/I8t5mZmY0tzzoC+wKDQD9wDXBV2eMXeZ3XzGyyKxaLFAoFpk2bRqFQoFgsNjoka2F5ThY8g+RLfxbQBqxW9mjL8bxmZpNWsViku7ubUqlERFAqleju7nYyYA2TZyKwEfDtiChFxEsR8XL5I8fzmplNWj09PQwNDS3XNjQ0RE9PT4MissmkEaNFec4RuAp4H14S2MzsdYOD1RddHandWsfwaNFwojg8WgTQ1dWV23kVkU9tH0lvAYrAwyRrDLxavj8i/iuXE2fQ2dkZ/f39jTq9mbWwQqFAqVRaob2jo4OBgYGJD8gmjTz/tyFpTkR0VtuX54jA/wH2AfYHhli+mmAAmRIBSQPA88BrwNLKN5ImHBeQrHA4Azg5In5Sa/BmZnno7e1d7lcfQFtbG729vQ2MyiaDRo0W5TlH4GTgdGDNiFgjItYse6w11osr7BUR24+QzRwB3BcR2wF7Aqekty2amU06XV1d9PX10dHRgSQ6Ojro6+vLdejXpob29vZxtddLnonA2sCZEfFijueAZHRhTUkC1gCeBpbmfE4zs5XW1dXFwMAAy5YtY2BgwEmAAcloUVvb8jfVTcRoUZ6JwGXAB+rQTwDXS5ojqbvK/tOBrYHHgXuAo9OqhsuR1C2pX1L/kiVL6hCWmZlZ/TRqtCjPyYL/DzgauA64mxUnC56asZ9NImKRpA2AG4AvR8QtZfs/RrKGwbHA29JjtouIv4zUpycLmplZK2nUZMHPkkzye1/6KBdApkQgIhalfxdLugKYDdxSdshngBMjyWjmS3oUeAdwe23hm5mZNb88Fx2aVWsfklYHpkXE8+n2B4ETKg4bJLk74beSNgTejmsXmJmZZZLrokN1sCFwRTIPkBnAhRFxraTDIVneGPgmcJ6kewABX42IJxsVsJmZ2VRS10RA0r8B389yp4CkXYF1I2LEBYgiYgGwXZX2M8u2HycZKTAzM7NxqvddA28DBiX1Sfq/kjYa3iFpVUk7SjpK0u3AT4Fn6nx+MzMzG4e6jghExGckvQs4kqRy4FqSguSOgVVIhu7vBPqA8734kJmZWWPVfY5ARNwD/IOkLwLvBjpIlh5+Epjr6/dmZmaTR553DSwD5qYPMzMzm4TyrCxoZmZmk5wTATMzG1OxWKRQKDBt2jQKhQLFYrHRIVmdTPY6AmZm1mDFYnG5pZNLpRLd3cnSL14waerziICZmY2qp6fn9SRg2NDQED09PQ2KyOopt0RA0tcktVVpX03S1/I6r5mZ1dfg4OC42m1qyXNE4N+BNaq0t6X7zMxsCmhvbx9Xu00teSYCIlllsNIOwNM5ntfMzOqot7eXtrblB3jb2tro7e1tUERWT3WfLCjpeZIEIIAFaWXBYdOBVYEzq73WzMwmn+EJgT09PQwODtLe3k5vb68nCjYJRVT70V5Dh9KhJKMB5wLHAM+V7X4FGIiIW+t60nHq7OyM/v7+RoZgZmY2YSTNiYjOavvyKDF8fnrSR4H/jYiltfQnaQB4HngNWFr5RiT9MzCcls4AtgZmRoQvP5iZmY0hzzkCS0hWIwRA0r6SLpB0vKTp4+xrr4jYvlo2ExHfS/dtDxwP3DxRSYALbJiZ2VSXZyJwLsnEQCRtBvwcWBc4AvhWTuc8BLgop76XM1xgo1QqERGvF9hwMmBmZlNJ3ecIvN6x9CwwOyIekvSPwAERsZekvYCfREQhYz+PAs+QTD78cUT0jXBcG7AQ2KLaiICkbqAboL29fadSqbQS7+oNhUKBan10dHQwMDBQU99mZmb1NNocgTxHBKaTTA4E2Ae4Jt1+BNhwHP3sFhE7Ah8CjpC0xwjH/V+SOQlVLwtERF9EdEZE58yZM8dx+upcYMPMzJpBnonAPOCLknYnSQSuTds3AZ7M2klELEr/LgauAGaPcOjBTNBlAXCBDTMzaw55JgJfBb4A/Aa4KCLuSdsPAG7P0oGk1SWtObwNfJAkwag87i3A+0nmIUwIF9gwM7NmkNvqgxFxi6SZwFoR8UzZrh8DQyO8rNKGwBWSIIn1woi4VtLh6TmGC
xN9FLg+Il6sT/Rjc4ENMzNrBrlNFpzMXFDIzMxayYQWFKo48V4kt/S1A6uU74uIvfM8t5mZmY0tz2WIDwN+CawJ7ElSYGgdYEfgvrzOa2ZmZtnlOVnwK8CREXEI8CpwfETsAFwAvJDjec3MzCyjPBOBzYFfpdsvA2uk26cDh+V4XjMzM8soz0TgKZLLAgCLgG3T7fWA1XI8r5mZmWWU52TB35Lc938PcAlwmqR9SYoL3ZDjec3MzCyjPBOBI4FV0+3vAEuBXUmSgrwWHTIzM7NxyOXSgKQZJCV/AYiIZRFxUkQcEBFfiYhn8zivma28qbis9lSM2WyyyWVEICKWSvoecHUe/ZtZfQ0vqz00lBT9HF5WG5i01TKnYsxmk1GeyxDfCJwREZfncoIauLKg2fKm4rLaUzFms0ZpVGXBs4CTJbUDc4Dl1gGIiDtzPLeZjcNUXFZ7KsZsNhnlmQhcmP49tcq+AKbneG4zG4f29vaqv64n87LaUzFms8kozzoCs0Z5bJ61E0kDku6RNFdS1fF8SXum+++VdHMdYjdrKVNxWe2pGLPZZJRnItABLIqIUvmDpLhQxzj72isitq92fUPS2sB/AgdExDuBv6s1cLNW09XVRV9fHx0dHUiio6ODvr6+ST3pbirGbDYZ5TlZ8DVgo4hYXNG+HrA4IjJdGpA0AHRGxJMj7P8SsHFE/FvW2DxZ0MzMWslokwXzHBEQyVyASutRMXFwDAFcL2mOpO4q+7cC1pH0m/SYT69ErGZmZi2p7pMFJV2ZbgZwgaSXy3ZPJ1lz4Pfj6HK3iFgkaQPgBkkPRMQtZftnADuRlC5eDbhV0h8i4qGKuLqBbvBkIjMzs2F5jAg8lT4EPFP2/ClgIXAm8MmsnUXEovTvYuAKYHbFIQuB6yLixfTywS3AdlX66YuIzojonDlz5rjflJmZWTOq+4hARHwGXr+2f3JEjOcywHIkrQ5Mi4jn0+0PAidUHPZz4PS0rPEqwHuB76/sOc3MzFpJbnUEIuIbdehmQ+AKSZDEemFEXCvp8PQcZ0bE/ZKuBe4GlgFnR8S8OpzbzMys6eV518C6QC/JtfsNqLgMERFr5XLiDHzXgJmZtZJGlRg+B9gB6AMep/odBGZmZtZAeSYC+wD7RsRtOZ7DzMzMapBnHYHFwAs59m82aRWLRQqFAtOmTaNQKFAsFhsdkplZVXkmAj3ACZLWyPEcZpNOsViku7ubUqlERFAqleju7nYyYGaTUp6TBe8BCiRFhErAq+X7I+LduZw4A08WtDwVCoWqq+J1dHQwMDAw8QGZWctr1GTBS3Ps22zSGhwcHFe7mVkjTfY6AmZTTnt7e9URAZe2NrPJKM85AgBI2lvSkZKOkLRn3ucza7Te3l7a2tqWa2tra6O3t7dBEZmZjSy3EQFJm5CsDbATSR0BgI0l9QMfjYjHR3yx2RTW1dUFQE9PD4ODg7S3t9Pb2/t6u5nZZJLnZMHLgI2BT0TEo2nb5sAFwOMR8bFcTpyBJwuamVkradRkwX2BPYeTAICIWCDpKODGHM9rZmZmGeU9R6DacINLDZuZmU0SeSYCNwL/IWmz4QZJ7cAP8IiAmZnZpJBnInAUsDqwQFJJUgl4JG07KmsnkgYk3SNpbjrRsHL/npKeS/fPlfS1ur0DMzOzJpdnHYHHJO0IfAB4R9p8f0T8aiW62ysinhxl/28j4iMr0a+ZmVlLy3WOQCRuiIj/SB8rkwRYi/MCPmZm+al7IiDpQ+lw/lpV9r0l3bfvOLoM4HpJcyR1j3DMLpL+KOmXkt45Qlzdkvol9S9ZsmQcp7dG8gI+Zmb5qnsdAUlXA9dExBkj7P8i8JGI+HDG/jaJiEWSNgBuAL4cEbeU7V8LWBYRL0jaH/hhRGw5Wp+uIzB1eAEfM7PajVZHII9LA+8GRrsEcBOwXdbOImJR+ncxSaXC2RX7/xIRL6Tb1wBvkrT+eIO2yckL+JiZ5SuPRGAmsGyU/QGsl6UjSatLWnN4G/ggMK/imLdKUro9m+Q9PbUScdskNNJCPV7Ax8ysPvJIBBaSjAqM5N3Aoox9bQj8TtIfgduBqyPiWkmHSzo8PeZjwLz0mNOAgyOvusk24byAj5lZvvK4ffBq4JuSromIv5bvkNQGnJAeM6aIWECVywgRcWbZ9unA6TVFbJOWF/AxM8tXHpMFNwDuIrk8cDrwQLpra+BIQMCOEfHnup54HDxZ0MzMWsmELjoUEYslvQ/4EfBtki9+SOYGXAcc0cgkwMzMzN6QS2XBiCgB+0taB9iCJBl4OCKeyeN8ZmZmtnLyriz4TETcERG3OwmYHFylz8zMyuW21oBNPsNV+oaGhgBer9IHePKdmVmLynVEwCaXnp6e15OAYUNDQ/T09DQoIjMzazQnAi3EVfrMzKySE4EW4ip9ZmZWyYlAC3GVPjMzq+REoIV0dXXR19dHR0cHkujo6KCvr88TBc3MWljdKwtOBa4saGZmrWSilyE2MzOzKaIlRwQkLQFKjY6jwdYHnmx0EC3An/PE8Oc8cfxZT4x6f84dETGz2o6WTAQMJPWPNExk9ePPeWL4c544/qwnxkR+zr40YGZm1sKcCJiZmbUwJwKtq6/RAbQIf84Tw5/zxPFnPTEm7HP2HAEzM7MW5hEBMzOzFuZEwMzMrIU5EWgxkjaT9GtJ90m6V9LRjY6pmUmaLukuSVc1OpZmJWltSZdKekDS/ZJ2aXRMzUjSP6b/ZsyTdJGkVRsdU7OQdK6kxZLmlbWtK+kGSQ+nf9fJ6/xOBFrPUuCfImIbYGfgCEnbNDimZnY0cH+jg2hyPwSujYh3ANvhz7vuJG0CHAV0RsS2wHTg4MZG1VTOA/araDsOuDEitgRuTJ/nwolAi4mIJyLiznT7eZJ/NDdpbFTNSdKmwIeBsxsdS7OS9BZgD+AcgIh4JSKebWhQzWsGsJqkGUAb8HiD42kaEXEL8HRF84HA+en2+cDf5HV+JwItTFIB2AG4rcGhNKsfAP8CLGtwHM1sFrAE+El6CeZsSas3OqhmExGLgJOBQeAJ4LmIuL6xUTW9DSPiiXT7T8CGeZ3IiUCLkrQGcBlwTET8pdHxNBtJHwEWR8ScRsfS5GYAOwI/iogdgBfJcQi1VaXXpw8kSbw2BlaX9MnGRtU6IrnPP7d7/Z0ItCBJbyJJAooRcXmj42lSuwIHSBoALgb2lnRBY0NqSguBhRExPKp1KUliYPX1AeDRiFgSEa8ClwPva3BMze7PkjYCSP8uzutETgRajCSRXE+9PyJObXQ8zSoijo+ITSOiQDKp6qaI8C+oOouIPwGPSXp72rQPcF8DQ2pWg8DOktrSf0P2wZMy83YlcGi6fSjw87xO5ESg9ewKfIrkF+rc9LF/o4Myq8GXgaKku4HtgW83Npzmk464XArcCdxD8t3hUsN1Iuki4Fbg7ZIWSvoccCKwr6SHSUZkTszt/C4xbGZm1ro8ImBmZtbCnAiYmZm1MCcCZmZmLcyJgJmZWQtzImBmZtbCnAiYNYik8+q5KqGkgqSQ1FmvPtN+6xqnmU0uTgTMapR+
UUb6eFXSAkknZ6h5fzRQzyJDjwEbAXPr2KfViaTfSDq90XGYVZrR6ADMmsSvSAo1vQnYnWTFwdWBL1YemK7e9lpEPFfPACLiNZLFSczMMvOIgFl9vBwRf4qIxyLiQqBIumyopK9LmifpMEmPAC+TLNqy3JB7+ovxPyV9W9KTkhanIwvTyo5ZJd1fkvRyOvpwVLpvuUsDkvZMn38krSD5kqQ5knYq6289SRel1cz+KuleSZ8Z75uX9A5JV0p6TtILkm6V9K503zRJ/0/SY2nM90g6sOy1w3EfLOnmNI67JL1b0raSfi/pRUm/kzSr7HXDn+vnJQ2mr/sfSeuXHZP13H8r6QZJQ5Luk7RvxfvbRtLVkp5P/7tcJOmtZfvPk3SVpKMlLZL0jKSfSGob3g+8HziibPSoIOlNkk6T9Hga32OScqsgZ1aNEwGzfPyVZHRg2CzgE8DfAdsBL43wui5gKcmCLkcCxwAfL9t/PvBp4Fhga+BzwLNjxHIy8FWgE1gAXDX8BQWsSlI29iPAO4EfAj+WtM8Yfb5O0sbA70hWR9uXZNGfM4Dp6SFHA/+cxvAu4ArgcknbV3T1DeAkkqWxnwUuAv4D6AFmp7GeVvGaAsnllQNJyrBuCZxbtj/ruXvTvrcD7gAuVrJC5/CCL7cA89I4PgCsAfy8PEkjGQnaNt3/ceCj6fmH47gV+AnJ5ZuNSC7lHJUed3Aa+8eBBzGbSBHhhx9+1PAAzgOuKns+G3gS+Fn6/OvAqyTri4/2ut8At1YccwNwdrq9JcmX7X4jxFFI93emz/dMn3eVHbMGyZfs50d5PxcPn7NanFWO7wVKwCoj7F8EfK2i7TfABRVx/0PZ/o+kbQeVtR0GvFD2/OvAa0B7Wdtu6eu2rOHcm6Rtu6XPTwBurOhjnfSY2WWf0WPA9LJjzgJ+VXHe0yv6OQ24kbTcux9+NOLhEQGz+tgvHRJ/ieSX3y0ki+EMWxgRf87Qz90Vzx8HNki3dwCWAb8eZ2y3Dm9ExAski8ZsAyBpuqQeSXdLekrSC8BBQPs4+t8B+F1EvFK5Q9JaJOvX/2/Frt8Nx1Cm/L0Pf1b3VLStXjaaAbAoIgbLnt9G8hltXcO5H0//Dn/uOwF7pP99X0g/o8fSfW8re919kczTKO9nA0Z3HslCSQ9JOkPShytGGcxy58mCZvVxC9BN8sv/8UjWbC/3YsZ+Kl8X5HsJ7yvAP5EMXd8DvECyet9YX2D1ULni2atV9lVrq8fnMeK5IyIklZ9nGnA1yWdVqTy5G/d/u4i4U1IB+D8kS/ueD/xR0r4RsWyM92BWF848zepjKCLmR0SpShJQL3NJ/j+71zhft/PwhpJbGrfljbXkdwN+ERE/jYi5wCPAVuPs/y5gN0mrVO6IiL+Q/DLetWLXbsB94zxPNZtI2qzs+WySz+j+Op77TpL5E6X0v3H54/lx9PMKb8ybeF1EPB8Rl0bEF4EPA3sDW4yjX7OaOBEwmyIi4iHgEuDsdJb7LEm7S/rUGC/9N0n7SnonyUS6V4AL030PAftI2k3SO4DTSSY2jsd/ksw9uETSeyRtIemQsgl53wO+krZtJekEkol1J4/zPNX8FThf0vaSdgHOBK6OiIfreO4zgLcAP5P0XkmbS/qApD5Ja46jnwFgdnq3wPrpHQ3HprFtLWkLkgmlfwEWjqNfs5r40oDZ1PJp4Jskk8zWJ/nC+P4YrzkOOAV4O3Av8JGIGL5U8S2SL/5fknypnkdy62PlNfQRRcQiSXuQfOn+mmRI/B6SSyWksa4JfBfYkGRW/N9GxB+znmMUAySTG39B8nlcD3y+bH/N546IxyXtCnwHuJbk7oXB9FwvjyPWk0mG/u8DViP53J8nuatheCLoXcCHImJoHP2a1UQRlZfKzKwZSNqT5It5ZkQ82dho6k/S14GPRcS2jY7FbCrzpQEzM7MW5kTAzMyshfnSgJmZWQvziICZmVkLcyJgZmbWwpwImJmZtTAnAmZmZi3MiYCZmVkL+/9JkYusMUb9hgAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "fig = plt.figure(figsize=(8, 8))\n", "ax1 = fig.add_subplot(3, 1, 1)\n", "ax2 = fig.add_subplot(3, 1, 2)\n", "ax3 = fig.add_subplot(3, 1, 3)\n", "\n", "for i in range(1, 11):\n", " data = pipeline.get_data(f'fluxpos{i:03d}')\n", " ax1.scatter(i, data[-1, 2], color='black')\n", " ax2.scatter(i, data[-1, 3], color='black')\n", " ax3.scatter(i, data[-1, 4], color='black')\n", "\n", "ax3.set_xlabel('Principal components', fontsize=14)\n", "ax1.set_ylabel('Separation (arcsec)', fontsize=14)\n", "ax2.set_ylabel('Position angle (deg)', fontsize=14)\n", "ax3.set_ylabel('Contrast (mag)', fontsize=14)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Detection limits" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "As a final analysis, we will estimate detection limits from the data. To do so, we will first use the [FakePlanetModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.fluxposition.FakePlanetModule) to remove the flux of the companion from the data since it would otherwise bias the result. We use the PSF template that was stored with the tag *psf* and we adopt the separation and position angle that was determined with the `SimplexMinimizationModule`. We need to apply a correction of -133 degrees which was used previously for `extra_rot`." ] }, { "cell_type": "code", "execution_count": 32, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "----------------\n", "FakePlanetModule\n", "----------------\n", "\n", "Module name: fake\n", "Input ports: centered (70, 57, 57), psf (70, 57, 57)\n", "Input parameters:\n", " - Magnitude = 6.10\n", " - PSF scaling = -1.0\n", " - Separation (arcsec) = 0.06\n", " - Position angle (deg) = 0.06\n", "Injecting artificial planets... [DONE] \n", "Output port: removed (70, 57, 57)\n" ] } ], "source": [ "module = FakePlanetModule(name_in='fake',\n", " image_in_tag='centered',\n", " psf_in_tag='psf',\n", " image_out_tag='removed',\n", " position=(0.061, 97.3-133.),\n", " magnitude=6.1,\n", " psf_scaling=-1.,\n", " interpolation='spline')\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('fake')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Now that the data only contains the flux of the central star, we use the [ContrastCurveModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.processing.html#pynpoint.processing.limits.ContrastCurveModule) to calculate the detection limits. We will calculate the brightness limits by setting the false positive fraction (FPF) to $2.87 \\times 10^{-7}$, which corresponds to $5\\sigma$ in the limit of Gaussian noise. At small angular separations, the detection limits are affected by small sample statistics (see [Mawet et al. 2014](https://ui.adsabs.harvard.edu/abs/2014ApJ...792...97M/abstract)) so this FPF would only correspond to a $5\\sigma$ detection at large separation from the star. In this example, we will subtract 10 principal components and use the median-collapsed residuals." ] }, { "cell_type": "code", "execution_count": 33, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-------------------\n", "ContrastCurveModule\n", "-------------------\n", "\n", "Module name: limits\n", "Input ports: removed (70, 57, 57), psf (70, 57, 57)\n", " \n", "Calculating detection limits... 
[DONE]\n", "Output port: limits (4, 4)\n" ] } ], "source": [ "module = ContrastCurveModule(name_in='limits',\n", " image_in_tag='removed',\n", " psf_in_tag='psf',\n", " contrast_out_tag='limits',\n", " separation=(0.05, 5., 0.01),\n", " angle=(0., 360., 60.),\n", " threshold=('fpf', 2.87e-7),\n", " psf_scaling=1.,\n", " aperture=0.02,\n", " pca_number=10,\n", " cent_size=0.02,\n", " edge_size=2.,\n", " extra_rot=-133.,\n", " residuals='median',\n", " snr_inject=100.)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('limits')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Exporting datasets to FITS and plain text formats" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Now that we have finished the data processing and analysis, we will export some of the results from the HDF5 database to other data formats. Since astronomical images are commonly viewed with tools such as [DS9](https://sites.google.com/cfa.harvard.edu/saoimageds9), we will use the [FitsWritingModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.readwrite.html?highlight=fitswr#pynpoint.readwrite.fitswriting.FitsWritingModule) to write the median-collapsed residuals of the PSF subtraction to a FITS file. The database tag is specified as argument of `data_tag` and we will store the FITS file in the default output place of the `Pypeline`. The FITS file contains a 3D dataset of which the first dimension corresponds to an increasing number of subtracted principal components." ] }, { "cell_type": "code", "execution_count": 34, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-----------------\n", "FitsWritingModule\n", "-----------------\n", "\n", "Module name: write1\n", "Input port: pca_median (30, 57, 57)\n", "Writing FITS file... [DONE]\n" ] } ], "source": [ "module = FitsWritingModule(name_in='write1',\n", " data_tag='pca_median',\n", " file_name='pca_median.fits',\n", " output_dir=None,\n", " data_range=None,\n", " overwrite=True,\n", " subset_size=None)\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('write1')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Similarly, we can export 1D and 2D datasets to a plain text file with the [TextWritingModule](https://pynpoint.readthedocs.io/en/latest/pynpoint.readwrite.html#pynpoint.readwrite.textwriting.TextWritingModule). Let's export the detection limits that were estimated with the `ContrastCurveModule`. We specify again the database tag and also add a header as first line in the text file." ] }, { "cell_type": "code", "execution_count": 35, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "-----------------\n", "TextWritingModule\n", "-----------------\n", "\n", "Module name: write2\n", "Input port: limits (4, 4)\n", "Writing text file... 
[DONE]\n" ] } ], "source": [ "module = TextWritingModule(name_in='write2',\n", " data_tag='limits',\n", " file_name='limits.dat',\n", " output_dir=None,\n", " header='Separation (arcsec) - Contrast (mag) - Variance (mag) - FPF')\n", "\n", "pipeline.add_module(module)\n", "pipeline.run_module('write2')" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.9" } }, "nbformat": 4, "nbformat_minor": 4 } PynPoint-0.11.0/pull_request_template.md000066400000000000000000000024121450275315200203150ustar00rootroot00000000000000Thank you for your contribution to the PynPoint repo! Before submitting this PR, please make sure: - [ ] To read the documentation page on the [Python guidelines](https://pynpoint.readthedocs.io/en/latest/python.html). - [ ] That your branch of the PR is synced with the main branch of the PynPoint/PynPoint repo. - [ ] To update the dependencies to the latest versions with `pip install --upgrade -r requirements.txt`. - [ ] To run both `pycodestyle` and `pylint` on the code that has been added and/or changed. - [ ] That the documentation is successfully build after running `make docs` in your local repo folder. This requires the installation of `sphinx` and `sphinx_book_theme`. - [ ] That all unit tests are finishing after running `make test` in your local repo folder. This requires the installation of `pytest` and `pytest-cov`. - [ ] To add unit tests in case there are new pipeline modules and/or functionalities added. - [ ] That only text files have been added and changed in the commits of the PR. Binary files will clutter up the repo because even after removing such files they will remain in the repo history. - [ ] To add and/or update the docstrings (including the parameters, returns, types, and descriptions). - [ ] To add and/or update the typehints and typechecks. 
PynPoint-0.11.0/pynpoint/000077500000000000000000000000001450275315200152355ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/__init__.py000066400000000000000000000117501450275315200173520ustar00rootroot00000000000000import warnings from pynpoint.core.pypeline import Pypeline from pynpoint.processing.background import SimpleBackgroundSubtractionModule, \ MeanBackgroundSubtractionModule, \ LineSubtractionModule, \ NoddingBackgroundModule from pynpoint.processing.badpixel import BadPixelSigmaFilterModule, \ BadPixelInterpolationModule, \ BadPixelMapModule, \ BadPixelTimeFilterModule, \ ReplaceBadPixelsModule from pynpoint.processing.basic import SubtractImagesModule, \ AddImagesModule, \ RotateImagesModule, \ RepeatImagesModule from pynpoint.processing.centering import StarAlignmentModule, \ FitCenterModule, \ ShiftImagesModule, \ WaffleCenteringModule from pynpoint.processing.darkflat import DarkCalibrationModule, \ FlatCalibrationModule from pynpoint.processing.extract import StarExtractionModule, \ ExtractBinaryModule from pynpoint.processing.filter import GaussianFilterModule from pynpoint.processing.fluxposition import FakePlanetModule, \ SimplexMinimizationModule, \ FalsePositiveModule, \ MCMCsamplingModule, \ AperturePhotometryModule, \ SystematicErrorModule from pynpoint.processing.frameselection import RemoveFramesModule, \ FrameSelectionModule, \ RemoveLastFrameModule, \ RemoveStartFramesModule, \ ImageStatisticsModule, \ FrameSimilarityModule, \ SelectByAttributeModule, \ ResidualSelectionModule from pynpoint.processing.limits import ContrastCurveModule, \ MassLimitsModule from pynpoint.processing.pcabackground import PCABackgroundPreparationModule, \ PCABackgroundSubtractionModule, \ DitheringBackgroundModule from pynpoint.processing.psfpreparation import PSFpreparationModule, \ AngleInterpolationModule, \ AngleCalculationModule, \ SortParangModule, \ SDIpreparationModule from pynpoint.processing.psfsubtraction import PcaPsfSubtractionModule, \ ClassicalADIModule from pynpoint.processing.resizing import CropImagesModule, \ ScaleImagesModule, \ AddLinesModule, \ RemoveLinesModule from pynpoint.processing.stacksubset import StackAndSubsetModule, \ StackCubesModule, \ DerotateAndStackModule, \ CombineTagsModule from pynpoint.processing.timedenoising import CwtWaveletConfiguration, \ DwtWaveletConfiguration, \ WaveletTimeDenoisingModule, \ TimeNormalizationModule from pynpoint.readwrite.attr_reading import AttributeReadingModule, \ ParangReadingModule, \ WavelengthReadingModule from pynpoint.readwrite.attr_writing import AttributeWritingModule, \ ParangWritingModule from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.readwrite.fitswriting import FitsWritingModule from pynpoint.readwrite.hdf5reading import Hdf5ReadingModule from pynpoint.readwrite.hdf5writing import Hdf5WritingModule from pynpoint.readwrite.textwriting import TextWritingModule from pynpoint.readwrite.nearreading import NearReadingModule warnings.simplefilter('always', DeprecationWarning) __author__ = 'Tomas Stolker & Markus Bonse' __license__ = 'GPLv3' __version__ = '0.11.0' __maintainer__ = 'Tomas Stolker' __email__ = 'stolker@strw.leidenuniv.nl' __status__ = 'Development' 
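# The imports above re-export the pipeline and module classes at the package
# level, so a user script can build a pipeline without referring to the
# submodules explicitly. A minimal sketch (kept in comments; the folder paths
# are placeholders and not part of this package):
#
#     from pynpoint import Pypeline, FitsReadingModule
#
#     pipeline = Pypeline(working_place_in='working_folder',
#                         input_place_in='input_folder',
#                         output_place_in='output_folder')
#
#     pipeline.add_module(FitsReadingModule(name_in='read', image_tag='science'))
#     pipeline.run()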
PynPoint-0.11.0/pynpoint/core/000077500000000000000000000000001450275315200161655ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/core/__init__.py000066400000000000000000000000001450275315200202640ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/core/attributes.py000066400000000000000000000106241450275315200207300ustar00rootroot00000000000000""" Module to obtain information about the implemented attributes. """ from typing import Dict, Union from typeguard import typechecked @typechecked def get_attributes() -> Dict[str, Dict[str, Union[str, float, int, None]]]: """ Function to get a dictionary with all attributes. Returns ------- dict Attribute information. """ attr = {'PIXSCALE': {'attribute': 'static', 'config': 'settings', 'value': 0.027, 'type': 'float'}, 'MEMORY': {'attribute': 'static', 'config': 'settings', 'value': 1000, 'type': 'int'}, 'CPU': {'attribute': 'static', 'config': 'settings', 'value': 1, 'type': 'int'}, 'INSTRUMENT': {'attribute': 'static', 'config': 'header', 'value': 'INSTRUME', 'type': 'str'}, 'NFRAMES': {'attribute': 'non-static', 'config': 'header', 'value': 'NAXIS3', 'type': 'int'}, 'EXP_NO': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO DET EXP NO', 'type': 'int'}, 'DIT': {'attribute': 'static', 'config': 'header', 'value': 'ESO DET DIT', 'type': 'int'}, 'NDIT': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO DET NDIT', 'type': 'int'}, 'PARANG_START': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO ADA POSANG', 'type': 'float'}, 'PARANG_END': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO ADA POSANG END', 'type': 'float'}, 'DITHER_X': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO SEQ CUMOFFSETX', 'type': 'float'}, 'DITHER_Y': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO SEQ CUMOFFSETY', 'type': 'float'}, 'PUPIL': {'attribute': 'non-static', 'config': 'header', 'value': 'ESO ADA PUPILPOS', 'type': 'float'}, 'DATE': {'attribute': 'non-static', 'config': 'header', 'value': 'DATE-OBS', 'type': 'str'}, 'LATITUDE': {'attribute': 'static', 'config': 'header', 'value': 'ESO TEL GEOLAT', 'type': 'float'}, 'LONGITUDE': {'attribute': 'static', 'config': 'header', 'value': 'ESO TEL GEOLON', 'type': 'float'}, 'RA': {'attribute': 'non-static', 'config': 'header', 'value': 'RA', 'type': 'float'}, 'DEC': {'attribute': 'non-static', 'config': 'header', 'value': 'DEC', 'type': 'float'}, 'PARANG': {'attribute': 'non-static', 'config': 'header', 'value': 'None', 'type': 'float'}, 'WAVELENGTH': {'attribute': 'non-static', 'config': 'header', 'value': 'None', 'type': 'float'}, 'STAR_POSITION': {'attribute': 'non-static', 'config': None, 'value': None, 'type': 'float'}, 'INDEX': {'attribute': 'non-static', 'config': None, 'value': None, 'type': 'int'}, 'FILES': {'attribute': 'non-static', 'config': None, 'value': None, 'type': 'str'}} return attr PynPoint-0.11.0/pynpoint/core/dataio.py000066400000000000000000001354751450275315200200170ustar00rootroot00000000000000""" Modules for accessing data and attributes in the central database. """ import os import warnings from abc import ABCMeta, abstractmethod from typing import Dict, List, Optional, Tuple, Union import h5py import numpy as np from typeguard import typechecked from pynpoint.util.type_aliases import NonStaticAttribute, StaticAttribute class DataStorage: """ Instances of DataStorage manage to open and close the Pypeline HDF5 databases. 
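    A minimal sketch of the open/close cycle (assuming the default database
    file in the working folder):

    .. code-block:: python

        storage = DataStorage('working_folder/PynPoint_database.hdf5')
        storage.open_connection()
        # read from or write to storage.m_data_bank here
        storage.close_connection()
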
They have an internal h5py data bank (self.m_data_bank) which gives direct access to the data if the storage is open (self.m_open == True). """ @typechecked def __init__(self, location_in: str) -> None: """ Constructor of a DataStorage instance. It needs the location of the HDF5 file (Pypeline database) as input. If the file already exists it is opened and extended, if not a new File will be created. Parameters ---------- location_in : str Location (directory + filename) of the HDF5 database. Returns ------- NoneType None """ assert (os.path.isdir(os.path.split(location_in)[0])), 'Input directory for DataStorage ' \ 'does not exist - input requested:'\ ' %s.' % location_in self._m_location = location_in self.m_data_bank = None self.m_open = False @typechecked def open_connection(self) -> None: """ Opens the connection to the HDF5 file by opening an old file or creating a new one. Returns ------- NoneType None """ if not self.m_open: self.m_data_bank = h5py.File(self._m_location, mode='a') self.m_open = True @typechecked def close_connection(self) -> None: """ Closes the connection to the HDF5 file. All entries of the data bank will be stored on the hard drive and the memory is cleaned. Returns ------- NoneType None """ if self.m_open: self.m_data_bank.close() self.m_data_bank = None self.m_open = False class Port(metaclass=ABCMeta): """ Abstract interface and implementation of common functionality of the InputPort, OutputPort, and ConfigPort. Each Port has a internal tag which is its key to a dataset in the DataStorage. If for example data is stored under the entry ``im_arr`` in the central data storage only a port with the tag (``self._m_tag = im_arr``) can access and change that data. A port knows exactly one DataStorage instance, whether it is active or not (``self._m_data_base_active``). """ @abstractmethod @typechecked def __init__(self, tag: str, data_storage_in: Optional[DataStorage] = None) -> None: """ Abstract constructor of a Port. As input the tag / key is expected which is needed to build the connection to the database entry with the same tag / key. It is possible to give the Port a DataStorage. If this storage is not given the Pypeline module has to set it or the connection needs to be added manually using :func:`~pynpoint.core.dataio.Port.set_database_connection`. Parameters ---------- tag : str Input Tag. data_storage_in : pynpoint.core.dataio.DataStorage The data storage to which the port is connected. Returns ------- NoneType None """ assert isinstance(tag, str), 'Port tag needs to be a string.' self._m_tag = tag self._m_data_storage = data_storage_in self._m_data_base_active = False @property @typechecked def tag(self) -> str: """ Getter for the internal tag (no setter). Returns ------- str Database tag name. """ return self._m_tag @typechecked def open_port(self) -> None: """ Opens the connection to the :class:`~pynpoint.core.dataio.DataStorage` and activates its data bank. Returns ------- NoneType None """ if not self._m_data_base_active: self._m_data_storage.open_connection() self._m_data_base_active = True @typechecked def close_port(self) -> None: """ Closes the connection to the :class:`~pynpoint.core.dataio.DataStorage` and forces it to save the data to the hard drive. All data that was accessed using the port is cleaned from the memory. 
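        A minimal sketch of the typical cycle (assuming that the port has been
        connected to a DataStorage with set_database_connection):

        .. code-block:: python

            port.open_port()
            # read from or write to the database here
            port.close_port()
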
Returns ------- NoneType None """ if self._m_data_base_active: self._m_data_storage.close_connection() self._m_data_base_active = False @typechecked def set_database_connection(self, data_base_in: DataStorage) -> None: """ Sets the internal DataStorage instance. Parameters ---------- data_base_in: pynpoint.core.dataio.DataStorage The input DataStorage. Returns ------- NoneType None """ self._m_data_storage = data_base_in class ConfigPort(Port): """ ConfigPort can be used to read the 'config' tag from a (HDF5) database. This tag contains the central settings used by PynPoint, as well as the relevant FITS header keywords. You can use a ConfigPort instance to access a single attribute of the dataset using get_attribute(). """ @typechecked def __init__(self, tag: str, data_storage_in: Optional[DataStorage] = None) -> None: """ Constructor of the ConfigPort class which creates the config port instance which can read the settings stored in the central database under the tag `config`. An instance of the ConfigPort is created in the constructor of PypelineModule such that the attributes in the ConfigPort can be accessed from within all type of modules. For example: .. code-block:: python memory = self._m_config_port.get_attribute('MEMORY') Parameters ---------- tag : str The tag name of the port. The port can be used to get data from the dataset with the key `config`. data_storage_in : pynpoint.core.dataio.DataStorage The input DataStorage. It is possible to give the constructor of an ConfigPort a DataStorage instance which will link the port to that DataStorage. Usually the DataStorage is set later by calling :func:`~pynpoint.core.dataio.Port.set_database_connection`. Returns ------- NoneType None """ super().__init__(tag, data_storage_in) if tag != 'config': raise ValueError('The tag name of the central configuration should be \'config\'.') @typechecked def _check_status_and_activate(self) -> bool: """ Internal function which checks if the ConfigPort is ready to use and open it. Returns ------- bool Returns True if the ConfigPort can be used, False if not. """ if self._m_data_storage is None: warnings.warn('ConfigPort can not load data unless a database is connected.') status = False else: if not self._m_data_base_active: self.open_port() status = True return status @typechecked def _check_if_data_exists(self) -> bool: """ Internal function which checks if data exists for the 'config' tag. Returns ------- bool Returns True if data exists, False if not. """ return 'config' in self._m_data_storage.m_data_bank @typechecked def _check_error_cases(self) -> bool: """' Internal function which checks the error cases. """ if not self._check_status_and_activate(): status = False elif self._check_if_data_exists() is False: warnings.warn('No data under the tag which is linked by the ConfigPort.') status = False else: status = True return status @typechecked def get_attribute(self, name: str) -> Optional[StaticAttribute]: """ Returns a static attribute which is connected to the dataset of the ConfigPort. Parameters ---------- name : str The name of the attribute. Returns ------- str, float, or int The attribute value. Returns None if the attribute does not exist. 
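        For example, to read the pixel scale from the central configuration
        (a minimal sketch, assuming the default 'PIXSCALE' keyword):

        .. code-block:: python

            pixscale = self._m_config_port.get_attribute('PIXSCALE')
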
""" if not self._check_error_cases(): attr_val = None elif name in self._m_data_storage.m_data_bank['config'].attrs: attr_val = self._m_data_storage.m_data_bank['config'].attrs[name] else: warnings.warn(f'The attribute \'{name}\' was not found.') attr_val = None # Convert numpy types to base types (e.g., np.float64 -> float) if isinstance(attr_val, np.generic): attr_val = attr_val.item() return attr_val class InputPort(Port): """ InputPorts can be used to read datasets with a specific tag from the HDF5 database. This type of port can be used to access: * A complete dataset using the get_all() method. * A single attribute of the dataset using get_attribute(). * All attributes of the dataset using get_all_static_attributes() and get_all_non_static_attributes(). * A part of a dataset using slicing. For example: .. code-block:: python in_port = InputPort('tag') data = in_port[0, :, :] # returns the first 2D image of a 3D image stack. (More information about how 1D, 2D, and 3D data is organized can be found in the documentation of OutputPort (:func:`~pynpoint.core.dataio.OutputPort.append` and :func:`~pynpoint.core.dataio.OutputPort.set_all`) InputPorts can load two types of attributes which give additional information about a dataset the port is linked to: * Static attributes: contain global information about a dataset which is not changing through a dataset in the database (e.g. the instrument name or pixel scale). * Non-static attributes: contain information which changes for different parts of the dataset (e.g. the parallactic angles or dithering positions). """ @typechecked def __init__(self, tag: str, data_storage_in: Optional[DataStorage] = None) -> None: """ Constructor of InputPort. An input port can read data from the central database under the key `tag`. Instances of InputPort should not be created manually inside a PypelineModule but should be created with the add_input_port() function. Parameters ---------- tag : str The tag of the port. The port can be used in order to get data from the dataset with the key `tag`. data_storage_in : pynpoint.core.dataio.DataStorage It is possible to give the constructor of an InputPort a DataStorage instance which will link the port to that DataStorage. Usually the DataStorage is set later by calling :func:`~pynpoint.core.dataio.Port.set_database_connection`. Returns ------- NoneType None """ super().__init__(tag, data_storage_in) if tag == 'config': raise ValueError('The tag name \'config\' is reserved for the central configuration ' 'of PynPoint.') if tag == 'fits_header': raise ValueError('The tag name \'fits_header\' is reserved for storage of the FITS ' 'headers.') @typechecked def _check_status_and_activate(self) -> bool: """ Internal function which checks if the InputPort is ready to use and open it. Returns ------- bool Returns True if the InputPort can be used, False if not. """ if self._m_data_storage is None: warnings.warn('InputPort can not load data unless a database is connected.') status = False else: status = True if not self._m_data_base_active: self.open_port() return status @typechecked def _check_if_data_exists(self) -> bool: """ Internal function which checks if data exists for the Port specific tag. Returns ------- bool Returns True if data exists, False if not. 
""" return self._m_tag in self._m_data_storage.m_data_bank @typechecked def _check_error_cases(self) -> bool: if not self._check_status_and_activate(): status = False elif self._check_if_data_exists() is False: warnings.warn('No data under the tag which is linked by the InputPort.') status = False else: status = True return status @typechecked def __getitem__(self, item: Union[slice, int, tuple]) -> Optional[Union[StaticAttribute, NonStaticAttribute]]: """ Internal function which handles the data access using slicing. See class documentation for a example (:class:`~pynpoint.core.dataio.InputPort`). None if the data does not exist. Parameters ---------- item : tuple Slicing parameter. Returns ------- StaticAttribute, NonStaticAttribute, None The selected data. Returns None if no data exists under the tag of thePort. """ if not self._check_error_cases(): data = None else: data = self._m_data_storage.m_data_bank[self._m_tag][item] return data @typechecked def get_shape(self) -> Optional[Tuple[int, ...]]: """ Returns the shape of the dataset the port is linked to. This can be useful if you need the shape without loading the whole data. Returns ------- tuple(int, ) Shape of the dataset. Returns None if the dataset does not exist. """ if not self._check_error_cases(): data_shape = None else: self.open_port() data_shape = self._m_data_storage.m_data_bank[self._m_tag].shape return data_shape @typechecked def get_ndim(self) -> Optional[int]: """ Returns the number of dimensions of the dataset the port is linked to. Returns ------- int Number of dimensions of the dataset. Returns None if the dataset does not exist. """ if not self._check_error_cases(): ndim = None else: self.open_port() ndim = self._m_data_storage.m_data_bank[self._m_tag].ndim return ndim @typechecked def get_all(self) -> Optional[np.ndarray]: """ Returns the whole dataset stored in the data bank under the tag of the Port. Be careful using this function for loading large datasets. The data type is inferred from the data with numpy.asarray. A 32 bit array will be returned in case the input data is a combination of float32 and float64 arrays. Returns ------- np.ndarray The full dataset. Returns None if the data does not exist. """ if not self._check_error_cases(): data = None else: data = np.asarray(self._m_data_storage.m_data_bank[self._m_tag][...]) return data @typechecked def get_attribute(self, name: str) -> Optional[Union[StaticAttribute, NonStaticAttribute]]: """ Returns an attribute which is connected to the dataset of the port. The function can return static and non-static attributes (static attributes have priority). More information about static and non-static attributes can be found in the class documentation of :class:`~pynpoint.core.dataio.InputPort`. Parameters ---------- name : str The name of the attribute. Returns ------- StaticAttribute, NonStaticAttribute, None The attribute value. Returns None if the attribute does not exist. 
""" if not self._check_error_cases(): attr_val = None else: if name in self._m_data_storage.m_data_bank[self._m_tag].attrs: # static attribute attr_val = self._m_data_storage.m_data_bank[self._m_tag].attrs[name] elif 'header_' + self._m_tag + '/' + name in self._m_data_storage.m_data_bank: # non-static attribute attribute = 'header_' + self._m_tag + '/' + name attr_val = np.asarray(self._m_data_storage.m_data_bank[attribute][...]) else: warnings.warn(f'The attribute \'{name}\' was not found.') attr_val = None # Convert numpy types to base types (e.g., np.float64 -> float) if isinstance(attr_val, np.generic): attr_val = attr_val.item() return attr_val @typechecked def get_all_static_attributes(self) -> Optional[Dict[str, StaticAttribute]]: """ Get all static attributes of the dataset which are linked to the Port tag. Returns ------- dict, None Dictionary of all attributes, as `{attr_name:attr_value}`. """ if not self._check_error_cases(): attr_dict = None else: attr_dict = dict(self._m_data_storage.m_data_bank[self._m_tag].attrs) return attr_dict @typechecked def get_all_non_static_attributes(self) -> Optional[List[str]]: """ Returns a list of all non-static attribute keys. More information about static and non-static attributes can be found in the class documentation of :class:`~pynpoint.core.dataio.InputPort`. Returns ------- list(str, ), None List of all existing non-static attribute keys. """ if not self._check_error_cases(): attr_key = None else: attr_key = [] if 'header_' + self._m_tag + '/' in self._m_data_storage.m_data_bank: for key in self._m_data_storage.m_data_bank['header_' + self._m_tag + '/']: attr_key.append(key) else: attr_key = None return attr_key class OutputPort(Port): """ Output ports can be used to save results under a given tag to the HDF5 DataStorage. An instance of OutputPort with self.tag=`tag` can store data under the key `tag` by using one of the following methods: * set_all(...) - replaces and sets the whole dataset * append(...) - appends data to the existing data set. For more information see function documentation (:func:`~pynpoint.core.dataio.OutputPort.append`). * slicing - sets a part of the actual dataset. Example: .. code-block:: python out_port = OutputPort('Some_tag') data = np.ones(200, 200) # 2D image filled with ones out_port[0,:,:] = data # Sets the first 2D image of a 3D image stack * add_attribute(...) - modifies or creates a attribute of the dataset * del_attribute(...) - deletes a attribute * del_all_attributes(...) - deletes all attributes * append_attribute_data(...) - appends information to non-static attributes. See add_attribute() (:func:`~pynpoint.core.dataio.OutputPort.add_attribute`) for more information about static and non-static attributes. * check_static_attribute(...) - checks if a static attribute exists and if it is equal to a given value * other functions listed below For more information about how data is organized inside the central database have a look at the function documentation of the function :func:`~pynpoint.core.dataio.OutputPort.set_all` and :func:`~pynpoint.core.dataio.OutputPort.append`. Furthermore it is possible to deactivate a OutputPort to stop him saving data. """ @typechecked def __init__(self, tag: str, data_storage_in: Optional[DataStorage] = None, activate_init: bool = True) -> None: """ Constructor of the OutputPort class which creates an output port instance which can write data to the the central database under the tag `tag`. If you write a PypelineModule you should not create instances manually! 
Use the add_output_port() function instead. Parameters ---------- tag : str The tag of the port. The port can be used in order to write data to the dataset with the key = `tag`. data_storage_in : pynpoint.core.dataio.DataStorage It is possible to give the constructor of an OutputPort a DataStorage instance which will link the port to that DataStorage. Usually the DataStorage is set later by calling :func:`~pynpoint.core.dataio.Port.set_database_connection`. Returns ------- NoneType None """ super().__init__(tag, data_storage_in) self.m_activate = activate_init if tag == 'config': raise ValueError('The tag name \'config\' is reserved for the central configuration ' 'of PynPoint.') if tag == 'fits_header': raise ValueError('The tag name \'fits_header\' is reserved for storage of the FITS ' 'headers.') @typechecked def _check_status_and_activate(self) -> bool: """ Internal function which checks if the OutputPort is ready to use and open it. Returns ------- :return: Returns True if the OutputPort can be used, False if not. :rtype: bool """ if not self.m_activate: status = False elif self._m_data_storage is None: warnings.warn('OutputPort can not store data unless a database is connected.') status = False else: if not self._m_data_base_active: self.open_port() status = True return status @typechecked def _init_dataset(self, first_data: Union[np.ndarray, list], tag: str, data_dim: Optional[int] = None) -> None: """ Internal function which is used to initialize a dataset in the HDF5 database. Parameters ---------- first_data : np.ndarray, list The initial data. tag : str Database tag. data_dim : int, None Number of dimensions. The dimensions of ``first_data`` is used if set to ``None``. Returns ------- NoneType None """ @typechecked def _ndim_check(data_dim: int, first_dim: int) -> None: if first_dim > 5 or first_dim < 1: raise ValueError('Output port can only save numpy arrays from 1D to 5D. 
Use Port ' 'attributes to save as int, float, or string.') if data_dim > 5 or data_dim < 1: raise ValueError('The data dimensions should be 1D, 2D, 3D, 4D, or 5D.') if data_dim < first_dim: raise ValueError('The dimensions of the data should be equal to or larger than the ' 'dimensions of the input data.') if data_dim == 3 and first_dim == 1: raise ValueError('Cannot initialize 1D data in 3D data container.') first_data = np.asarray(first_data) if data_dim is None: data_dim = first_data.ndim _ndim_check(data_dim, first_data.ndim) if data_dim == first_data.ndim: if first_data.ndim == 1: # 1D data_shape = (None, ) elif first_data.ndim == 2: # 2D data_shape = (None, first_data.shape[1]) elif first_data.ndim == 3: # 3D data_shape = (None, first_data.shape[1], first_data.shape[2]) elif first_data.ndim == 4: # 4D data_shape = (first_data.shape[0], None, first_data.shape[2], first_data.shape[3]) elif first_data.ndim == 5: # 5D data_shape = (first_data.shape[0], first_data.shape[1], first_data.shape[2], first_data.shape[3], first_data.shape[4]) else: if data_dim == 2: # 1D -> 2D data_shape = (None, first_data.shape[0]) first_data = first_data[np.newaxis, :] elif data_dim == 3: # 2D -> 3D data_shape = (None, first_data.shape[0], first_data.shape[1]) first_data = first_data[np.newaxis, :, :] elif data_dim == 4: # 3D -> 4D data_shape = (first_data.shape[0], None, first_data.shape[1], first_data.shape[2]) first_data = first_data[:, np.newaxis, :, :] if first_data.size == 0: warnings.warn(f'The new dataset that is stored under the tag name \'{tag}\' is empty.') else: if isinstance(first_data[0], str): first_data = np.array(first_data, dtype='|S') self._m_data_storage.m_data_bank.create_dataset(tag, data=first_data, maxshape=data_shape) @typechecked def _set_all_key(self, tag: str, data: np.ndarray, data_dim: Optional[int] = None, keep_attributes: bool = False) -> None: """ Internal function which sets the values of a dataset under the *tag* name in the database. If old data exists it will be overwritten. This function is used by :func:`~pynpoint.core.dataio.OutputPort.set_all` and for setting non-static attributes. Parameters ---------- tag : str Database tag of the data that will be modified. data : np.ndarray The data that will be stored and replace any old data. data_dim : int Number of dimension of the data. keep_attributes : bool Keep all static attributes of the dataset if set to True. Non-static attributes will be kept anyway so not needed for setting non-static attributes. Returns ------- NoneType None """ tmp_attributes = {} # check if database entry is new... if tag in self._m_data_storage.m_data_bank: # NO -> database entry exists if keep_attributes: # we have to copy all attributes since deepcopy is not supported for key, value in self._m_data_storage.m_data_bank[tag].attrs.items(): tmp_attributes[key] = value # remove database entry del self._m_data_storage.m_data_bank[tag] # make new database entry self._init_dataset(data, tag, data_dim=data_dim) if keep_attributes: for key, value in tmp_attributes.items(): self._m_data_storage.m_data_bank[tag].attrs[key] = value @typechecked def _append_key(self, tag: str, data: Union[np.ndarray, list], data_dim: Optional[int] = None, force: bool = False) -> None: """ Internal function for appending data to a dataset or appending non-static attributes. See :func:`~pynpoint.core.dataio.OutputPort.append` for more information. Parameters ---------- tag : str Database tag where the data will be stored. data : np.ndarray The data that will be appended. 
data_dim : int Number of dimension of the data. force : bool The existing data will be overwritten if shape or type does not match. Returns ------- NoneType None """ # check if database entry is new... if tag not in self._m_data_storage.m_data_bank: # YES -> database entry is new self._init_dataset(data, tag, data_dim=data_dim) return None # NO -> database entry exists # check if the existing data has the same dim and datatype tmp_shape = self._m_data_storage.m_data_bank[tag].shape tmp_dim = len(tmp_shape) if data_dim is None: data_dim = tmp_dim # convert input data to numpy array data = np.asarray(data) # if the dimension offset is 1 add that dimension (e.g. save 2D image in 3D image stack) if data.ndim + 1 == data_dim: if data_dim == 2: data = data[np.newaxis, :] elif data_dim == 3: data = data[np.newaxis, :, :] elif data_dim == 4: data = data[:, np.newaxis, :, :] @typechecked def _type_check() -> bool: check_result = False if tmp_dim == data.ndim: if tmp_dim == 1: check_result = True elif tmp_dim == 2: check_result = tmp_shape[1] == data.shape[1] elif tmp_dim == 3: # check if the spatial shape is the same check_result = (tmp_shape[1] == data.shape[1]) and \ (tmp_shape[2] == data.shape[2]) elif tmp_dim == 4: # check if the spectral and spatial shape is the same check_result = (tmp_shape[0] == data.shape[0]) and \ (tmp_shape[2] == data.shape[2]) and \ (tmp_shape[3] == data.shape[3]) return check_result if _type_check(): # YES -> dim and type match # we always append in axis one independent of the dimension # 1D case if data.size == 0: warnings.warn(f'The dataset that is appended under the tag name \'{tag}\' ' f'is empty.') else: if isinstance(data[0], str): data = np.array(data, dtype='|S') if data.ndim == 4: # IFS data: (n_wavelength, n_dit, y_pos, x_pos) self._m_data_storage.m_data_bank[tag].resize(tmp_shape[1] + data.shape[1], axis=1) self._m_data_storage.m_data_bank[tag][:, tmp_shape[1]:, :, :] = data else: # Other data: n_dit is the first dimension self._m_data_storage.m_data_bank[tag].resize(tmp_shape[0] + data.shape[0], axis=0) self._m_data_storage.m_data_bank[tag][tmp_shape[0]:, ] = data return None # NO -> shape or type is different # Check force if force: # YES -> Force is true self._set_all_key(tag, data=data) return None # NO -> Error message raise ValueError(f'The port tag \'{self._m_tag}\' is already used with a different data ' f'type. The \'force\' parameter can be used to replace the tag.') @typechecked def __setitem__(self, key: Union[slice, int, tuple], value: Union[np.ndarray, int]) -> None: """ Internal function needed to change data using slicing. See class documentation for an example (:class:`~pynpoint.core.dataio.OutputPort`). Parameters ---------- key : slice Index slice to be changed. value : np.ndarray New data. Returns ------- NoneType None """ if self._check_status_and_activate(): self._m_data_storage.m_data_bank[self._m_tag][key] = value @typechecked def del_all_data(self) -> None: """ Delete all data belonging to the database tag. """ if self._check_status_and_activate(): if self._m_tag in self._m_data_storage.m_data_bank: del self._m_data_storage.m_data_bank[self._m_tag] @typechecked def set_all(self, data: Union[np.ndarray, list], data_dim: Optional[int] = None, keep_attributes: bool = False) -> None: """ Set the data in the database by replacing all old values with the values of the input data. If no old values exists the data is just stored. 
Since it is not possible to change the number of dimensions of a data set later in the processing history one can choose a dimension different to the input data. The following cases are implemented: * (#dimension of the first input data#, #desired data_dim#) * (1, 1) 1D input or single value will be stored as list in HDF5 * (1, 2) 1D input, but 2D array stored inside (i.e. a list of lists with a fixed size). * (2, 2) 2D input (single image) and 2D array stored inside (i.e. a list of lists with a fixed size). * (2, 3) 2D input (single image) but 3D array stored inside (i.e. a stack of images with a fixed size). * (3, 3) 3D input and 3D array stored inside (i.e. a stack of images with a fixed size). For 2D and 3D data the first dimension always represents the list / stack (variable size) while the second (or third) dimension has a fixed size. After creation it is possible to extend a data set using :func:`~pynpoint.core.dataio.OutputPort.append` along the first dimension. **Example 1:** Input 2D array with size (200, 200). Desired dimension 3D. The result is a 3D dataset with the dimension (1, 200, 200). It is possible to append other images with the size (200, 200) or other stacks of images with the size (:, 200, 200). **Example 2:** Input 2D array with size (200, 200). Desired dimension 2D. The result is a 2D dataset with the dimension (200, 200). It is possible to append other list with the length 200 or other stacks of lines with the size (:, 200). However it is not possible to append other 2D images along a third dimension. Parameters ---------- data : np.ndarray The data to be saved. data_dim : int Number of data dimensions. The dimension of the *first_data* is used if set to None. keep_attributes : bool All attributes of the old dataset will remain the same if set to True. Returns ------- NoneType None """ data = np.asarray(data) if self._check_status_and_activate(): self._set_all_key(tag=self._m_tag, data=data, data_dim=data_dim, keep_attributes=keep_attributes) @typechecked def append(self, data: Union[np.ndarray, list], data_dim: Optional[int] = None, force: bool = False) -> None: """ Appends data to an existing dataset along the first dimension. If no data exists for the :class:`~pynpoint.core.dataio.OutputPort`, then a new data set is created. For more information about how the dimensions are organized, see the documentation of :func:`~pynpoint.core.dataio.OutputPort.set_all`. Note it is not possible to append data with a different shape or data type to an existing dataset. **Example:** An internal data set is 3D (storing a stack of 2D images) with shape of ``(233, 300, 300)``, that is, it contains 233 images with a resolution of 300 by 300 pixels. Thus it is only possible to extend along the first dimension by appending new images with a shape of ``(300, 300)`` or by appending a stack of images with a shape of ``(:, 300, 300)``. It is possible to force the function to overwrite existing data set if the shape or type of the input data do not match the existing data. Parameters ---------- data : np.ndarray The data that will be appended. data_dim : int Number of data dimensions used if a new data set is created. The dimension of the ``data`` is used if set to None. force : bool The existing data will be overwritten if the shape or type does not match. Returns ------- NoneType None """ if self._check_status_and_activate(): self._append_key(self._m_tag, data=data, data_dim=data_dim, force=force) @typechecked def activate(self) -> None: """ Activates the port. 
A non activated port will not save data. Returns ------- NoneType None """ self.m_activate = True @typechecked def deactivate(self) -> None: """ Deactivates the port. A non activated port will not save data. Returns ------- NoneType None """ self.m_activate = False @typechecked def add_attribute(self, name: str, value: Union[StaticAttribute, NonStaticAttribute], static: bool = True) -> None: """ Adds an attribute to the dataset of the Port with the attribute name = `name` and the value = `value`. If the attribute already exists it will be overwritten. Two different types of attributes are supported: 1. **static attributes**: Contain a single value or name (e.g. The name of the used Instrument). 2. **non-static attributes**: Contain a dataset which is connected to the actual data set (e.g. Instrument temperature). It is possible to append additional information to non-static attributes later (:func:`~pynpoint.core.dataio.OutputPort.append_attribute_data`). This is not supported by static attributes. Static and non-static attributes are stored in a different way using the HDF5 file format. Static attributes will be direct attributes while non-static attributes are stored in a group with the name *header_* + name of the dataset. Parameters ---------- name : str Name of the attribute. value : StaticAttribute, NonStaticAttribute Value of the attribute. static : bool Indicate if the attribute is static (True) or non-static (False). Returns ------- NoneType None """ if self._check_status_and_activate(): if self._m_tag not in self._m_data_storage.m_data_bank: warnings.warn(f'Can not store the attribute \'{name}\' because the dataset ' f'\'{self._m_tag}\' does not exist.') else: if static: self._m_data_storage.m_data_bank[self._m_tag].attrs[name] = value else: self._set_all_key(tag=('header_' + self._m_tag + '/' + name), data=np.asarray(value)) @typechecked def append_attribute_data(self, name: str, value: Union[StaticAttribute, NonStaticAttribute]) -> None: """ Function which appends data (either a single value or an array) to non-static attributes. Parameters ---------- name : str Name of the attribute. value : StaticAttribute, NonStaticAttribute Value which will be appended to the attribute dataset. Returns ------- NoneType None """ if self._check_status_and_activate(): self._append_key(tag=('header_' + self._m_tag + '/' + name), data=np.asarray([value, ])) @typechecked def copy_attributes(self, input_port: InputPort) -> None: """ Copies all static and non-static attributes from a given InputPort. Attributes which already exist will be overwritten. Non-static attributes will be linked not copied. If the InputPort tag = OutputPort tag (self.tag) nothing will be changed. Use this function in all modules to keep the header information. Parameters ---------- input_port : pynpoint.core.dataio.InputPort The InputPort with the header information. 
Returns ------- NoneType None """ if self._check_status_and_activate() and input_port.tag != self._m_tag: # link non-static attributes if 'header_' + input_port.tag + '/' in self._m_data_storage.m_data_bank: for attr_name, attr_data in self._m_data_storage\ .m_data_bank['header_' + input_port.tag + '/'].items(): database_name = 'header_'+self._m_tag+'/'+attr_name # overwrite existing header information in the database if database_name in self._m_data_storage.m_data_bank: del self._m_data_storage.m_data_bank[database_name] self._m_data_storage.m_data_bank[database_name] = attr_data # copy static attributes attributes = input_port.get_all_static_attributes() for attr_name, attr_val in attributes.items(): self.add_attribute(attr_name, attr_val) self._m_data_storage.m_data_bank.flush() @typechecked def del_attribute(self, name: str) -> None: """ Deletes the attribute of the dataset with the given name. Finds and removes static and non-static attributes. Parameters ---------- name : str Name of the attribute. Returns ------- NoneType None """ if self._check_status_and_activate(): # check if attribute is static if name in self._m_data_storage.m_data_bank[self._m_tag].attrs: del self._m_data_storage.m_data_bank[self._m_tag].attrs[name] elif 'header_'+self._m_tag+'/'+name in self._m_data_storage.m_data_bank: # remove non-static attribute del self._m_data_storage.m_data_bank[('header_' + self._m_tag + '/' + name)] else: warnings.warn(f'Attribute \'{name}\' does not exist and could not be deleted.') @typechecked def del_all_attributes(self) -> None: """ Deletes all static and non-static attributes of the dataset. Returns ------- NoneType None """ if self._check_status_and_activate(): # static attributes if self._m_tag in self._m_data_storage.m_data_bank: self._m_data_storage.m_data_bank[self._m_tag].attrs.clear() # non-static attributes if 'header_' + self._m_tag + '/' in self._m_data_storage.m_data_bank: del self._m_data_storage.m_data_bank[('header_' + self._m_tag + '/')] @typechecked def check_static_attribute(self, name: str, comparison_value: StaticAttribute) -> Optional[int]: """ Checks if a static attribute exists and if it is equal to a comparison value. Parameters ---------- name : str Name of the static attribute. comparison_value : StaticAttribute Comparison value. Returns ------- int, None Status: 1 if the static attribute does not exist, 0 if the static attribute exists and is equal, and -1 if the static attribute exists but is not equal. """ if not self._check_status_and_activate(): return None if name in self._m_data_storage.m_data_bank[self._m_tag].attrs: if self._m_data_storage.m_data_bank[self._m_tag].attrs[name] == comparison_value: return 0 return -1 return 1 @typechecked def check_non_static_attribute(self, name: str, comparison_value: NonStaticAttribute) -> Optional[int]: """ Checks if a non-static attribute exists and if it is equal to a comparison value. Parameters ---------- name : str Name of the non-static attribute. comparison_value : NonStaticAttribute Comparison values Returns ------- int, None Status: 1 if the non-static attribute does not exist, 0 if the non-static attribute exists and is equal, and -1 if the non-static attribute exists but is not equal. 
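As a hedged illustration of how the returned status codes may be used (the port variable, attribute name, and values below are hypothetical and not taken from this module)::

    # hypothetical non-static attribute values
    parang = np.array([10., 20., 30.])

    status = out_port.check_non_static_attribute('PARANG', parang)

    if status == 1:
        # attribute does not exist yet, so store it
        out_port.add_attribute('PARANG', parang, static=False)

    elif status == -1:
        # attribute exists but with different values
        warnings.warn('PARANG is already present with different values.')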
""" if not self._check_status_and_activate(): return None group = 'header_' + self._m_tag + '/' if group in self._m_data_storage.m_data_bank: if name in self._m_data_storage.m_data_bank[group]: if np.array_equal(self._m_data_storage.m_data_bank[group+name][:], comparison_value): return 0 return -1 return 1 return 1 @typechecked def add_history(self, module: str, history: str) -> None: """ Adds an attribute with history information about the pipeline module. Parameters ---------- module : str Name of the pipeline module which was executed. history : str History information. Returns ------- NoneType None """ self.add_attribute('History: ' + module, history) @typechecked def flush(self) -> None: """ Forces the :class:`~pynpoint.core.dataio.DataStorage` to save all data from the memory to the hard drive without closing the :class:`~pynpoint.core.dataio.OutputPort`. Returns ------- NoneType None """ self._m_data_storage.m_data_bank.flush() PynPoint-0.11.0/pynpoint/core/processing.py000066400000000000000000000541611450275315200207220ustar00rootroot00000000000000""" Interfaces for pipeline modules. """ import math import os import time import warnings from abc import ABCMeta, abstractmethod from typing import Callable, List, Optional import numpy as np from typeguard import typechecked from pynpoint.core.dataio import ConfigPort, DataStorage, InputPort, OutputPort from pynpoint.util.module import progress, update_arguments from pynpoint.util.multiline import LineProcessingCapsule from pynpoint.util.multiproc import apply_function from pynpoint.util.multistack import StackProcessingCapsule class PypelineModule(metaclass=ABCMeta): """ Abstract interface for the PypelineModule: * Reading module (:class:`pynpoint.core.processing.ReadingModule`) * Writing module (:class:`pynpoint.core.processing.WritingModule`) * Processing module (:class:`pynpoint.core.processing.ProcessingModule`) Each :class:`~pynpoint.core.processing.PypelineModule` has a name as a unique identifier in the :class:`~pynpoint.core.pypeline.Pypeline` and requires the ``connect_database`` and ``run`` methods. """ @typechecked def __init__(self, name_in: str) -> None: """ Abstract constructor of a :class:`~pynpoint.core.processing.PypelineModule`. Parameters ---------- name_in : str The name of the :class:`~pynpoint.core.processing.PypelineModule`. Returns ------- NoneType None """ self._m_name = name_in self._m_data_base = None self._m_config_port = ConfigPort('config') @property @typechecked def name(self) -> str: """ Returns the name of the :class:`~pynpoint.core.processing.PypelineModule`. This property makes sure that the internal module name can not be changed. Returns ------- str The name of the :class:`~pynpoint.core.processing.PypelineModule` """ return self._m_name @abstractmethod @typechecked def connect_database(self, data_base_in: DataStorage) -> None: """ Abstract interface for the function ``connect_database`` which is needed to connect a :class:`~pynpoint.core.dataio.Port` of a :class:`~pynpoint.core.processing.PypelineModule` with the :class:`~pynpoint.core.dataio.DataStorage`. Parameters ---------- data_base_in : pynpoint.core.dataio.DataStorage The central database. """ @abstractmethod @typechecked def run(self) -> None: """ Abstract interface for the run method of :class:`~pynpoint.core.processing.PypelineModule` which inheres the actual algorithm behind the module. 
""" class ReadingModule(PypelineModule, metaclass=ABCMeta): """ The abstract class ReadingModule is an interface for processing steps in the Pypeline which have only read access to the central data storage. One can specify a directory on the hard drive where the input data for the module is located. If no input directory is given then default Pypeline input directory is used. Reading modules have a dictionary of output ports (self._m_out_ports) but no input ports. """ @typechecked def __init__(self, name_in: str, input_dir: Optional[str] = None) -> None: """ Abstract constructor of ReadingModule which needs the unique name identifier as input (more information: :class:`pynpoint.core.processing.PypelineModule`). An input directory can be specified for the location of the data or else the Pypeline default directory is used. This function is called in all *__init__()* functions inheriting from this class. Parameters ---------- name_in : str The name of the ReadingModule. input_dir : str Directory where the input files are located. Returns ------- NoneType None """ super().__init__(name_in) assert (os.path.isdir(str(input_dir)) or input_dir is None), 'Input directory for ' \ 'reading module does not exist - input requested: %s.' % input_dir self.m_input_location = input_dir self._m_output_ports = {} @typechecked def add_output_port(self, tag: str, activation: bool = True) -> OutputPort: """ Function which creates an OutputPort for a ReadingModule and appends it to the internal OutputPort dictionary. This function should be used by classes inheriting from ReadingModule to make sure that only output ports with unique tags are added. The new port can be used as: :: port = self._m_output_ports[tag] or by using the returned Port. Parameters ---------- tag : str Tag of the new output port. activation : bool Activation status of the Port after creation. Deactivated ports will not save their results until they are activated. Returns ------- pynpoint.core.dataio.OutputPort The new OutputPort for the ReadingModule. """ port = OutputPort(tag, activate_init=activation) if tag in self._m_output_ports: warnings.warn(f'Tag \'{tag}\' of ReadingModule \'{self._m_name}\' is already used.') if self._m_data_base is not None: port.set_database_connection(self._m_data_base) self._m_output_ports[tag] = port return port @typechecked def connect_database(self, data_base_in: DataStorage) -> None: """ Function used by a ReadingModule to connect all ports in the internal input and output port dictionaries to the database. The function is called by Pypeline and connects the DataStorage object to all module ports. Parameters ---------- data_base_in : pynpoint.core.dataio.DataStorage The central database. Returns ------- NoneType None """ for port in self._m_output_ports.values(): port.set_database_connection(data_base_in) self._m_config_port.set_database_connection(data_base_in) self._m_data_base = data_base_in @typechecked def get_all_output_tags(self) -> List[str]: """ Returns a list of all output tags to the ReadingModule. Returns ------- list(str) List of output tags. """ return list(self._m_output_ports.keys()) @abstractmethod @typechecked def run(self) -> None: """ Abstract interface for the run method of a ReadingModule which inheres the actual algorithm behind the module. """ class WritingModule(PypelineModule, metaclass=ABCMeta): """ The abstract class WritingModule is an interface for processing steps in the pipeline which do not change the content of the internal DataStorage. 
They only have reading access to the central data base. WritingModules can be used to export data from the HDF5 database. WritingModules know the directory on the hard drive where the output of the module can be saved. If no output directory is given the default Pypeline output directory is used. WritingModules have a dictionary of input ports (self._m_input_ports) but no output ports. """ @typechecked def __init__(self, name_in: str, output_dir: Optional[str] = None) -> None: """ Abstract constructor of a WritingModule which needs the unique name identifier as input (more information: :class:`pynpoint.core.processing.PypelineModule`). In addition one can specify a output directory where the module will save its results. If no output directory is given the Pypeline default directory is used. This function is called in all *__init__()* functions inheriting from this class. Parameters ---------- name_in : str The name of the WritingModule. output_dir : str Directory where the results will be saved. Returns ------- NoneType None """ super().__init__(name_in) assert (os.path.isdir(str(output_dir)) or output_dir is None), 'Output directory for ' \ 'writing module does not exist - input requested: %s.' % output_dir self.m_output_location = output_dir self._m_input_ports = {} @typechecked def add_input_port(self, tag: str) -> InputPort: """ Function which creates an InputPort for a WritingModule and appends it to the internal InputPort dictionary. This function should be used by classes inheriting from WritingModule to make sure that only input ports with unique tags are added. The new port can be used as: :: port = self._m_input_ports[tag] or by using the returned Port. Parameters ---------- tag : str Tag of the new input port. Returns ------- pynpoint.core.dataio.InputPort The new InputPort for the WritingModule. """ port = InputPort(tag) if self._m_data_base is not None: port.set_database_connection(self._m_data_base) self._m_input_ports[tag] = port return port @typechecked def connect_database(self, data_base_in: DataStorage) -> None: """ Function used by a WritingModule to connect all ports in the internal input and output port dictionaries to the database. The function is called by Pypeline and connects the DataStorage object to all module ports. Parameters ---------- data_base_in : pynpoint.core.dataio.DataStorage The central database. Returns ------- NoneType None """ for port in self._m_input_ports.values(): port.set_database_connection(data_base_in) self._m_config_port.set_database_connection(data_base_in) self._m_data_base = data_base_in @typechecked def get_all_input_tags(self) -> List[str]: """ Returns a list of all input tags to the WritingModule. Returns ------- list(str) List of input tags. """ return list(self._m_input_ports.keys()) @abstractmethod @typechecked def run(self) -> None: """ Abstract interface for the run method of a WritingModule which inheres the actual algorithm behind the module. """ class ProcessingModule(PypelineModule, metaclass=ABCMeta): """ The abstract class ProcessingModule is an interface for all processing steps in the pipeline which read, process, and store data. Hence processing modules have read and write access to the central database through a dictionary of output ports (self._m_output_ports) and a dictionary of input ports (self._m_input_ports). 
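A minimal sketch of a subclass is shown below; the class name, tag names, and the mean-subtraction step are hypothetical and only illustrate the port pattern described above:

.. code-block:: python

    class ExampleModule(ProcessingModule):
        # hypothetical module that subtracts the mean image from every frame

        def __init__(self, name_in, image_in_tag, image_out_tag):
            super().__init__(name_in)

            self.m_image_in_port = self.add_input_port(image_in_tag)
            self.m_image_out_port = self.add_output_port(image_out_tag)

        def run(self):
            # read all images, subtract the mean image, and store the result
            images = self.m_image_in_port.get_all()

            self.m_image_out_port.set_all(images - np.mean(images, axis=0))
            self.m_image_out_port.copy_attributes(self.m_image_in_port)
            self.m_image_out_port.close_port()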
""" @typechecked def __init__(self, name_in: str) -> None: """ Abstract constructor of a ProcessingModule which needs the unique name identifier as input (more information: :class:`pynpoint.core.processing.PypelineModule`). Call this function in all __init__() functions inheriting from this class. Parameters ---------- name_in : str The name of the ProcessingModule. """ super().__init__(name_in) self._m_input_ports = {} self._m_output_ports = {} @typechecked def add_input_port(self, tag: str) -> InputPort: """ Function which creates an InputPort for a ProcessingModule and appends it to the internal InputPort dictionary. This function should be used by classes inheriting from ProcessingModule to make sure that only input ports with unique tags are added. The new port can be used as: :: port = self._m_input_ports[tag] or by using the returned Port. Parameters ---------- tag : str Tag of the new input port. Returns ------- pynpoint.core.dataio.InputPort The new InputPort for the ProcessingModule. """ port = InputPort(tag) if self._m_data_base is not None: port.set_database_connection(self._m_data_base) self._m_input_ports[tag] = port return port @typechecked def add_output_port(self, tag: str, activation: bool = True) -> OutputPort: """ Function which creates an :class:`~pynpoint.core.dataio.OutputPort` for a :class:`~pynpoint.core.processing.ProcessingModule` and appends it to the internal :class:`~pynpoint.core.dataio.OutputPort` dictionary. This function should be used by classes inheriting from :class:`~pynpoint.core.processing.ProcessingModule` to make sure that only output ports with unique tags are added. The new port can be used as: .. code-block:: python port = self._m_output_ports[tag] or by using the returned :class:`~pynpoint.core.dataio.Port`. Parameters ---------- tag : str Tag of the new output port. activation : bool Activation status of the :class:`~pynpoint.core.dataio.Port` after creation. Deactivated ports will not save their results until they are activated. Returns ------- pynpoint.core.dataio.OutputPort The new :class:`~pynpoint.core.dataio.OutputPort` for the :class:`~pynpoint.core.processing.ProcessingModule`. """ port = OutputPort(tag, activate_init=activation) if tag in self._m_output_ports: warnings.warn(f'Tag \'{tag}\' of ProcessingModule \'{self._m_name}\' is already used.') if self._m_data_base is not None: port.set_database_connection(self._m_data_base) self._m_output_ports[tag] = port return port @typechecked def connect_database(self, data_base_in: DataStorage) -> None: """ Function used by a ProcessingModule to connect all ports in the internal input and output port dictionaries to the database. The function is called by Pypeline and connects the DataStorage object to all module ports. Parameters ---------- data_base_in : pynpoint.core.dataio.DataStorage The central database. Returns ------- NoneType None """ for port in self._m_input_ports.values(): port.set_database_connection(data_base_in) for port in self._m_output_ports.values(): port.set_database_connection(data_base_in) self._m_config_port.set_database_connection(data_base_in) self._m_data_base = data_base_in @typechecked def apply_function_in_time(self, func: Callable, image_in_port: InputPort, image_out_port: OutputPort, func_args: Optional[tuple] = None) -> None: """ Applies a function to all pixel lines in time. Parameters ---------- func : function The input function. image_in_port : pynpoint.core.dataio.InputPort Input port which is linked to the input data. 
image_out_port : pynpoint.core.dataio.OutputPort Output port which is linked to the results. func_args : tuple, None Additional arguments which are required by the input function. Not used if set to None. Returns ------- NoneType None """ cpu = self._m_config_port.get_attribute('CPU') init_line = image_in_port[:, 0, 0] im_shape = image_in_port.get_shape() size = apply_function(init_line, 0, func, func_args).shape[0] image_out_port.set_all(data=np.zeros((size, im_shape[1], im_shape[2])), data_dim=3, keep_attributes=False) image_in_port.close_port() image_out_port.close_port() capsule = LineProcessingCapsule(image_in_port=image_in_port, image_out_port=image_out_port, num_proc=cpu, function=func, function_args=func_args, data_length=size) capsule.run() @typechecked def apply_function_to_images(self, func: Callable[..., np.ndarray], image_in_port: InputPort, image_out_port: OutputPort, message: str, func_args: Optional[tuple] = None) -> None: """ Function which applies a function to all images of an input port. Stacks of images are processed in parallel if the CPU and MEMORY attribute are set in the central configuration. The number of images per process is equal to the value of MEMORY divided by the value of CPU. Note that the function *func* is not allowed to change the shape of the images if the input and output port have the same tag and ``MEMORY`` is not set to None. Parameters ---------- func : function The function which is applied to all images. Its definitions should be similar to:: def function(image_in, parameter1, parameter2, parameter3) The function must return a numpy array. image_in_port : pynpoint.core.dataio.InputPort Input port which is linked to the input data. image_out_port : pynpoint.core.dataio.OutputPort Output port which is linked to the results. message : str Progress message. func_args : tuple Additional arguments that are required by the input function. 
Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') cpu = self._m_config_port.get_attribute('CPU') nimages = image_in_port.get_shape()[0] if memory == 0: memory = nimages if image_out_port.tag == image_in_port.tag: # load all images in the memory at once if the input and output tag are the # same or if the MEMORY attribute is set to None in the configuration file images = image_in_port.get_all() result = [] start_time = time.time() for i in range(nimages): progress(i, nimages, message+'...', start_time) args = update_arguments(i, nimages, func_args) if args is None: result.append(func(images[i, ], i)) else: result.append(func(images[i, ], i, *args)) image_out_port.set_all(np.asarray(result), keep_attributes=True) elif cpu == 1: # process images one-by-one with a single process if CPU is set to 1 start_time = time.time() for i in range(nimages): progress(i, nimages, message+'...', start_time) args = update_arguments(i, nimages, func_args) if args is None: result = func(image_in_port[i, ], i) else: result = func(image_in_port[i, ], i, *args) if result.ndim == 1: image_out_port.append(result, data_dim=2) elif result.ndim == 2: image_out_port.append(result, data_dim=3) else: # process images in parallel in stacks of MEMORY/CPU images print(message, end='') args = update_arguments(0, nimages, func_args) result = apply_function(image_in_port[0, :, :], 0, func, args) result_shape = result.shape out_shape = [nimages] for item in result_shape: out_shape.append(item) image_out_port.set_all(data=np.zeros(out_shape), data_dim=len(result_shape)+1, keep_attributes=False) image_in_port.close_port() image_out_port.close_port() capsule = StackProcessingCapsule(image_in_port=image_in_port, image_out_port=image_out_port, num_proc=cpu, function=func, function_args=func_args, stack_size=math.ceil(memory/cpu), result_shape=result_shape, nimages=nimages) capsule.run() print(' [DONE]') @typechecked def get_all_input_tags(self) -> List[str]: """ Returns a list of all input tags to the ProcessingModule. Returns ------- list(str) List of input tags. """ return list(self._m_input_ports.keys()) @typechecked def get_all_output_tags(self) -> List[str]: """ Returns a list of all output tags to the ProcessingModule. Returns ------- list(str) List of output tags. """ return list(self._m_output_ports.keys()) @abstractmethod @typechecked def run(self) -> None: """ Abstract interface for the run method of a :class:`~pynpoint.core.processing.ProcessingModule` which inheres the actual algorithm behind the module. """ PynPoint-0.11.0/pynpoint/core/pypeline.py000066400000000000000000000645221450275315200203750ustar00rootroot00000000000000""" Module which capsules the methods of the Pypeline. """ import collections import configparser import json import multiprocessing import os import urllib.request import warnings from typing import Any, Dict, List, Optional, Tuple, Union from urllib.error import URLError import h5py import numpy as np from typeguard import typechecked import pynpoint from pynpoint.core.attributes import get_attributes from pynpoint.core.dataio import DataStorage from pynpoint.core.processing import ProcessingModule, PypelineModule, ReadingModule, WritingModule from pynpoint.util.module import input_info, module_info, output_info from pynpoint.util.type_aliases import NonStaticAttribute, StaticAttribute class Pypeline: """ The :class:`~pynpoint.core.pypeline.Pypeline` class manages the pipeline modules. 
It inheres an internal dictionary of pipeline modules and has a :class:`~pynpoint.core.dataio.DataStorage` which is accessed by the various modules. The order in which the pipeline modules are executed depends on the order they have been added to the :class:`~pynpoint.core.pypeline.Pypeline`. It is possible to run all modules at once or run a single module by name. """ @typechecked def __init__(self, working_place_in: Optional[str] = None, input_place_in: Optional[str] = None, output_place_in: Optional[str] = None) -> None: """ Parameters ---------- working_place_in : str, None Working location where the central HDF5 database and the configuration file will be stored. Sufficient space is required in the working folder since each pipeline module stores a dataset in the HDF5 database. The current working folder of Python is used as working folder if the argument is set to None. input_place_in : str, None Default input folder where a :class:`~pynpoint.core.processing.ReadingModule` that is added to the :class:`~pynpoint.core.pypeline.Pypeline` will look for input data. The current working folder of Python is used as input folder if the argument is set to None. output_place_in : str, None Default output folder where a :class:`~pynpoint.core.processing.WritingModule` that is added to the :class:`~pynpoint.core.pypeline.Pypeline` will store output data. The current working folder of Python is used as output folder if the argument is set to None. Returns ------- NoneType None """ pynpoint_version = 'PynPoint v' + pynpoint.__version__ print(len(pynpoint_version) * '=') print(pynpoint_version) print(len(pynpoint_version) * '=' + '\n') try: contents = urllib.request.urlopen('https://pypi.org/pypi/pynpoint/json').read() data = json.loads(contents) latest_version = data['info']['version'] except URLError: latest_version = None if latest_version is not None and pynpoint.__version__ != latest_version: print(f'A new version ({latest_version}) is available!\n') print('Want to stay informed about updates, bug fixes, and new features?') print('Please consider using the \'Watch\' button on the Github page:') print('https://github.com/PynPoint/PynPoint\n') if working_place_in is None: self._m_working_place = os.getcwd() else: self._m_working_place = working_place_in if input_place_in is None: self._m_input_place = os.getcwd() else: self._m_input_place = input_place_in if output_place_in is None: self._m_output_place = os.getcwd() else: self._m_output_place = output_place_in print(f'Working place: {self._m_working_place}') print(f'Input place: {self._m_input_place}') print(f'Output place: {self._m_output_place}\n') self._m_modules = collections.OrderedDict() hdf5_path = os.path.join(self._m_working_place, 'PynPoint_database.hdf5') self.m_data_storage = DataStorage(hdf5_path) print(f'Database: {self.m_data_storage._m_location}') self._config_init() @typechecked def __setattr__(self, key: str, value: Any) -> None: """ Internal method which assigns a value to an object attribute. This method is called whenever and attribute of the :class:`~pynpoint.core.pypeline.Pypeline` is changed and checks if the chosen working, input, or output folder exists. Parameters ---------- key : str Attribute name. value : str Value for the attribute. Returns ------- NoneType None """ if key == '_m_working_place': error_msg = f'The folder that was chosen for the working place does not exist: {value}.' 
assert os.path.isdir(str(value)), error_msg elif key == '_m_input_place': error_msg = f'The folder that was chosen for the input place does not exist: {value}.' assert os.path.isdir(str(value)), error_msg elif key == '_m_output_place': error_msg = f'The folder that was chosen for the output place does not exist: {value}.' assert os.path.isdir(str(value)), error_msg super().__setattr__(key, value) @staticmethod @typechecked def _validate(module: Union[ReadingModule, WritingModule, ProcessingModule], tags: List[str]) -> Tuple[bool, Optional[str]]: """ Internal method to validate a :class:`~pynpoint.core.processing.PypelineModule`. Parameters ---------- module : ReadingModule, WritingModule, ProcessingModule Pipeline module that will be validated. tags : list(str) Tags that are present in the database. Returns ------- bool Validation of the pipeline module. str, None Pipeline module name in case it is not valid. Returns None if the module was validated. """ if isinstance(module, ReadingModule): tags.extend(module.get_all_output_tags()) elif isinstance(module, WritingModule): for tag in module.get_all_input_tags(): if tag not in tags: return False, module.name elif isinstance(module, ProcessingModule): tags.extend(module.get_all_output_tags()) for tag in module.get_all_input_tags(): if tag not in tags: return False, module.name return True, None @typechecked def _config_init(self) -> None: """ Internal method to initialize the configuration file. The configuration parameters are read from *PynPoint_config.ini* in the working folder. The file is created with default values (ESO/NACO) in case the file is not present. Returns ------- NoneType None """ @typechecked def _create_config(filename: str, attributes: dict) -> None: file_obj = open(filename, 'w') file_obj.write('[header]\n\n') for key, val in attributes.items(): if val['config'] == 'header': file_obj.write(key+': '+str(val['value'])+'\n') file_obj.write('\n[settings]\n\n') for key, val in attributes.items(): if val['config'] == 'settings': file_obj.write(key+': '+str(val['value'])+'\n') file_obj.close() @typechecked def _read_config(config_file: str, attributes: dict) -> dict: config = configparser.ConfigParser() with open(config_file) as cf_open: config.read_file(cf_open) for key, val in attributes.items(): if config.has_option(val['config'], key): if config.get(val['config'], key) == 'None': if val['config'] == 'header': attributes[key]['value'] = 'None' # elif val['type'] == 'str': # attributes[key]['value'] = 'None' elif val['type'] == 'float': attributes[key]['value'] = float(0.) 
elif val['type'] == 'int': attributes[key]['value'] = int(0) else: if val['config'] == 'header': attributes[key]['value'] = str(config.get(val['config'], key)) # elif val['type'] == 'str': # attributes[key]['value'] = str(config.get(val['config'], key)) elif val['type'] == 'float': attributes[key]['value'] = float(config.get(val['config'], key)) elif val['type'] == 'int': attributes[key]['value'] = int(config.get(val['config'], key)) return attributes @typechecked def _write_config(attributes: dict) -> None: hdf = h5py.File(self._m_working_place+'/PynPoint_database.hdf5', 'a') if 'config' in hdf: del hdf['config'] config = hdf.create_group('config') for key in attributes.keys(): if attributes[key]['value'] is not None: config.attrs[key] = attributes[key]['value'] config.attrs['WORKING_PLACE'] = self._m_working_place hdf.close() config_file = os.path.join(self._m_working_place, 'PynPoint_config.ini') print(f'Configuration: {config_file}\n') attributes = get_attributes() attributes['CPU']['value'] = multiprocessing.cpu_count() if not os.path.isfile(config_file): warnings.warn('Configuration file not found. Creating PynPoint_config.ini with ' 'default values in the working place.') _create_config(config_file, attributes) attributes = _read_config(config_file, attributes) _write_config(attributes) n_cpu = attributes['CPU']['value'] if 'OMP_NUM_THREADS' in os.environ: n_thread = os.environ['OMP_NUM_THREADS'] else: n_thread = 'not set' print(f'Number of CPUs: {n_cpu}') print(f'Number of threads: {n_thread}') @typechecked def add_module(self, module: PypelineModule) -> None: """ Method for adding a :class:`~pynpoint.core.processing.PypelineModule` to the internal dictionary of the :class:`~pynpoint.core.pypeline.Pypeline`. The module is appended at the end of this ordered dictionary. If the input module is a reading or writing module without a specified input or output location then the default location is used. The module is connected to the internal data storage of the :class:`~pynpoint.core.pypeline.Pypeline`. Parameters ---------- module : ReadingModule, WritingModule, ProcessingModule Pipeline module that will be added to the :class:`~pynpoint.core.pypeline.Pypeline`. Returns ------- NoneType None """ if isinstance(module, ReadingModule): if module.m_input_location is None: module.m_input_location = self._m_input_place if isinstance(module, WritingModule): if module.m_output_location is None: module.m_output_location = self._m_output_place module.connect_database(self.m_data_storage) if module.name in self._m_modules: warnings.warn(f'Names of pipeline modules that are added to the Pypeline need to ' f'be unique. The current pipeline module, \'{module.name}\', does ' f'already exist in the Pypeline dictionary so the previous module ' f'with the same name will be overwritten.') self._m_modules[module.name] = module @typechecked def remove_module(self, name: str) -> bool: """ Method to remove a :class:`~pynpoint.core.processing.PypelineModule` from the internal dictionary with pipeline modules that are added to the :class:`~pynpoint.core.pypeline.Pypeline`. Parameters ---------- name : str Name of the module that has to be removed. Returns ------- bool Confirmation of removing the :class:`~pynpoint.core.processing.PypelineModule`. """ if name in self._m_modules: del self._m_modules[name] removed = True else: warnings.warn(f'Pipeline module \'{name}\' is not found in the Pypeline dictionary ' f'so it could not be removed. 
The dictionary contains the following ' f'modules: {list(self._m_modules.keys())}.') removed = False return removed @typechecked def get_module_names(self) -> List[str]: """ Method to return a list with the names of all pipeline modules that are added to the :class:`~pynpoint.core.pypeline.Pypeline`. Returns ------- list(str) Ordered list of all Pypeline modules. """ return list(self._m_modules.keys()) @typechecked def validate_pipeline(self) -> Tuple[bool, Optional[str]]: """ Method to check if each :class:`~pynpoint.core.dataio.InputPort` is pointing to an :class:`~pynpoint.core.dataio.OutputPort` of a previously added :class:`~pynpoint.core.processing.PypelineModule`. Returns ------- bool Validation of the pipeline. str, None Name of the pipeline module that can not be validated. Returns None if all modules were validated. """ self.m_data_storage.open_connection() # Create list with all datasets that are stored in the database data_tags = list(self.m_data_storage.m_data_bank.keys()) # Initiate the validation in case self._m_modules.values() is empty validation = (True, None) # Loop over all pipline modules in the ordered dictionary for module in self._m_modules.values(): # Validate the pipeline module validation = self._validate(module, data_tags) if not validation[0]: # Break the for loop if a module could not be validated break return validation @typechecked def validate_pipeline_module(self, name: str) -> Tuple[bool, Optional[str]]: """ Method to check if each :class:`~pynpoint.core.dataio.InputPort` of a :class:`~pynpoint.core.processing.PypelineModule` with label ``name`` points to an existing dataset in the database. Parameters ---------- name : str Name of the pipeline module instance that will be validated. Returns ------- bool Validation of the pipeline module. str, None Pipeline module name in case it is not valid. Returns None if the module was validated. """ self.m_data_storage.open_connection() # Create list with all datasets that are stored in the database data_tags = list(self.m_data_storage.m_data_bank.keys()) # Check if the name is included in the internal dictionary with added modules if name in self._m_modules: # Validate the pipeline module validate = self._validate(self._m_modules[name], data_tags) else: validate = (False, name) return validate @typechecked def run(self) -> None: """ Method for running all pipeline modules that are added to the :class:`~pynpoint.core.pypeline.Pypeline`. Returns ------- NoneType None """ # Validate the pipeline validation = self.validate_pipeline() if not validation[0]: # Check if the input data is available raise AttributeError(f'Pipeline module \'{validation[1]}\' is looking for data ' f'under a tag which is not created by a previous module or ' f'the data does not exist in the database.') # Loop over all pipeline modules and run them for name in self._m_modules: self.run_module(name) @typechecked def run_module(self, name: str) -> None: """ Method for running a pipeline module. Parameters ---------- name : str Name of the pipeline module. 
Returns ------- NoneType None """ if name in self._m_modules: # Validate the pipeline module validation = self.validate_pipeline_module(name) if not validation[0]: raise AttributeError(f'Pipeline module \'{validation[1]}\' is looking for data ' f'under a tag which does not exist in the database.') # Print information about the pipeline module module_info(self._m_modules[name]) # Check if the module has any input ports if hasattr(self._m_modules[name], '_m_input_ports'): # Check if the list of input ports is not empty if len(self._m_modules[name]._m_input_ports) > 0: # Print information about the input ports input_info(self._m_modules[name]) # Check if the module has any output ports if hasattr(self._m_modules[name], '_m_output_ports'): for item in self._m_modules[name]._m_output_ports: # Check if the module is a ProcessingModule if isinstance(self._m_modules[name], ProcessingModule): # Check if the database tag is already used if item in self.m_data_storage.m_data_bank: # Check if the output port is not used as input port if hasattr(self._m_modules[name], '_m_input_ports') and \ item not in self._m_modules[name]._m_input_ports: print(f'Deleting data and attributes: {item}') # Delete existing data and attributes self._m_modules[name]._m_output_ports[item].del_all_data() self._m_modules[name]._m_output_ports[item].del_all_attributes() # Run the pipeline module self._m_modules[name].run() # Check if the module has any output ports if hasattr(self._m_modules[name], '_m_output_ports'): output_shape = {} for item in self._m_modules[name]._m_output_ports: # Get the shape of the output port output_shape[item] = self.get_shape(item) # Print information about the output ports output_info(self._m_modules[name], output_shape) else: warnings.warn(f'Pipeline module \'{name}\' not found.') @typechecked def get_data(self, tag: str, data_range: Optional[Tuple[int, int]] = None) -> np.ndarray: """ Method for reading data from the database. Parameters ---------- tag : str Database tag. data_range : tuple(int, int), None Slicing range for the first axis of a dataset. This argument can be used to select a subset of images from dataset. The full dataset is read if the argument is set to None. Returns ------- np.ndarray The selected dataset from the database. """ self.m_data_storage.open_connection() if data_range is None: data = np.array(self.m_data_storage.m_data_bank[tag]) else: data = np.array(self.m_data_storage.m_data_bank[tag][data_range[0]:data_range[1], ]) self.m_data_storage.close_connection() return data @typechecked def delete_data(self, tag: str) -> None: """ Method for deleting a dataset and related attributes from the central database. Disk space does not seem to free up when using this method. Parameters ---------- tag : str Database tag. Returns ------- NoneType None """ self.m_data_storage.open_connection() if tag in self.m_data_storage.m_data_bank: del self.m_data_storage.m_data_bank[tag] else: warnings.warn(f'Dataset \'{tag}\' not found in the database.') if 'header_' + tag + '/' in self.m_data_storage.m_data_bank: del self.m_data_storage.m_data_bank[f'header_{tag}'] else: warnings.warn(f'Attributes of \'{tag}\' not found in the database.') self.m_data_storage.close_connection() @typechecked def get_attribute(self, data_tag: str, attr_name: str, static: bool = True) -> Union[StaticAttribute, NonStaticAttribute]: """ Method for reading an attribute from the database. Parameters ---------- data_tag : str Database tag. attr_name : str Name of the attribute. 
static : bool Static (True) or non-static attribute (False). Returns ------- StaticAttribute, NonStaticAttribute Attribute value. For a static attribute, a single value is returned. For a non-static attribute, an array of values is returned. """ self.m_data_storage.open_connection() if static: attr = self.m_data_storage.m_data_bank[data_tag].attrs[attr_name] else: attr = self.m_data_storage.m_data_bank[f'header_{data_tag}/{attr_name}'] attr = np.asarray(attr) self.m_data_storage.close_connection() return attr @typechecked def set_attribute(self, data_tag: str, attr_name: str, attr_value: Union[StaticAttribute, NonStaticAttribute], static: bool = True) -> None: """ Method for writing an attribute to the database. Existing values will be overwritten. Parameters ---------- data_tag : str Database tag. attr_name : str Name of the attribute. attr_value : StaticAttribute, NonStaticAttribute Attribute value. static : bool Static (True) or non-static attribute (False). Returns ------- NoneType None """ self.m_data_storage.open_connection() if static: self.m_data_storage.m_data_bank[data_tag].attrs[attr_name] = attr_value else: if isinstance(attr_value[0], str): attr_value = np.array(attr_value, dtype='|S') if attr_name in list(self.m_data_storage.m_data_bank[f'header_{data_tag}'].keys()): del self.m_data_storage.m_data_bank[f'header_{data_tag}/{attr_name}'] attr_key = f'header_{data_tag}/{attr_name}' self.m_data_storage.m_data_bank[attr_key] = np.asarray(attr_value) self.m_data_storage.close_connection() @typechecked def get_tags(self) -> List[str]: """ Method for returning a list with all database tags, except header and configuration tags. Returns ------- list(str) Database tags. """ self.m_data_storage.open_connection() tags = list(self.m_data_storage.m_data_bank.keys()) selected_tags = [] for item in tags: if item in ['config', 'fits_header'] or item[0:7] == 'header_': continue selected_tags.append(item) self.m_data_storage.close_connection() return selected_tags @typechecked def get_shape(self, tag: str) -> Optional[Tuple[int, ...]]: """ Method for returning the shape of a database entry. Parameters ---------- tag : str Database tag. Returns ------- tuple(int, ...), None Shape of the dataset. None is returned if the database tag is not found. """ self.m_data_storage.open_connection() if tag in self.m_data_storage.m_data_bank: data_shape = self.m_data_storage.m_data_bank[tag].shape else: data_shape = None self.m_data_storage.close_connection() return data_shape @typechecked def list_attributes(self, data_tag: str) -> Dict[str, Union[str, np.float64, np.ndarray]]: """ Method for printing and returning an overview of all attributes of a dataset. Parameters ---------- data_tag : str Database tag of which the attributes will be extracted. Returns ------- dict(str, bool) Dictionary with all attributes, both static and non-static. 
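As a hedged example (the database tag and attribute names below are illustrative assumptions), this method can be combined with :func:`~pynpoint.core.pypeline.Pypeline.get_attribute`::

    # 'science' is a hypothetical database tag
    pixscale = pipeline.get_attribute('science', 'PIXSCALE', static=True)
    parang = pipeline.get_attribute('science', 'PARANG', static=False)

    attributes = pipeline.list_attributes('science')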
""" print_text = f'Attribute overview of {data_tag}' print('\n' + len(print_text) * '-') print(print_text) print(len(print_text) * '-' + '\n') self.m_data_storage.open_connection() attributes = {} print('Static attributes:') for key, value in self.m_data_storage.m_data_bank[data_tag].attrs.items(): attributes[key] = value print(f'\n - {key} = {value}') print('\nNon-static attributes:') for key, value in self.m_data_storage.m_data_bank[f'header_{data_tag}'].items(): attributes[key] = list(value) print(f'\n - {key} = {list(value)}') self.m_data_storage.close_connection() return attributes PynPoint-0.11.0/pynpoint/processing/000077500000000000000000000000001450275315200174115ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/processing/__init__.py000066400000000000000000000000001450275315200215100ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/processing/background.py000066400000000000000000000466741450275315200221230ustar00rootroot00000000000000""" Pipeline modules for subtraction of the background emission. """ import time import warnings from typing import Any, Optional, Union import numpy as np from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.image import create_mask from pynpoint.util.module import progress from pynpoint.util.apply_func import subtract_line class SimpleBackgroundSubtractionModule(ProcessingModule): """ Pipeline module for simple background subtraction. Only applicable on data obtained with dithering. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, shift: int) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. shift : int Frame index offset for the background subtraction. Typically equal to the number of frames per dither location. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_shift = shift @typechecked def run(self) -> None: """ Run method of the module. Simple background subtraction with a constant index offset. Returns ------- NoneType None """ nframes = self.m_image_in_port.get_shape()[0] subtract = self.m_image_in_port[0] - self.m_image_in_port[(0 + self.m_shift) % nframes] if self.m_image_in_port.tag == self.m_image_out_port.tag: self.m_image_out_port[0] = subtract else: self.m_image_out_port.set_all(subtract, data_dim=3) start_time = time.time() for i in range(1, nframes): progress(i, nframes, 'Subtracting background...', start_time) subtract = self.m_image_in_port[i] - self.m_image_in_port[(i + self.m_shift) % nframes] if self.m_image_in_port.tag == self.m_image_out_port.tag: self.m_image_out_port[i] = subtract else: self.m_image_out_port.append(subtract) history = f'shift = {self.m_shift}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('SimpleBackgroundSubtractionModule', history) self.m_image_out_port.close_port() class MeanBackgroundSubtractionModule(ProcessingModule): """ Pipeline module for mean background subtraction. Only applicable on data obtained with dithering. 
""" __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, shift: Optional[int] = None, cubes: int = 1) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. Should be different from *image_in_tag*. shift : int, None Image index offset for the background subtraction. Typically equal to the number of frames per dither location. If set to None, the ``NFRAMES`` attribute will be used to select the background frames automatically. The *cubes* parameters should be set when *shift* is set to None. cubes : int Number of consecutive cubes per dithering position. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_shift = shift self.m_cubes = cubes @typechecked def run(self) -> None: """ Run method of the module. Mean background subtraction which uses either a constant index offset or the ``NFRAMES`` attributes. The mean background is calculated from the cubes before and after the science cube. Returns ------- NoneType None """ # Use NFRAMES values if shift=None if self.m_shift is None: self.m_shift = self.m_image_in_port.get_attribute('NFRAMES') nframes = self.m_image_in_port.get_shape()[0] if not isinstance(self.m_shift, np.ndarray) and nframes < self.m_shift*2.0: raise ValueError('The input stack is too small for a mean background subtraction. The ' 'position of the star should shift at least once.') if self.m_image_in_port.tag == self.m_image_out_port.tag: raise ValueError('The tag of the input port should be different from the output port.') # Number of substacks if isinstance(self.m_shift, np.ndarray): nstacks = np.size(self.m_shift) else: nstacks = int(np.floor(nframes/self.m_shift)) # First mean subtraction to set up the output port array if isinstance(self.m_shift, np.ndarray): next_start = np.sum(self.m_shift[0:self.m_cubes]) next_end = np.sum(self.m_shift[0:2*self.m_cubes]) if 2*self.m_cubes > np.size(self.m_shift): raise ValueError('Not enough frames available for the background subtraction.') bg_data = self.m_image_in_port[next_start:next_end, ] bg_mean = np.mean(bg_data, axis=0) else: bg_data = self.m_image_in_port[self.m_shift:2*self.m_shift, ] bg_mean = np.mean(bg_data, axis=0) # Initiate the result port data with the first frame bg_sub = self.m_image_in_port[0, ] - bg_mean self.m_image_out_port.set_all(bg_sub, data_dim=3) # Mean subtraction of the first stack (minus the first frame) if isinstance(self.m_shift, np.ndarray): tmp_data = self.m_image_in_port[1:next_start, ] tmp_data = tmp_data - bg_mean self.m_image_out_port.append(tmp_data) else: tmp_data = self.m_image_in_port[1:self.m_shift, ] tmp_data = tmp_data - bg_mean self.m_image_out_port.append(tmp_data) # Processing of the rest of the data start_time = time.time() if isinstance(self.m_shift, np.ndarray): for i in range(self.m_cubes, nstacks, self.m_cubes): progress(i, nstacks, 'Subtracting background...', start_time) prev_start = np.sum(self.m_shift[0:i-self.m_cubes]) prev_end = np.sum(self.m_shift[0:i]) next_start = np.sum(self.m_shift[0:i+self.m_cubes]) next_end = np.sum(self.m_shift[0:i+2*self.m_cubes]) # calc the mean (previous) tmp_data = self.m_image_in_port[prev_start:prev_end, ] tmp_mean = np.mean(tmp_data, axis=0) if i < nstacks-self.m_cubes: # 
calc the mean (next) tmp_data = self.m_image_in_port[next_start:next_end, ] tmp_mean = (tmp_mean + np.mean(tmp_data, axis=0)) / 2.0 # subtract mean tmp_data = self.m_image_in_port[prev_end:next_start, ] tmp_data = tmp_data - tmp_mean self.m_image_out_port.append(tmp_data) else: # the last and the one before will be performed afterwards top = int(np.ceil(nframes/self.m_shift)) - 2 for i in range(1, top, 1): progress(i, top, 'Subtracting background...', start_time) # calc the mean (next) tmp_data = self.m_image_in_port[(i+1)*self.m_shift:(i+2)*self.m_shift, ] tmp_mean = np.mean(tmp_data, axis=0) # calc the mean (previous) tmp_data = self.m_image_in_port[(i-1)*self.m_shift:(i+0)*self.m_shift, ] tmp_mean = (tmp_mean + np.mean(tmp_data, axis=0)) / 2.0 # subtract mean tmp_data = self.m_image_in_port[(i+0)*self.m_shift:(i+1)*self.m_shift, ] tmp_data = tmp_data - tmp_mean self.m_image_out_port.append(tmp_data) # last and the one before # 1. ------------------------------- one before ------------------- # calc the mean (previous) tmp_data = self.m_image_in_port[(top-1)*self.m_shift:(top+0)*self.m_shift, ] tmp_mean = np.mean(tmp_data, axis=0) # calc the mean (next) # 'nframes' is important if the last step is to huge tmp_data = self.m_image_in_port[(top+1)*self.m_shift:nframes, ] tmp_mean = (tmp_mean + np.mean(tmp_data, axis=0)) / 2.0 # subtract mean tmp_data = self.m_image_in_port[top*self.m_shift:(top+1)*self.m_shift, ] tmp_data = tmp_data - tmp_mean self.m_image_out_port.append(tmp_data) # 2. ------------------------------- last ------------------- # calc the mean (previous) tmp_data = self.m_image_in_port[(top+0)*self.m_shift:(top+1)*self.m_shift, ] tmp_mean = np.mean(tmp_data, axis=0) # subtract mean tmp_data = self.m_image_in_port[(top+1)*self.m_shift:nframes, ] tmp_data = tmp_data - tmp_mean self.m_image_out_port.append(tmp_data) # ----------------------------------------------------------- if isinstance(self.m_shift, np.ndarray): history = 'shift = NFRAMES' else: history = f'shift = {self.m_shift}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('MeanBackgroundSubtractionModule', history) self.m_image_out_port.close_port() class LineSubtractionModule(ProcessingModule): """ Pipeline module for subtracting the background emission from each pixel by computing the mean or median of all values in the row and column of the pixel. The module can for example be used if no background data is available or to remove a detector bias. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, combine: str = 'median', mask: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. combine : str The method by which the column and row pixel values are combined ('median' or 'mean'). Using a mean-combination is computationally faster than a median-combination. mask : float, None The radius of the mask within which pixel values are ignored. No mask is used if set to None. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_combine = combine self.m_mask = mask @typechecked def run(self) -> None: """ Run method of the module. 
Selects the pixel values in the column and row at each pixel position, computes the mean or median value while excluding pixels within the radius of the mask, and subtracts the mean or median value from each pixel separately. Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') im_shape = self.m_image_in_port.get_shape()[-2:] if self.m_mask: size = (self.m_mask/pixscale, None) else: size = (None, None) mask = create_mask(im_shape, size) self.apply_function_to_images(subtract_line, self.m_image_in_port, self.m_image_out_port, 'Background subtraction', func_args=(mask, self.m_combine, im_shape)) history = f'combine = {self.m_combine}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('LineSubtractionModule', history) self.m_image_out_port.close_port() class NoddingBackgroundModule(ProcessingModule): """ Pipeline module for background subtraction of data obtained with nodding (e.g., NACO AGPM data). Before using this module, the sky images should be stacked with the StackCubesModule such that each image in the stack of sky images corresponds to the mean combination of a single FITS data cube. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, science_in_tag: str, sky_in_tag: str, image_out_tag: str, mode: str = 'both') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. science_in_tag : str Tag of the database entry with science images that are read as input. sky_in_tag : str Tag of the database entry with sky images that are read as input. The :class:`~pynpoint.processing.stacksubset.StackCubesModule` should be used on the sky images beforehand. image_out_tag : str Tag of the database entry with sky subtracted images that are written as output. mode : str Sky images that are subtracted, relative to the science images. Either the next, previous, or average of the next and previous cubes of sky frames can be used by choosing 'next', 'previous', or 'both', respectively. Returns ------- NoneType None """ super().__init__(name_in) self.m_science_in_port = self.add_input_port(science_in_tag) self.m_sky_in_port = self.add_input_port(sky_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_time_stamps = [] if mode in ['next', 'previous', 'both']: self.m_mode = mode else: raise ValueError('Mode needs to be \'next\', \'previous\', or \'both\'.') @typechecked def _create_time_stamp_list(self) -> None: """ Internal method for assigning a time stamp, based on the exposure number ID, to each cube of sky and science images. """ class TimeStamp: """ Class for creating a time stamp. """ @typechecked def __init__(self, time_in: Any, im_type: str, index: Union[int, slice]) -> None: self.m_time = time_in self.m_im_type = im_type self.m_index = index @typechecked def __repr__(self) -> str: return repr((self.m_time, self.m_im_type, self.m_index)) exp_no_sky = self.m_sky_in_port.get_attribute('EXP_NO') exp_no_science = self.m_science_in_port.get_attribute('EXP_NO') nframes_sky = self.m_sky_in_port.get_attribute('NFRAMES') nframes_science = self.m_science_in_port.get_attribute('NFRAMES') if np.all(nframes_sky != 1): warnings.warn('The NFRAMES values of the sky images are not all equal to unity. 
' 'The StackCubesModule should be applied on the sky images before the ' 'NoddingBackgroundModule is used.') for i, item in enumerate(exp_no_sky): self.m_time_stamps.append(TimeStamp(item, 'SKY', i)) current = 0 for i, item in enumerate(exp_no_science): frames = slice(current, current+nframes_science[i]) self.m_time_stamps.append(TimeStamp(item, 'SCIENCE', frames)) current += nframes_science[i] self.m_time_stamps = sorted(self.m_time_stamps, key=lambda time_stamp: time_stamp.m_time) @typechecked def calc_sky_frame(self, index_of_science_data: int) -> np.ndarray: """ Method for finding the required sky frame (next, previous, or the mean of next and previous) by comparing the time stamp of the science frame with preceding and following sky frames. """ if not any(x.m_im_type == 'SKY' for x in self.m_time_stamps): raise ValueError('List of time stamps does not contain any SKY images.') @typechecked def search_for_next_sky() -> np.ndarray: for i in range(index_of_science_data, len(self.m_time_stamps)): if self.m_time_stamps[i].m_im_type == 'SKY': return self.m_sky_in_port[self.m_time_stamps[i].m_index, ] # no next sky found, look for previous sky return search_for_previous_sky() @typechecked def search_for_previous_sky() -> np.ndarray: for i in reversed(list(range(0, index_of_science_data))): if self.m_time_stamps[i].m_im_type == 'SKY': return self.m_sky_in_port[self.m_time_stamps[i].m_index, ] # no previous sky found, look for next sky return search_for_next_sky() if self.m_mode == 'next': return search_for_next_sky() if self.m_mode == 'previous': return search_for_previous_sky() if self.m_mode == 'both': previous_sky = search_for_previous_sky() next_sky = search_for_next_sky() return (previous_sky+next_sky)/2. @typechecked def run(self) -> None: """ Run method of the module. Create list of time stamps, get sky and science images, and subtract the sky images from the science images. Returns ------- NoneType None """ self._create_time_stamp_list() start_time = time.time() for i, time_entry in enumerate(self.m_time_stamps): progress(i, len(self.m_time_stamps), 'Subtracting background...', start_time) if time_entry.m_im_type == 'SKY': continue sky = self.calc_sky_frame(i) science = self.m_science_in_port[time_entry.m_index, ] self.m_image_out_port.append(science - sky[None, ], data_dim=3) history = f'mode = {self.m_mode}' self.m_image_out_port.copy_attributes(self.m_science_in_port) self.m_image_out_port.add_history('NoddingBackgroundModule', history) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/badpixel.py000066400000000000000000000365701450275315200215660ustar00rootroot00000000000000""" Pipeline modules for the detection and interpolation of bad pixels. """ import warnings from typing import Optional, Tuple import numpy as np from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import bad_pixel_sigma_filter, image_interpolation, \ replace_pixels, time_filter class BadPixelSigmaFilterModule(ProcessingModule): """ Pipeline module for finding bad pixels with a sigma filter and replacing them with the mean value of the surrounding pixels. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, map_out_tag: Optional[str] = None, box: int = 9, sigma: float = 5., iterate: int = 1) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. 
image_out_tag : str Tag of the database entry that is written as output. map_out_tag : str, None Tag of the database entry with the bad pixel map that is written as output. No data is written if set to None. This output port can not be used if CPU > 1. box : int Size of the sigma filter. The area of the filter is equal to the squared value of *box*. sigma : float Sigma threshold. iterate : int Number of iterations. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) if map_out_tag is None: self.m_map_out_port = None else: self.m_map_out_port = self.add_output_port(map_out_tag) self.m_box = box self.m_sigma = sigma self.m_iterate = iterate if self.m_iterate < 1: raise ValueError('The argument of \'iterate\' should be 1 or larger.') @typechecked def run(self) -> None: """ Run method of the module. Finds bad pixels with a sigma filter, replaces bad pixels with the mean value of the surrounding pixels, and writes the cleaned images to the database. Returns ------- NoneType None """ cpu = self._m_config_port.get_attribute('CPU') if cpu > 1 and self.m_map_out_port is not None: warnings.warn('The \'map_out_port\' can only be used if CPU = 1. No data will ' 'be stored to this output port.') del self._m_output_ports[self.m_map_out_port.tag] self.m_map_out_port = None self.apply_function_to_images(bad_pixel_sigma_filter, self.m_image_in_port, self.m_image_out_port, 'Bad pixel sigma filter', func_args=(self.m_box, self.m_sigma, self.m_iterate, self.m_map_out_port)) history = f'sigma = {self.m_sigma}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('BadPixelSigmaFilterModule', history) if self.m_map_out_port is not None: self.m_map_out_port.copy_attributes(self.m_image_in_port) self.m_map_out_port.add_history('BadPixelSigmaFilterModule', history) self.m_image_out_port.close_port() class BadPixelMapModule(ProcessingModule): """ Pipeline module to create a bad pixel map from the dark frames and flat fields. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, dark_in_tag: Optional[str], flat_in_tag: Optional[str], bp_map_out_tag: str, dark_threshold: float = 0.2, flat_threshold: float = 0.2) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. dark_in_tag : str, None Tag of the database entry with the dark frames that are read as input. Not read if set to None. flat_in_tag : str, None Tag of the database entry with the flat fields that are read as input. Not read if set to None. bp_map_out_tag : str Tag of the database entry with the bad pixel map that is written as output. dark_threshold : float Fractional threshold with respect to the maximum pixel value in the dark frame to flag bad pixels. Pixels `brighter` than the fractional threshold are flagged as bad. flat_threshold : float Fractional threshold with respect to the maximum pixel value in the flat field to flag bad pixels. Pixels `fainter` than the fractional threshold are flagged as bad. 
Returns ------- NoneType None """ super().__init__(name_in) if dark_in_tag is None: self.m_dark_port = None else: self.m_dark_port = self.add_input_port(dark_in_tag) if flat_in_tag is None: self.m_flat_port = None else: self.m_flat_port = self.add_input_port(flat_in_tag) self.m_bp_map_out_port = self.add_output_port(bp_map_out_tag) self.m_dark_threshold = dark_threshold self.m_flat_threshold = flat_threshold @typechecked def run(self) -> None: """ Run method of the module. Collapses a cube of dark frames and flat fields if needed, flags bad pixels by comparing the pixel values with the threshold times the maximum value, and writes a bad pixel map to the database. For the dark frame, pixel values larger than the threshold will be flagged while for the flat frame pixel values smaller than the threshold will be flagged. Returns ------- NoneType None """ if self.m_dark_port is not None: dark = self.m_dark_port.get_all() if dark.ndim == 3: dark = np.mean(dark, axis=0) max_dark = np.max(dark) print(f'Threshold dark frame = {max_dark*self.m_dark_threshold}') bpmap = np.ones(dark.shape) bpmap[np.where(dark > max_dark*self.m_dark_threshold)] = 0 if self.m_flat_port is not None: flat = self.m_flat_port.get_all() if flat.ndim == 3: flat = np.mean(flat, axis=0) max_flat = np.max(flat) print(f'Threshold flat field (ADU) = {max_flat*self.m_flat_threshold:.2e}') if self.m_dark_port is None: bpmap = np.ones(flat.shape) bpmap[np.where(flat < max_flat*self.m_flat_threshold)] = 0 if self.m_dark_port is not None and self.m_flat_port is not None: if not dark.shape == flat.shape: raise ValueError('Dark and flat images should have the same shape.') self.m_bp_map_out_port.set_all(bpmap, data_dim=3) if self.m_dark_port is not None: self.m_bp_map_out_port.copy_attributes(self.m_dark_port) elif self.m_flat_port is not None: self.m_bp_map_out_port.copy_attributes(self.m_flat_port) history = f'dark = {self.m_dark_threshold}, flat = {self.m_flat_threshold}' self.m_bp_map_out_port.add_history('BadPixelMapModule', history) self.m_bp_map_out_port.close_port() class BadPixelInterpolationModule(ProcessingModule): """ Pipeline module to interpolate bad pixels with spectral deconvolution. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, bad_pixel_map_tag: str, image_out_tag: str, iterations: int = 1000) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the images that are read as input. bad_pixel_map_tag : str Tag of the database entry with the bad pixel map that is read as input. image_out_tag : str Tag of the database entry that is written as output. iterations : int Number of iterations of the spectral deconvolution. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_bp_map_in_port = self.add_input_port(bad_pixel_map_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_iterations = iterations @typechecked def run(self) -> None: """ Run method of the module. Interpolates bad pixels with an iterative spectral deconvolution. 
Returns ------- NoneType None """ bad_pixel_map = self.m_bp_map_in_port.get_all()[0, ] im_shape = self.m_image_in_port.get_shape() if self.m_iterations > im_shape[1]*im_shape[2]: raise ValueError('Maximum number of iterations needs to be smaller than the number of ' 'pixels in the image.') if bad_pixel_map.shape[0] != im_shape[-2] or bad_pixel_map.shape[1] != im_shape[-1]: raise ValueError('The shape of the bad pixel map does not match the shape of the ' 'images.') self.apply_function_to_images(image_interpolation, self.m_image_in_port, self.m_image_out_port, 'Bad pixel interpolation', func_args=(self.m_iterations, bad_pixel_map)) history = f'iterations = {self.m_iterations}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('BadPixelInterpolationModule', history) self.m_image_out_port.close_port() class BadPixelTimeFilterModule(ProcessingModule): """ Pipeline module for finding bad pixels with a sigma filter along a pixel line in time. This module is suitable for removing bad pixels that are only present at a position in a small number of images, for example because a dither pattern has been applied. Pixel lines can be processed in parallel by setting the CPU keyword in the configuration file. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, sigma: Tuple[float, float] = (5., 5.)) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. sigma : tuple(float, float) Lower and upper sigma threshold as (lower, upper). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_sigma = sigma @typechecked def run(self) -> None: """ Run method of the module. Finds bad pixels along a pixel line, replaces the bad pixels with the mean value of the pixels (excluding the bad pixels), and writes the cleaned images to the database. Returns ------- NoneType None """ print('Temporal filtering of bad pixels ...', end='') self.apply_function_in_time(time_filter, self.m_image_in_port, self.m_image_out_port, func_args=(self.m_sigma, )) print(' [DONE]') history = f'sigma = {self.m_sigma}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('BadPixelTimeFilterModule', history) self.m_image_out_port.close_port() class ReplaceBadPixelsModule(ProcessingModule): """ Pipeline module for replacing bad pixels with the mean or median value of the surrounding pixels. The bad pixels are selected from the input bad pixel map. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, map_in_tag: str, image_out_tag: str, size: int = 2, replace: str = 'median') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. map_in_tag : str Tag of the database entry with the bad pixel map that is read as input. image_out_tag : str Tag of the database entry that is written as output. size : int Number of pixel lines around the bad pixel that are used to calculate the median or mean replacement value. For example, a 5x5 window is used if ``size=2``. replace : str Replace the bad pixel with the 'median', 'mean' or 'nan'.
Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_map_in_port = self.add_input_port(map_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_size = size self.m_replace = replace @typechecked def run(self) -> None: """ Run method of the module. Masks the bad pixels with NaN and replaces the bad pixels with the mean or median value (excluding the bad pixels) within a window centered on the bad pixel. The original value is used if there are only NaNs within the window. Returns ------- NoneType None """ bpmap = self.m_map_in_port.get_all()[0, ] index = np.argwhere(bpmap == 0) self.apply_function_to_images(replace_pixels, self.m_image_in_port, self.m_image_out_port, 'Running ReplaceBadPixelsModule', func_args=(index, self.m_size, self.m_replace)) history = f'replace = {self.m_replace}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('ReplaceBadPixelsModule', history) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/basic.py000066400000000000000000000216211450275315200210460ustar00rootroot00000000000000""" Pipeline modules for basic image operations. """ import time from typing import Tuple from typeguard import typechecked from scipy.ndimage import rotate from pynpoint.core.processing import ProcessingModule from pynpoint.util.module import progress, memory_frames class SubtractImagesModule(ProcessingModule): """ Pipeline module for subtracting two sets of images. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tags: Tuple[str, str], image_out_tag: str, scaling: float = 1.) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tags : tuple(str, str) Tuple with two tags of the database entry that are read as input. image_out_tag : str Tag of the database entry with the subtracted images that are written as output. scaling : float Additional scaling factor. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in1_port = self.add_input_port(image_in_tags[0]) self.m_image_in2_port = self.add_input_port(image_in_tags[1]) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_scaling = scaling @typechecked def run(self) -> None: """ Run method of the module. Subtracts the images from the second database tag from the images of the first database tag, on a frame-by-frame basis. Returns ------- NoneType None """ if self.m_image_in1_port.get_shape() != self.m_image_in2_port.get_shape(): raise ValueError('The shape of the two input tags has to be the same.') memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in1_port.get_shape()[0] frames = memory_frames(memory, nimages) start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Subtracting images...', start_time) images1 = self.m_image_in1_port[frames[i]:frames[i+1], ] images2 = self.m_image_in2_port[frames[i]:frames[i+1], ] self.m_image_out_port.append(self.m_scaling*(images1-images2), data_dim=3) history = f'scaling = {self.m_scaling}' self.m_image_out_port.add_history('SubtractImagesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in1_port) self.m_image_out_port.close_port() class AddImagesModule(ProcessingModule): """ Pipeline module for adding two sets of images. 
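A minimal usage sketch, assuming an existing ``Pypeline`` instance called ``pipeline``
and placeholder tags for two datasets of equal shape::

    module = AddImagesModule(name_in='add_images',
                             image_in_tags=('lambda_1', 'lambda_2'),
                             image_out_tag='im_added',
                             scaling=1.)

    pipeline.add_module(module)
    pipeline.run_module('add_images')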
""" __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tags: Tuple[str, str], image_out_tag: str, scaling: float = 1.) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tags : tuple(str, str) Tuple with two tags of the database entry that are read as input. image_out_tag : str Tag of the database entry with the added images that are written as output. scaling: float Additional scaling factor. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in1_port = self.add_input_port(image_in_tags[0]) self.m_image_in2_port = self.add_input_port(image_in_tags[1]) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_scaling = scaling @typechecked def run(self) -> None: """ Run method of the module. Add the images from the two database tags on a frame-by-frame basis. Returns ------- NoneType None """ if self.m_image_in1_port.get_shape() != self.m_image_in2_port.get_shape(): raise ValueError('The shape of the two input tags has to be the same.') memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in1_port.get_shape()[0] frames = memory_frames(memory, nimages) start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Adding images...', start_time) images1 = self.m_image_in1_port[frames[i]:frames[i+1], ] images2 = self.m_image_in2_port[frames[i]:frames[i+1], ] self.m_image_out_port.append(self.m_scaling*(images1+images2), data_dim=3) history = f'scaling = {self.m_scaling}' self.m_image_out_port.add_history('AddImagesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in1_port) self.m_image_out_port.close_port() class RotateImagesModule(ProcessingModule): """ Pipeline module for rotating images. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, angle: float) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. angle : float Rotation angle (deg). Rotation is clockwise for positive values. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_angle = angle @typechecked def run(self) -> None: """ Run method of the module. Rotates all images by a constant angle. Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Rotating images...', start_time) images = self.m_image_in_port[frames[i]:frames[i+1], ] for j in range(frames[i+1]-frames[i]): im_tmp = images[j, ] # ndimage.rotate rotates in clockwise direction for positive angles im_tmp = rotate(im_tmp, self.m_angle, reshape=False) self.m_image_out_port.append(im_tmp, data_dim=3) history = f'angle [deg] = {self.m_angle}' self.m_image_out_port.add_history('RotateImagesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() class RepeatImagesModule(ProcessingModule): """ Pipeline module for repeating the images from a dataset. 
""" __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, repeat: int) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry with the added images that are written as output. repeat: int The number of times the input images get repeated. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_repeat = repeat @typechecked def run(self) -> None: """ Run method of the module. Repeats the stack of input images a specified number of times. Returns ------- NoneType None """ nimages = self.m_image_in_port.get_shape()[0] memory = self._m_config_port.get_attribute('MEMORY') frames = memory_frames(memory, nimages) start_time = time.time() for i in range(self.m_repeat): progress(i, self.m_repeat, 'Repeating images...', start_time) for j, _ in enumerate(frames[:-1]): images = self.m_image_in_port[frames[j]:frames[j+1], ] self.m_image_out_port.append(images, data_dim=3) history = f'repeat = {self.m_repeat}' self.m_image_out_port.add_history('RepeatImagesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/centering.py000066400000000000000000001016031450275315200217420ustar00rootroot00000000000000""" Pipeline modules for aligning and centering of the star. """ import math import time import warnings from typing import Optional, Tuple, Union import numpy as np from astropy.modeling import fitting, models from scipy.ndimage import gaussian_filter from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.image import center_pixel, crop_image, pixel_distance, shift_image, \ subpixel_distance from pynpoint.util.module import memory_frames, progress from pynpoint.util.apply_func import align_image, apply_shift, fit_2d_function class StarAlignmentModule(ProcessingModule): """ Pipeline module to align the images with a cross-correlation in Fourier space. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, ref_image_in_tag: Optional[str] = None, interpolation: str = 'spline', accuracy: float = 10., resize: Optional[float] = None, num_references: int = 10, subframe: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the stack of images that is read as input. ref_image_in_tag : str, None Tag of the database entry with the reference image(s) that are read as input. If it is set to None, a random subsample of *num_references* elements of *image_in_tag* is taken as reference images. image_out_tag : str Tag of the database entry with the images that are written as output. interpolation : str Type of interpolation that is used for shifting the images (spline, bilinear, or fft). accuracy : float Upsampling factor for the cross-correlation. Images will be registered to within 1/accuracy of a pixel. resize : float, None Scaling factor for the up/down-sampling before the images are shifted. Not used if set to None. num_references : int Number of reference images for the cross-correlation. 
subframe : float, None Size (arcsec) of the subframe around the image center that is used for the cross-correlation. The full image is used if set to None. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) if ref_image_in_tag is not None: self.m_ref_image_in_port = self.add_input_port(ref_image_in_tag) else: self.m_ref_image_in_port = None self.m_interpolation = interpolation self.m_accuracy = accuracy self.m_resize = resize self.m_num_references = num_references self.m_subframe = subframe @typechecked def run(self) -> None: """ Run method of the module. Applies a cross-correlation of the input images with respect to a stack of reference images, rescales the image dimensions, and shifts the images to a common center. Returns ------- NoneType None """ if self.m_ref_image_in_port is None: random = np.random.choice(self.m_image_in_port.get_shape()[0], self.m_num_references, replace=False) ref_images = self.m_image_in_port[np.sort(random), :, :] else: n_ref = self.m_ref_image_in_port.get_shape()[0] if n_ref < self.m_num_references: warnings.warn(f'Number of available images ({n_ref}) is smaller than ' f'num_references ({self.m_num_references}). Using all ' f'available images instead.') self.m_num_references = n_ref ref_index = np.sort(np.random.choice(n_ref, self.m_num_references, replace=False)) ref_images = self.m_ref_image_in_port[ref_index, :, :] if self.m_subframe is not None: pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_subframe = int(self.m_subframe/pixscale) self.apply_function_to_images(align_image, self.m_image_in_port, self.m_image_out_port, 'Aligning images', func_args=(self.m_interpolation, self.m_accuracy, self.m_resize, self.m_num_references, self.m_subframe, ref_images.reshape(-1), ref_images.shape)) self.m_image_out_port.copy_attributes(self.m_image_in_port) if self.m_resize is not None: pixscale = self.m_image_in_port.get_attribute('PIXSCALE') new_pixscale = pixscale/self.m_resize self.m_image_out_port.add_attribute('PIXSCALE', new_pixscale) print(f'New pixel scale (arcsec) = {new_pixscale:.2f}') history = f'resize = {self.m_resize}' self.m_image_out_port.add_history('StarAlignmentModule', history) self.m_image_out_port.close_port() class FitCenterModule(ProcessingModule): """ Pipeline module for fitting the PSF with a 2D Gaussian or Moffat function. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, fit_out_tag: str, mask_out_tag: Optional[str] = None, method: str = 'full', mask_radii: Tuple[Optional[float], float] = (None, 0.1), sign: str = 'positive', model: str = 'gaussian', filter_size: Optional[float] = None, **kwargs: Union[Tuple[float, float, float, float, float, float, float], Tuple[float, float, float, float, float, float, float, float], float]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Database tag of the images that are read as input. fit_out_tag : str Database tag where the best-fit results and 1σ errors will be stored. The data are written in the following format: x offset (pix), x offset error (pix) y offset (pix), y offset error (pix), FWHM major axis (arcsec), FWHM major axis error (arcsec), FWHM minor axis (arcsec), FWHM minor axis error (arcsec), amplitude (ADU), amplitude error (ADU), angle (deg), angle error (deg) measured in counterclockwise direction with respect to the upward direction (i.e. 
east of north), offset (ADU), offset error (ADU), power index (only for Moffat function), and power index error (only for Moffat function). The ``fit_out_tag`` can be used as argument of ``shift_xy`` when running the :class:`~pynpoint.processing.centering.ShiftImagesModule`. mask_out_tag : str, None Database tag where the masked images will be stored. The unmasked part of the images is used for the fit. The effect of the smoothing that is applied by setting the ``fwhm`` argument is also visible in the data of the ``mask_out_tag``. The data are not stored if the argument is set to None. The :class:`~pynpoint.core.dataio.OutputPort` of ``mask_out_tag`` can only be used when ``CPU = 1``. method : str Fit and shift each image individually ('full') or only fit the mean of the cube and shift each image by this constant offset ('mean'). The 'mean' method can be used in case the images are already aligned with :class:`~pynpoint.processing.centering.StarAlignmentModule`. mask_radii : tuple(float, float), tuple(None, float) Inner and outer radius (arcsec) within and beyond which pixels are neglected during the fit. The radii are centered at the position that specified with the argument of ``guess``, which is the center of the image by default. The outer mask (second value of ``mask_radii``) is mandatory whereas radius of the inner mask is optional and can be set to None. sign : str Fit a 'positive' or 'negative' Gaussian/Moffat function. A 'negative' model can be used to center coronagraphic data in which a dark hole. model : str Type of 2D model that is used for the fit ('gaussian' or 'moffat'). Both models are elliptical in shape. filter_size : float, None Standard deviation (arcsec) of the Gaussian filter that is used to smooth the images before fitting the model. No filter is applied if set to None. Keyword arguments ----------------- guess : tuple(float, float, float, float, float, float, float, float), tuple(float, float, float, float, float, float, float, float, float) The initial parameter values for the least squares fit: x offset with respect to center (pix), y offset with respect to center (pix), FWHM x (pix), FWHM y (pix), amplitude (ADU), angle (deg), offset (ADU), and power index (only for Moffat function). Returns ------- NoneType None """ if 'guess' in kwargs: self.m_guess = kwargs['guess'] else: if model == 'gaussian': self.m_guess = (0., 0., 1., 1., 1., 0., 0.) elif model == 'moffat': self.m_guess = (0., 0., 1., 1., 1., 0., 0., 1.) if 'radius' in kwargs: mask_radii = (None, kwargs['radius']) warnings.warn(f'The \'radius\' parameter has been deprecated. Please use the ' f'\'mask_radii\' parameter instead. The argument of \'mask_radii\' ' f'is set to {mask_radii}.', DeprecationWarning) super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_fit_out_port = self.add_output_port(fit_out_tag) if mask_out_tag is None: self.m_mask_out_port = None else: self.m_mask_out_port = self.add_output_port(mask_out_tag) self.m_method = method self.m_mask_radii = mask_radii self.m_sign = sign self.m_model = model self.m_filter_size = filter_size @typechecked def run(self) -> None: """ Run method of the module. Uses a non-linear least squares (Levenberg-Marquardt) method to fit the the individual images or the mean of all images with a 2D Gaussian or Moffat function. The best-fit results and errors are stored and contain zeros in case the algorithm could not converge. 
The ``fit_out_tag`` can be used as argument of ``shift_xy`` when running the :class:`~pynpoint.processing.centering.ShiftImagesModule`. Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') cpu = self._m_config_port.get_attribute('CPU') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') if cpu > 1 and self.m_mask_out_port is not None: warnings.warn('The mask_out_port can only be used if CPU=1. No data will be ' 'stored to this output port.') del self._m_output_ports[self.m_mask_out_port.tag] self.m_mask_out_port = None if self.m_mask_radii[0] is None: # Convert from arcsec to pixels and change None to 0 self.m_mask_radii = (0., self.m_mask_radii[1]/pixscale) else: # Convert from arcsec to pixels self.m_mask_radii = (self.m_mask_radii[0]/pixscale, self.m_mask_radii[1]/pixscale) if self.m_filter_size: # Convert from arcsec to pixels self.m_filter_size /= pixscale _, xx_grid, yy_grid = pixel_distance(self.m_image_in_port.get_shape()[-2:], position=None) rr_ap = subpixel_distance(self.m_image_in_port.get_shape()[-2:], position=(self.m_guess[1], self.m_guess[0]), shift_center=False) # (y, x) nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) if self.m_method == 'full': self.apply_function_to_images(fit_2d_function, self.m_image_in_port, self.m_fit_out_port, 'Fitting the stellar PSF', func_args=(self.m_mask_radii, self.m_sign, self.m_model, self.m_filter_size, self.m_guess, self.m_mask_out_port, xx_grid, yy_grid, rr_ap, pixscale)) elif self.m_method == 'mean': print('Fitting the stellar PSF...', end='') im_mean = np.zeros(self.m_image_in_port.get_shape()[1:3]) for i, _ in enumerate(frames[:-1]): im_mean += np.sum(self.m_image_in_port[frames[i]:frames[i+1], ], axis=0) best_fit = fit_2d_function(im_mean/float(nimages), 0, self.m_mask_radii, self.m_sign, self.m_model, self.m_filter_size, self.m_guess, self.m_mask_out_port, xx_grid, yy_grid, rr_ap, pixscale) best_fit = best_fit[np.newaxis, ...] best_fit = np.repeat(best_fit, nimages, axis=0) self.m_fit_out_port.set_all(best_fit, data_dim=2) print(' [DONE]') history = f'model = {self.m_model}' self.m_fit_out_port.copy_attributes(self.m_image_in_port) self.m_fit_out_port.add_history('FitCenterModule', history) if self.m_mask_out_port: self.m_mask_out_port.copy_attributes(self.m_image_in_port) self.m_mask_out_port.add_history('FitCenterModule', history) self.m_fit_out_port.close_port() class ShiftImagesModule(ProcessingModule): """ Pipeline module for shifting a stack of images. """ __author__ = 'Tomas Stolker, Benedikt Schmidhuber' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, shift_xy: Union[Tuple[float, float], str], interpolation: str = 'spline') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. shift_xy : tuple(float, float), str The shift (pix) in x and y direction as (delta_x, delta_y). Or, a database tag with the fit results from the :class:`~pynpoint.processing.centering.FitCenterModule`. interpolation : str Interpolation type for shifting of the images ('spline', 'bilinear', or 'fft'). 
Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_interpolation = interpolation if isinstance(shift_xy, str): self.m_fit_in_port = self.add_input_port(shift_xy) self.m_shift = None else: self.m_fit_in_port = None self.m_shift = (shift_xy[1], shift_xy[0]) @typechecked def run(self) -> None: """ Run method of the module. Shifts an image with a fifth order spline, bilinear, or a Fourier shift interpolation. Returns ------- NoneType None """ constant = True # read the fit results from the self.m_fit_in_port if available if self.m_fit_in_port is not None: self.m_shift = -1.*self.m_fit_in_port[:, [0, 2]] # (x, y) self.m_shift = self.m_shift[:, [1, 0]] # (y, x) # check if data in self.m_fit_in_port is constant for all images using the # constant flag if not np.allclose(self.m_fit_in_port.get_all() - self.m_fit_in_port[0, ], 0.0): constant = False if constant: # if the offset is constant then use the first element for all images self.m_shift = self.m_shift[0, ] else: # if the offset is not constant, then apply the shifts to each frame individually for i, shift in enumerate(self.m_shift): shifted_image = shift_image(self.m_image_in_port[i, ], shift, self.m_interpolation) # append the shifted images to the selt.m_image_out_port database entry self.m_image_out_port.append(shifted_image, data_dim=3) mean_shift = np.mean(self.m_shift, axis=0) history = f'shift_xy = {mean_shift[0]:.2f}, {mean_shift[1]:.2f}' # apply a constant shift if constant: self.apply_function_to_images(apply_shift, self.m_image_in_port, self.m_image_out_port, 'Shifting the images', func_args=(self.m_shift, self.m_interpolation)) # if self.m_fit_in_port is None or constant: history = f'shift_xy = {self.m_shift[0]:.2f}, {self.m_shift[1]:.2f}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('ShiftImagesModule', history) self.m_image_out_port.close_port() class WaffleCenteringModule(ProcessingModule): """ Pipeline module for centering of coronagraphic data for which dedicated center frames with satellite spots are available. """ __author__ = 'Alexander Bohn' @typechecked def __init__(self, name_in: str, image_in_tag: str, center_in_tag: str, image_out_tag: str, size: Optional[float] = None, center: Optional[Tuple[float, float]] = None, radius: float = 45., pattern: str = None, angle: float = 45., sigma: float = 0.06, dither: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with science images that are read as input. center_in_tag : str Tag of the database entry with the center frame that is read as input. image_out_tag : str Tag of the database entry with the centered images that are written as output. Should be different from *image_in_tag*. size : float, None Image size (arcsec) for both dimensions. Original image size is used if set to None. center : tuple(float, float), None Approximate position (x0, y0) of the coronagraph. The center of the image is used if set to None. radius : float Approximate separation (pix) of the satellite spots from the star. For IFS data, the separation of the spots in the image with the shortest wavelength is required. pattern : str, None Waffle pattern that is used ('x' or '+'). This parameter will be deprecated in a future release. Please use the ``angle`` parameter instead. The parameter will be ignored if set to None. 
angle : float Angle offset (deg) in clockwise direction of the satellite spots with respect to the '+' orientation (i.e. when the spots are located along the horizontal and vertical axis). The previous use of the '+' pattern corresponds to 0 degrees and 'x' pattern corresponds to 45 degrees. SPHERE/IFS data requires an angle of 55.48 degrees. sigma : float Standard deviation (arcsec) of the Gaussian kernel that is used for the unsharp masking. dither : bool Apply dithering correction based on the ``DITHER_X`` and ``DITHER_Y`` attributes. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_center_in_port = self.add_input_port(center_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_size = size self.m_center = center self.m_radius = radius self.m_pattern = pattern self.m_angle = angle self.m_sigma = sigma self.m_dither = dither @typechecked def run(self) -> None: """ Run method of the module. Locates the position of the calibration spots in the center frame. From the four spots, the position of the star behind the coronagraph is fitted, and the images are shifted and cropped. Returns ------- NoneType None """ @typechecked def _get_center(image_number: int, center: Optional[Tuple[int, int]]) -> Tuple[np.ndarray, Tuple[int, int]]: if center_shape[-3] > 1: warnings.warn('Multiple center images found. Using the first image of the stack.') if ndim == 3: center_frame = self.m_center_in_port[0, ] elif ndim == 4: center_frame = self.m_center_in_port[image_number, 0, ] if center is None: center = center_pixel(center_frame) else: center = (int(np.floor(center[0])), int(np.floor(center[1]))) return center_frame, center center_shape = self.m_center_in_port.get_shape() im_shape = self.m_image_in_port.get_shape() ndim = self.m_image_in_port.get_ndim() center_frame, self.m_center = _get_center(0, self.m_center) # Read in wavelength information or set it to default values if ndim == 4: wavelength = self.m_image_in_port.get_attribute('WAVELENGTH') if wavelength is None: raise ValueError('The wavelength information is required to centre IFS data. ' 'Please add it via the WavelengthReadingModule before using ' 'the WaffleCenteringModule.') if im_shape[0] != center_shape[0]: raise ValueError(f'Number of science wavelength channels: {im_shape[0]}. ' f'Number of center wavelength channels: {center_shape[0]}. ' 'Exactly one center image per wavelength is required.') wavelength_min = np.min(wavelength) elif ndim == 3: # for none ifs data, use default value wavelength = [1.] wavelength_min = 1. # check if science and center images have the same shape if im_shape[-2:] != center_shape[-2:]: raise ValueError('Science and center images should have the same shape.') # Setting angle via pattern (used for backwards compability) if self.m_pattern is not None: if self.m_pattern == 'x': self.m_angle = 45. elif self.m_pattern == '+': self.m_angle = 0. else: raise ValueError(f'The pattern {self.m_pattern} is not valid. Please select ' f'either \'x\' or \'+\'.') warnings.warn(f'The \'pattern\' parameter will be deprecated in a future release. 
' f'Please use the \'angle\' parameter instead and set it to ' f'{self.m_angle} degrees.', DeprecationWarning) pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_sigma /= pixscale if self.m_size is not None: self.m_size = int(math.ceil(self.m_size/pixscale)) if self.m_dither: dither_x = self.m_image_in_port.get_attribute('DITHER_X') dither_y = self.m_image_in_port.get_attribute('DITHER_Y') nframes = self.m_image_in_port.get_attribute('NFRAMES') nframes = np.cumsum(nframes) nframes = np.insert(nframes, 0, 0) # size of center image, only works with odd value ref_image_size = 21 # Arrays for the positions x_pos = np.zeros(4) y_pos = np.zeros(4) # Arrays for the center position for each wavelength x_center = np.zeros((len(wavelength))) y_center = np.zeros((len(wavelength))) # Loop for 4 waffle spots for w, wave_nr in enumerate(wavelength): # Prepare the centering frame center_frame, _ = _get_center(w, self.m_center) center_frame_unsharp = center_frame - gaussian_filter(input=center_frame, sigma=self.m_sigma) for i in range(4): # Approximate positions of waffle spots radius = self.m_radius * wave_nr / wavelength_min x_0 = np.floor(self.m_center[0] + radius * np.cos(self.m_angle*np.pi/180 + np.pi / 4. * (2 * i))) y_0 = np.floor(self.m_center[1] + radius * np.sin(self.m_angle*np.pi/180 + np.pi / 4. * (2 * i))) tmp_center_frame = crop_image(image=center_frame_unsharp, center=(int(y_0), int(x_0)), size=ref_image_size) # find maximum in tmp image coords = np.unravel_index(indices=np.argmax(tmp_center_frame), shape=tmp_center_frame.shape) y_max, x_max = coords[0], coords[1] pixmax = tmp_center_frame[y_max, x_max] max_pos = np.array([x_max, y_max]).reshape(1, 2) # Check whether it is the correct maximum: second brightest pixel should be nearby tmp_center_frame[y_max, x_max] = 0. # introduce distance parameter dist = np.inf while dist > 2: coords = np.unravel_index(indices=np.argmax(tmp_center_frame), shape=tmp_center_frame.shape) y_max_new, x_max_new = coords[0], coords[1] pixmax_new = tmp_center_frame[y_max_new, x_max_new] # Calculate minimal distance to previous points tmp_center_frame[y_max_new, x_max_new] = 0. dist = np.amin(np.linalg.norm(np.vstack((max_pos[:, 0]-x_max_new, max_pos[:, 1]-y_max_new)), axis=0)) if dist <= 2 and pixmax_new < pixmax: break max_pos = np.vstack((max_pos, [x_max_new, y_max_new])) x_max = x_max_new y_max = y_max_new pixmax = pixmax_new x_0 = x_0 - (ref_image_size-1)/2 + x_max y_0 = y_0 - (ref_image_size-1)/2 + y_max # create reference image around determined maximum ref_center_frame = crop_image(image=center_frame_unsharp, center=(int(y_0), int(x_0)), size=ref_image_size) # Fit the data using astropy.modeling gauss_init = models.Gaussian2D(amplitude=np.amax(ref_center_frame), x_mean=x_0, y_mean=y_0, x_stddev=1., y_stddev=1., theta=0.)
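# The initialized 2D Gaussian is fitted to the cropped reference frame with a
# Levenberg-Marquardt least-squares routine; the best-fit x_mean and y_mean
# give the subpixel position of this satellite spot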
fit_gauss = fitting.LevMarLSQFitter() y_grid, x_grid = np.mgrid[y_0-(ref_image_size-1)/2:y_0+(ref_image_size-1)/2+1, x_0-(ref_image_size-1)/2:x_0+(ref_image_size-1)/2+1] gauss = fit_gauss(gauss_init, x_grid, y_grid, ref_center_frame) x_pos[i] = gauss.x_mean.value y_pos[i] = gauss.y_mean.value # Find star position as intersection of two lines x_center[w] = ((y_pos[0]-x_pos[0]*(y_pos[2]-y_pos[0])/(x_pos[2]-float(x_pos[0]))) - (y_pos[1]-x_pos[1]*(y_pos[1]-y_pos[3])/(x_pos[1]-float(x_pos[3])))) / \ ((y_pos[1]-y_pos[3])/(x_pos[1]-float(x_pos[3])) - (y_pos[2]-y_pos[0])/(x_pos[2]-float(x_pos[0]))) y_center[w] = x_center[w]*(y_pos[1]-y_pos[3])/(x_pos[1]-float(x_pos[3])) + \ (y_pos[1]-x_pos[1]*(y_pos[1]-y_pos[3])/(x_pos[1]-float(x_pos[3]))) # Adjust science images nimages = self.m_image_in_port.get_shape()[-3] npix = self.m_image_in_port.get_shape()[-2] nwavelengths = len(wavelength) start_time = time.time() for i in range(nimages): im_storage = [] for j in range(nwavelengths): im_index = i*nwavelengths + j progress(im_index, nimages*nwavelengths, 'Centering the images...', start_time) if ndim == 3: image = self.m_image_in_port[i, ] elif ndim == 4: image = self.m_image_in_port[j, i, ] shift_yx = np.array([(float(im_shape[-2])-1.)/2. - y_center[j], (float(im_shape[-1])-1.)/2. - x_center[j]]) if self.m_dither: index = np.digitize(i, nframes, right=False) - 1 shift_yx[0] -= dither_y[index] shift_yx[1] -= dither_x[index] if npix % 2 == 0 and self.m_size is not None: im_tmp = np.zeros((image.shape[0]+1, image.shape[1]+1)) im_tmp[:-1, :-1] = image image = im_tmp shift_yx[0] += 0.5 shift_yx[1] += 0.5 im_shift = shift_image(image, shift_yx, 'spline') if self.m_size is not None: im_crop = crop_image(im_shift, None, self.m_size) im_storage.append(im_crop) else: im_storage.append(im_shift) if ndim == 3: self.m_image_out_port.append(im_storage[0], data_dim=3) elif ndim == 4: self.m_image_out_port.append(np.asarray(im_storage), data_dim=4) print(f'Center [x, y] = [{x_center}, {y_center}]') history = f'[x, y] = [{round(x_center[j], 2)}, {round(y_center[j], 2)}]' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('WaffleCenteringModule', history) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/darkflat.py000066400000000000000000000141461450275315200215610ustar00rootroot00000000000000""" Pipeline modules for dark frame and flat field calibrations. """ import time import warnings from typing import Tuple import numpy as np from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.module import progress, memory_frames @typechecked def _master_frame(data: np.ndarray, im_shape: Tuple[int, int, int]) -> np.ndarray: """ Internal function which creates a master dark/flat by calculating the mean (3D data) and cropping the frames to the shape of the science images if needed. Parameters ---------- data : numpy.ndarray Input array (2D) with mean of the dark or flat frames. im_shape : tuple(int, int, int) Shape of the science images (3D). Returns ------- numpy.ndarray Master dark/flat frame. 
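Examples
--------
A minimal sketch with placeholder array shapes: a stack of dark frames with
110x110 pixels is mean-combined and matched to science images of 100x100
pixels::

    master = _master_frame(data=np.mean(dark, axis=0),
                           im_shape=(50, 100, 100))

The returned ``master`` frame has a shape of (100, 100) and a warning is
raised because the calibration data is cropped around its center.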
""" shape_in = (im_shape[1], im_shape[2]) if data.shape[0] < shape_in[0] or data.shape[1] < shape_in[1]: raise ValueError('Shape of the calibration images is smaller than the science images.') if data.shape != shape_in: cal_shape = data.shape x_off = (cal_shape[0] - shape_in[0]) // 2 y_off = (cal_shape[1] - shape_in[1]) // 2 data = data[x_off:x_off+shape_in[0], y_off:y_off+shape_in[1]] warnings.warn('The calibration images were cropped around their center to match the shape ' 'of the science images.') return data class DarkCalibrationModule(ProcessingModule): """ Pipeline module to subtract a master dark from the science data. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, dark_in_tag: str, image_out_tag: str) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the science images that are read as input. dark_in_tag : str Tag of the database with the dark frames that are read as input. image_out_tag : str Tag of the database entry that is written as output. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_dark_in_port = self.add_input_port(dark_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) @typechecked def run(self) -> None: """ Run method of the module. Creates a master dark with the same shape as the science data and subtracts the dark frame from the science data. Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) dark = self.m_dark_in_port.get_all() master = _master_frame(data=np.mean(dark, axis=0), im_shape=self.m_image_in_port.get_shape()) start_time = time.time() for i in range(len(frames[:-1])): progress(i, len(frames[:-1]), 'Subtracting the dark current...', start_time) images = self.m_image_in_port[frames[i]:frames[i+1], ] self.m_image_out_port.append(images - master, data_dim=3) history = f'dark_in_tag = {self.m_dark_in_port.tag}' self.m_image_out_port.add_history('DarkCalibrationModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() class FlatCalibrationModule(ProcessingModule): """ Pipeline module to apply a flat field correction to the science data. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, flat_in_tag: str, image_out_tag: str) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the science database that is read as input. flat_in_tag : str Tag of the flat field database that is read as input. image_out_tag : str Tag of the database entry that is written as output. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_flat_in_port = self.add_input_port(flat_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) @typechecked def run(self) -> None: """ Run method of the module. Creates a master flat with the same shape as the science image and divides the science images by the flat field. 
Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) flat = self.m_flat_in_port.get_all() master = _master_frame(data=np.mean(flat, axis=0), im_shape=self.m_image_in_port.get_shape()) # shift all values to greater or equal to +1.0 flat_min = np.amin(master) master -= flat_min - 1. # normalization, median value is 1 afterwards master /= np.median(master) start_time = time.time() for i in range(len(frames[:-1])): progress(i, len(frames[:-1]), 'Flat fielding the images...', start_time) images = self.m_image_in_port[frames[i]:frames[i+1], ] self.m_image_out_port.append(images/master, data_dim=3) history = f'flat_in_tag = {self.m_flat_in_port.tag}' self.m_image_out_port.add_history('FlatCalibrationModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/extract.py000066400000000000000000000223631450275315200214430ustar00rootroot00000000000000""" Pipeline modules for locating and extracting the position of a star. """ import math import warnings from typing import Optional, Tuple, Union import numpy as np from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import crop_around_star, crop_rotating_star from pynpoint.util.image import rotate_coordinates class StarExtractionModule(ProcessingModule): """ Pipeline module to locate the position of the star in each image and to crop all the images around this position. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, index_out_tag: Optional[str] = None, image_size: float = 2., fwhm_star: float = 0.2, position: Optional[Union[Tuple[int, int, float], Tuple[None, None, float]]] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the dataset with the input images. image_out_tag : str Tag of the dataset that is stored as output, containing the extracted images. index_out_tag : str, None List with image indices for which the image size is too large to be cropped around the brightest pixel. No data is written if set to None. This tag name can be provided to the ``frames``` parameter in :class:`~pynpoint.processing.frameselection.RemoveFramesModule`. This argument is ignored if ``CPU`` is set to a value larger than 1. image_size : float Cropped image size (arcsec). fwhm_star : float Full width at half maximum (arcsec) of the Gaussian kernel that is used to smooth the images to lower contributions of bad pixels. position : tuple(int, int, float), None Subframe that is selected to search for the star. The tuple should contain a position (pix) and size (arcsec) as (pos_x, pos_y, size). The full image is used if set to None. The center of the image will be used with ``position=(None, None, size)``. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) if index_out_tag is None: self.m_index_out_port = None else: self.m_index_out_port = self.add_output_port(index_out_tag) self.m_image_size = image_size self.m_fwhm_star = fwhm_star self.m_position = position @typechecked def run(self) -> None: """ Run method of the module. Locates the position of the star (only pixel precision) by selecting the highest pixel value. 
A Gaussian kernel with a FWHM similar to the PSF is used to lower the contribution of bad pixels which may have higher values than the peak of the PSF. Images are cropped and written to an output port. Returns ------- NoneType None """ cpu = self._m_config_port.get_attribute('CPU') if cpu > 1 and self.m_index_out_port is not None: warnings.warn('The \'index_out_port\' can only be used if CPU = 1. No data will ' 'be stored to this output port.') del self._m_output_ports[self.m_index_out_port.tag] self.m_index_out_port = None pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_image_size = int(math.ceil(self.m_image_size/pixscale)) self.m_fwhm_star = int(math.ceil(self.m_fwhm_star/pixscale)) self.apply_function_to_images(crop_around_star, self.m_image_in_port, self.m_image_out_port, 'Extracting stellar position', func_args=(self.m_position, self.m_image_size, self.m_fwhm_star, pixscale, self.m_index_out_port, self.m_image_out_port)) history = f'fwhm_star (pix) = {self.m_fwhm_star}' if self.m_index_out_port is not None: self.m_index_out_port.copy_attributes(self.m_image_in_port) self.m_index_out_port.add_history('StarExtractionModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('StarExtractionModule', history) self.m_image_out_port.close_port() class ExtractBinaryModule(ProcessingModule): """ Pipeline module to extract a binary star (or another point source) which is rotating across the image stack. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, pos_center: Tuple[float, float], pos_binary: Tuple[float, float], image_size: float = 2., search_size: float = 0.1, filter_size: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the dataset with the input images. image_out_tag : str Tag of the dataset that is stored as output, containing the extracted images. pos_center : tuple(float, float) Approximate position (x, y) of the center of rotation (pix). pos_binary : tuple(float, float) Approximate position (x, y) of the binary star in the first image (pix). image_size : float Cropped image size (arcsec). search_size : float Window size (arcsec) in which the brightest pixel is selected as position of the binary star. The search window is centered on the position that for each image is calculated from the ``pos_center``, ``pos_binary``, and parallactic angle (``PARANG``) of the image. filter_size : float, None Full width at half maximum (arcsec) of the Gaussian kernel that is used to smooth the images to lower contributions of bad pixels. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_pos_center = (pos_center[1], pos_center[0]) # (y, x) self.m_pos_binary = (pos_binary[1], pos_binary[0]) # (y, x) self.m_image_size = image_size self.m_search_size = search_size self.m_filter_size = filter_size @typechecked def run(self) -> None: """ Run method of the module. Locates the position of a binary star (or some other point source) which rotates across the stack of images due to parallactic rotation. The approximate position of the binary star is calculated by taking into account the parallactic angle of each image separately. The brightest pixel is then selected as center around which the image is cropped. 
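        A hedged usage sketch; the pipeline object, the tag names, and the pixel
        coordinates are illustrative assumptions::

            from pynpoint import ExtractBinaryModule

            module = ExtractBinaryModule(name_in='extract_binary',
                                         image_in_tag='science',
                                         image_out_tag='binary_crop',
                                         pos_center=(256., 256.),
                                         pos_binary=(280., 310.),
                                         image_size=2.,
                                         search_size=0.1,
                                         filter_size=0.05)

            pipeline.add_module(module)
            pipeline.run_module('extract_binary')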
Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') parang = self.m_image_in_port.get_attribute('PARANG') positions = np.zeros((parang.shape[0], 2), dtype=int) for i, item in enumerate(parang): # rotates in counterclockwise direction, hence the minus sign in angle positions[i, :] = rotate_coordinates(center=self.m_pos_center, position=self.m_pos_binary, angle=item-parang[0]) self.m_image_size = int(math.ceil(self.m_image_size/pixscale)) self.m_search_size = int(math.ceil(self.m_search_size/pixscale)) if self.m_filter_size is not None: self.m_filter_size = int(math.ceil(self.m_filter_size/pixscale)) self.apply_function_to_images(crop_rotating_star, self.m_image_in_port, self.m_image_out_port, 'Extracting binary position', func_args=(positions, self.m_image_size, self.m_filter_size, self.m_search_size)) history = f'filter (pix) = {self.m_filter_size}' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('ExtractBinaryModule', history) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/filter.py000066400000000000000000000047031450275315200212540ustar00rootroot00000000000000""" Pipeline modules for spatial filtering of images. """ import math import time from typeguard import typechecked from scipy.ndimage import gaussian_filter from pynpoint.core.processing import ProcessingModule from pynpoint.util.module import memory_frames, progress class GaussianFilterModule(ProcessingModule): """ Pipeline module for applying a Gaussian filter. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, fwhm: float = 1.) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : tuple(str, str) Tuple with two tags of the database entry that are read as input. image_out_tag : str Tag of the database entry with the subtracted images that are written as output. fwhm : float Full width at half maximum (arcsec) of the Gaussian kernel. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_fwhm = fwhm @typechecked def run(self) -> None: """ Run method of the module. Applies a Gaussian filter to the spatial dimensions of the images. Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') pixscale = self._m_config_port.get_attribute('PIXSCALE') nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) sigma = (self.m_fwhm/pixscale) / (2.*math.sqrt(2.*math.log(2.))) # [pix] start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Applying Gaussian filter...', start_time) images = self.m_image_in_port[frames[i]:frames[i+1], ] im_filter = gaussian_filter(images, (0, sigma, sigma)) self.m_image_out_port.append(im_filter, data_dim=3) history = f'fwhm [arcsec] = {self.m_fwhm}' self.m_image_out_port.add_history('GaussianFilterModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/fluxposition.py000066400000000000000000001750341450275315200225400ustar00rootroot00000000000000""" Pipeline modules for photometric and astrometric measurements. 
""" import os import time import warnings from typing import Any, List, Optional, Tuple, Union from multiprocessing import Pool import numpy as np import emcee from typeguard import typechecked from scipy.optimize import minimize from sklearn.decomposition import PCA from photutils.aperture import CircularAperture from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import photometry from pynpoint.util.analysis import fake_planet, merit_function, false_alarm, pixel_variance from pynpoint.util.image import create_mask, polar_to_cartesian, cartesian_to_polar, \ center_subpixel, rotate_coordinates from pynpoint.util.mcmc import lnprob from pynpoint.util.module import progress, memory_frames from pynpoint.util.psf import pca_psf_subtraction from pynpoint.util.residuals import combine_residuals class FakePlanetModule(ProcessingModule): """ Pipeline module to inject a positive or negative artificial planet into a stack of images. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, psf_in_tag: str, image_out_tag: str, position: Tuple[float, float], magnitude: float, psf_scaling: float = 1., interpolation: str = 'spline') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with images that are read as input. psf_in_tag : str Tag of the database entry that contains the reference PSF that is used as fake planet. Can be either a single image (2D) or a cube (3D) with the dimensions equal to ``image_in_tag``. image_out_tag : str Tag of the database entry with images that are written as output. position : tuple(float, float) Angular separation (arcsec) and position angle (deg) of the fake planet. Angle is measured in counterclockwise direction with respect to the upward direction (i.e., East of North). magnitude : float Magnitude of the fake planet with respect to the star. psf_scaling : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). A negative value will inject a negative planet signal. interpolation : str Type of interpolation that is used for shifting the images (spline, bilinear, or fft). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) if psf_in_tag == image_in_tag: self.m_psf_in_port = self.m_image_in_port else: self.m_psf_in_port = self.add_input_port(psf_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_position = position self.m_magnitude = magnitude self.m_psf_scaling = psf_scaling self.m_interpolation = interpolation @typechecked def run(self) -> None: """ Run method of the module. Shifts the PSF template to the location of the fake planet with an additional correction for the parallactic angle and an optional flux scaling. The stack of images with the injected planet signal is stored. 
Returns ------- NoneType None """ print('Input parameters:') print(f' - Magnitude = {self.m_magnitude:.2f}') print(f' - PSF scaling = {self.m_psf_scaling}') print(f' - Separation (arcsec) = {self.m_position[0]:.2f}') print(f' - Position angle (deg) = {self.m_position[1]:.2f}') memory = self._m_config_port.get_attribute('MEMORY') parang = self.m_image_in_port.get_attribute('PARANG') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_position = (self.m_position[0]/pixscale, self.m_position[1]) im_shape = self.m_image_in_port.get_shape() psf_shape = self.m_psf_in_port.get_shape() if psf_shape[0] != 1 and psf_shape[0] != im_shape[0]: raise ValueError('The number of frames in psf_in_tag does not match with the number ' 'of frames in image_in_tag. The DerotateAndStackModule can be ' 'used to average the PSF frames (without derotating) before applying ' 'the FakePlanetModule.') if psf_shape[-2:] != im_shape[-2:]: raise ValueError(f'The images in \'{self.m_image_in_port.tag}\' should have the same ' f'dimensions as the images images in \'{self.m_psf_in_port.tag}\'.') frames = memory_frames(memory, im_shape[0]) start_time = time.time() for j, _ in enumerate(frames[:-1]): progress(j, len(frames[:-1]), 'Injecting artificial planets...', start_time) images = self.m_image_in_port[frames[j]:frames[j+1]] angles = parang[frames[j]:frames[j+1]] if psf_shape[0] == 1: psf = self.m_psf_in_port.get_all() else: psf = self.m_psf_in_port[frames[j]:frames[j+1]] im_fake = fake_planet(images=images, psf=psf, parang=angles, position=self.m_position, magnitude=self.m_magnitude, psf_scaling=self.m_psf_scaling, interpolation='spline') self.m_image_out_port.append(im_fake, data_dim=3) history = f'(sep, angle, mag) = ({self.m_position[0]*pixscale:.2f}, ' \ f'{self.m_position[1]:.2f}, {self.m_magnitude:.2f})' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('FakePlanetModule', history) self.m_image_out_port.close_port() class SimplexMinimizationModule(ProcessingModule): """ Pipeline module to retrieve the contrast and position of a planet by injecting negative artificial planets and using a downhill simplex method. The module supports both ADI and RDI. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, psf_in_tag: str, res_out_tag: str, flux_position_tag: str, position: Tuple[float, float], magnitude: float, psf_scaling: float = -1., merit: str = 'gaussian', aperture: float = 0.1, sigma: float = 0.0, tolerance: float = 0.1, pca_number: Union[int, range, List[int]] = 10, cent_size: Optional[float] = None, edge_size: Optional[float] = None, extra_rot: float = 0., residuals: str = 'median', reference_in_tag: Optional[str] = None, offset: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the science images that are read as input. psf_in_tag : str Tag of the database entry with the reference PSF that is used as fake planet. Can be either a single image or a stack of images equal in size to ``image_in_tag``. res_out_tag : str Tag of the database entry with the image residuals that are written as output. The residuals are stored for each step of the minimization. The last image contains the best-fit residuals. flux_position_tag : str Tag of the database entry with the flux and position results that are written as output. 
Each step of the minimization stores the x position (pixels), y position (pixels), separation (arcsec), angle (deg), contrast (mag), and the chi-square value. The last row contains the best-fit results. position : tuple(float, float) Approximate position of the planet (x, y), provided with subpixel precision (i.e. as floats). The figure of merit is calculated within an aperture of radius ``aperture`` centered at the rounded (i.e. integers) coordinates of ``position``. When setting, ``offset=0.``, the ``position`` is used as fixed position of the planet while only retrieving the contrast. magnitude : float Approximate magnitude of the planet relative to the star. psf_scaling : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). Should be a negative value in order to inject negative fake planets. merit : str Figure of merit for the minimization ('hessian', 'gaussian', or 'poisson'). Either the determinant of the Hessian matrix is minimized ('hessian') or the flux of each pixel ('gaussian' or 'poisson'). For the latter case, the estimated noise is assumed to follow a Poisson (see Wertz et al. 2017) or Gaussian distribution (see Stolker et al. 2020). aperture : float Aperture radius (arcsec) at the position specified at ``position``. sigma : float Standard deviation (arcsec) of the Gaussian kernel that is used to smooth the images before the figure of merit is calculated (in order to reduce small pixel-to-pixel variations). tolerance : float Absolute error on the input parameters, position (pixels) and contrast (mag), that is used as acceptance level for convergence. Note that only a single value can be specified which is used for both the position and flux so ``tolerance=0.1`` corresponds to a precision of 0.1 mag and 0.1 pix. The tolerance on the output (i.e., the chi-square value) is set to ``np.inf`` such that the condition is always met. pca_number : int, range, list(int, ) Number of principal components (PCs) used for the PSF subtraction. Can be either a single value, or a range or list of values. In the latter case, the ``res_out_tag`` and ```flux_position_tag``` contain a 3 digit number with the number of PCs. cent_size : float, None Radius of the central mask (arcsec). No mask is used when set to ``None``. The mask is applied after the artificial planet is injected. edge_size : float, None Outer radius (arcsec) beyond which pixels are masked. No outer mask is used when set to ``None``. The radius will be set to half the image size if the argument is larger than half the image size. The mask is applied after the artificial planet is injected. extra_rot : float Additional rotation angle of the images in clockwise direction (deg). residuals : str Method for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). reference_in_tag : str, None Tag of the database entry with the reference images that are read as input. The data of the ``image_in_tag`` itself is used as reference data for the PSF subtraction if set to ``None``. Note that the mean is not subtracted from the data of ``image_in_tag`` and ``reference_in_tag`` in case the ``reference_in_tag`` is used, to allow for flux and position measurements in the context of RDI. offset : float, None Offset (pixels) by which the injected negative PSF may deviate from ``position``. The constraint on the position is not applied if set to None. Only the contrast is optimized and the position if fixed to ``position`` if ``offset=0``. 
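        A minimal, hedged configuration example; all tag names and numerical
        values are placeholder assumptions::

            from pynpoint import SimplexMinimizationModule

            module = SimplexMinimizationModule(name_in='simplex',
                                               image_in_tag='science',
                                               psf_in_tag='psf',
                                               res_out_tag='simplex_res',
                                               flux_position_tag='simplex_fluxpos',
                                               position=(55., 36.),
                                               magnitude=8.,
                                               psf_scaling=-1.,
                                               merit='gaussian',
                                               aperture=0.1,
                                               tolerance=0.01,
                                               pca_number=15)

            pipeline.add_module(module)
            pipeline.run_module('simplex')

            # the last row holds the best fit: x (pix), y (pix), separation
            # (arcsec), position angle (deg), contrast (mag), chi-square
            fluxpos = pipeline.get_data('simplex_fluxpos')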
Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) if psf_in_tag == image_in_tag: self.m_psf_in_port = self.m_image_in_port else: self.m_psf_in_port = self.add_input_port(psf_in_tag) if reference_in_tag is None: self.m_reference_in_port = None else: self.m_reference_in_port = self.add_input_port(reference_in_tag) self.m_res_out_port = [] self.m_flux_pos_port = [] if isinstance(pca_number, int): self.m_res_out_port.append(self.add_output_port(res_out_tag)) self.m_flux_pos_port.append(self.add_output_port(flux_position_tag)) else: for item in pca_number: self.m_res_out_port.append(self.add_output_port(res_out_tag+f'{item:03d}')) self.m_flux_pos_port.append(self.add_output_port(flux_position_tag+f'{item:03d}')) self.m_position = position self.m_magnitude = magnitude self.m_psf_scaling = psf_scaling self.m_merit = merit self.m_aperture = aperture self.m_sigma = sigma self.m_tolerance = tolerance self.m_cent_size = cent_size self.m_edge_size = edge_size self.m_extra_rot = extra_rot self.m_residuals = residuals self.m_offset = offset if isinstance(pca_number, int): self.m_pca_number = [pca_number] else: self.m_pca_number = pca_number @typechecked def run(self) -> None: """ Run method of the module. The position and contrast of a planet is measured by injecting negative copies of the PSF template and applying a downhill simplex method (Nelder-Mead) for minimization of a figure of merit at the planet location. Returns ------- NoneType None """ print('Input parameters:') print(f' - Number of principal components = {self.m_pca_number}') print(f' - Figure of merit = {self.m_merit}') print(f' - Residuals type = {self.m_residuals}') print(f' - Absolute tolerance (pixels/mag) = {self.m_tolerance}') print(f' - Maximum offset = {self.m_offset}') print(f' - Guessed position (x, y) = ({self.m_position[0]:.2f}, ' f'{self.m_position[1]:.2f})') parang = self.m_image_in_port.get_attribute('PARANG') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') aperture = (round(self.m_position[1]), round(self.m_position[0]), self.m_aperture/pixscale) print(f' - Aperture position (x, y) = ({aperture[1]}, {aperture[0]})') print(f' - Aperture radius (pixels) = {int(aperture[2])}') self.m_sigma /= pixscale if self.m_cent_size is not None: self.m_cent_size /= pixscale print(f' - Inner mask radius (pixels) = {int(self.m_cent_size)}') if self.m_edge_size is not None: self.m_edge_size /= pixscale print(f' - Outer mask radius (pixels) = {int(self.m_edge_size)}') psf = self.m_psf_in_port.get_all() images = self.m_image_in_port.get_all() if psf.shape[0] != 1 and psf.shape[0] != images.shape[0]: raise ValueError('The number of frames in psf_in_tag does not match with the number ' 'of frames in image_in_tag. 
The DerotateAndStackModule can be ' 'used to average the PSF frames (without derotating) before applying ' 'the SimplexMinimizationModule.') center = center_subpixel(psf) print(f'Image center (y, x) = {center}') # Rotate the initial position, (y, x), by the extra rotation angle to (y_rot, x_rot) pos_init = rotate_coordinates(center, (self.m_position[1], self.m_position[0]), self.m_extra_rot) if self.m_reference_in_port is not None and self.m_merit != 'poisson': raise NotImplementedError('The reference_in_tag can only be used in combination with ' 'the \'poisson\' figure of merit.') @typechecked def _objective(arg: np.ndarray, count: int, n_components: int, sklearn_pca: Optional[PCA], var_noise: Optional[float]) -> float: # Extract the contrast, y position, and x position from the argument tuple mag = arg[0] if self.m_offset is None or self.m_offset > 0.: pos_y = arg[1] pos_x = arg[2] else: pos_y = pos_init[0] pos_x = pos_init[1] # Calculate the absolute offset (pixels) with respect to the initial guess pos_offset = np.sqrt((pos_x-pos_init[1])**2 + (pos_y-pos_init[0])**2) if self.m_offset is not None and pos_offset > self.m_offset: # Return chi-square = inf if the offset needs to be tested and is too large return np.inf # Convert the cartesian position to a separation and position angle sep_ang = cartesian_to_polar(center, pos_y, pos_x) # Inject the negative artifical planet at the position and contrast that is tested fake = fake_planet(images=images, psf=psf, parang=parang, position=(sep_ang[0], sep_ang[1]), magnitude=mag, psf_scaling=self.m_psf_scaling) # Create a mask mask = create_mask(fake.shape[-2:], (self.m_cent_size, self.m_edge_size)) if self.m_reference_in_port is None: # PSF subtraction with the science data as reference data (ADI) im_res_rot, im_res_derot = pca_psf_subtraction(images=fake*mask, angles=-1.*parang+self.m_extra_rot, pca_number=n_components, pca_sklearn=sklearn_pca, im_shape=None, indices=None) else: # PSF subtraction with separate reference data (RDI) im_reshape = np.reshape(fake*mask, (im_shape[0], im_shape[1]*im_shape[2])) im_res_rot, im_res_derot = pca_psf_subtraction(images=im_reshape, angles=-1.*parang+self.m_extra_rot, pca_number=n_components, pca_sklearn=sklearn_pca, im_shape=im_shape, indices=None) # Collapse the residuals of the PSF subtraction res_stack = combine_residuals(method=self.m_residuals, res_rot=im_res_derot, residuals=im_res_rot, angles=parang) # Appedn the collapsed residuals to the output port self.m_res_out_port[count].append(res_stack, data_dim=3) # Calculate the chi-square for the tested position and contrast chi_sq = merit_function(residuals=res_stack[0, ], merit=self.m_merit, aperture=aperture, sigma=self.m_sigma, var_noise=var_noise) # Apply the extra rotation to the y and x position # The returned position is given as (y, x) position = rotate_coordinates(center, (pos_y, pos_x), -self.m_extra_rot) # Create and array with the x position, y position, separation (arcsec), position # angle (deg), contrast (mag), and chi-square res = np.asarray([position[1], position[0], sep_ang[0]*pixscale, (sep_ang[1]-self.m_extra_rot) % 360., mag, chi_sq]) # Append the results to the output port self.m_flux_pos_port[count].append(res, data_dim=2) print(f'\rSimplex minimization... {n_components} PC - chi^2 = {chi_sq:.2e}', end='') return chi_sq for i, n_components in enumerate(self.m_pca_number): print(f'\rSimplex minimization... 
{n_components} PC ', end='') if self.m_reference_in_port is None: sklearn_pca = None else: ref_data = self.m_reference_in_port.get_all() im_shape = images.shape ref_shape = ref_data.shape if ref_shape[1:] != im_shape[1:]: raise ValueError('The image size of the science data and the reference data ' 'should be identical.') # reshape reference data and select the unmasked pixels ref_reshape = ref_data.reshape(ref_shape[0], ref_shape[1]*ref_shape[2]) mean_ref = np.mean(ref_reshape, axis=0) ref_reshape -= mean_ref # create the PCA basis sklearn_pca = PCA(n_components=n_components, svd_solver='arpack') sklearn_pca.fit(ref_reshape) # add mean of reference array as 1st PC and orthogonalize it to the PCA basis mean_ref_reshape = mean_ref.reshape((1, mean_ref.shape[0])) q_ortho, _ = np.linalg.qr(np.vstack((mean_ref_reshape, sklearn_pca.components_[:-1, ])).T) sklearn_pca.components_ = q_ortho.T if self.m_merit == 'poisson': var_noise = None elif self.m_merit in ['gaussian', 'hessian']: var_noise = pixel_variance(var_type=self.m_merit, images=images, parang=parang, cent_size=self.m_cent_size, edge_size=self.m_edge_size, pca_number=n_components, residuals=self.m_residuals, aperture=aperture, sigma=self.m_sigma) if self.m_offset == 0.: x0_minimize = np.array([self.m_magnitude]) else: x0_minimize = np.array([self.m_magnitude, pos_init[0], pos_init[1]]) min_result = minimize(fun=_objective, x0=x0_minimize, args=(i, n_components, sklearn_pca, var_noise), method='Nelder-Mead', tol=None, options={'xatol': self.m_tolerance, 'fatol': float('inf')}) print(' [DONE]') if self.m_offset == 0.: pos_x = pos_init[1] pos_y = pos_init[0] else: pos_x = min_result.x[2] pos_y = min_result.x[1] pos_rot_yx = rotate_coordinates(center, (pos_y, pos_x), -self.m_extra_rot) sep_ang = cartesian_to_polar(center, pos_rot_yx[0], pos_rot_yx[1]) print('Best-fit parameters:') print(f' - Position (x, y) = ({pos_rot_yx[1]:.2f}, {pos_rot_yx[0]:.2f})') print(f' - Separation (mas) = {sep_ang[0]*pixscale*1e3:.2f}') print(f' - Position angle (deg) = {sep_ang[1]:.2f}') print(f' - Contrast (mag) = {min_result.x[0]:.2f}') history = f'merit = {self.m_merit}' for item in self.m_flux_pos_port: item.copy_attributes(self.m_image_in_port) item.add_history('SimplexMinimizationModule', history) for item in self.m_res_out_port: item.copy_attributes(self.m_image_in_port) item.add_history('SimplexMinimizationModule', history) self.m_res_out_port[0].close_port() class FalsePositiveModule(ProcessingModule): """ Pipeline module to calculate the signal-to-noise ratio (SNR) and false positive fraction (FPF) at a specified location in an image by using the Student's t-test (Mawet et al. 2014). Optionally, the SNR can be optimized with the aperture position as free parameter. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, snr_out_tag: str, position: Tuple[float, float], aperture: float = 0.1, ignore: bool = False, optimize: bool = False, **kwargs: Any) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the images that are read as input. The SNR/FPF is calculated for each image in the dataset. snr_out_tag : str Tag of the database entry that is written as output. The output format is: (x position (pix), y position (pix), separation (arcsec), position angle (deg), SNR, FPF). The position angle is measured in counterclockwise direction with respect to the upward direction (i.e., East of North). 
position : tuple(float, float) The x and y position (pix) where the SNR and FPF is calculated. Note that the bottom left of the image is defined as (-0.5, -0.5) so there is a -1.0 offset with respect to the DS9 coordinate system. Aperture photometry corrects for the partial inclusion of pixels at the boundary. aperture : float Aperture radius (arcsec). ignore : bool Ignore the two neighboring apertures that may contain self-subtraction from the planet. optimize : bool Optimize the SNR. The aperture position is stored in the `snr_out_tag`. The size of the aperture is kept fixed. Keyword arguments ----------------- tolerance : float The absolute tolerance on the position for the optimization to end. Default is set to 0.01 (pix). offset : float, None Offset (pix) by which the aperture may deviate from ``position`` when ``optimize=True`` (default: None). Returns ------- NoneType None """ if 'tolerance' in kwargs: self.m_tolerance = kwargs['tolerance'] else: self.m_tolerance = 1e-2 if 'offset' in kwargs: self.m_offset = kwargs['offset'] else: self.m_offset = None if 'bounds' in kwargs: warnings.warn('The \'bounds\' keyword argument has been deprecated. Please use ' '\'offset\' instead (e.g. offset=3.0).', DeprecationWarning) super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_snr_out_port = self.add_output_port(snr_out_tag) self.m_position = position self.m_aperture = aperture self.m_ignore = ignore self.m_optimize = optimize @typechecked def run(self) -> None: """ Run method of the module. Calculates the SNR and FPF for a specified position in a post- processed image with the Student's t-test (Mawet et al. 2014). This approach assumes Gaussian noise but accounts for small sample statistics. Returns ------- NoneType None """ @typechecked def _snr_optimize(arg: np.ndarray) -> float: pos_x, pos_y = arg pos_offset = np.sqrt((pos_x-self.m_position[0])**2 + (pos_y-self.m_position[1])**2) if self.m_offset is not None: if pos_offset > self.m_offset: snr = 0. 
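                    # The tested position lies further than the allowed offset from
                    # the initial guess, so snr stays at 0 and the S/N calculation
                    # below is skipped (the optimizer then avoids this position).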
else: snr = None if self.m_offset is None or snr is None: _, _, snr, _ = false_alarm(image=image, x_pos=pos_x, y_pos=pos_y, size=self.m_aperture, ignore=self.m_ignore) return -1.*snr pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_aperture /= pixscale print('Input parameters:') print(f' - Aperture position = {self.m_position}') print(f' - Aperture radius (pixels) = {self.m_aperture:.2f}') print(f' - Optimize aperture position = {self.m_optimize}') print(f' - Ignore neighboring apertures = {self.m_ignore}') print(f' - Minimization tolerance = {self.m_tolerance}') nimages = self.m_image_in_port.get_shape()[0] print('Calculating the S/N and FPF...') for j in range(nimages): image = self.m_image_in_port[j, ] center = center_subpixel(image) if self.m_optimize: result = minimize(fun=_snr_optimize, x0=np.array([self.m_position[0], self.m_position[1]]), method='Nelder-Mead', tol=None, options={'xatol': self.m_tolerance, 'fatol': float('inf')}) _, _, snr, fpf = false_alarm(image=image, x_pos=result.x[0], y_pos=result.x[1], size=self.m_aperture, ignore=self.m_ignore) x_pos, y_pos = result.x[0], result.x[1] else: _, _, snr, fpf = false_alarm(image=image, x_pos=self.m_position[0], y_pos=self.m_position[1], size=self.m_aperture, ignore=self.m_ignore) x_pos, y_pos = self.m_position[0], self.m_position[1] print(f'Image {j+1:03d}/{nimages} -> (x, y) = ({x_pos:.2f}, {y_pos:.2f}), ' f'S/N = {snr:.2f}, FPF = {fpf:.2e}') sep_ang = cartesian_to_polar(center, y_pos, x_pos) result = np.column_stack((x_pos, y_pos, sep_ang[0]*pixscale, sep_ang[1], snr, fpf)) self.m_snr_out_port.append(result, data_dim=2) history = f'aperture (arcsec) = {self.m_aperture*pixscale:.2f}' self.m_snr_out_port.copy_attributes(self.m_image_in_port) self.m_snr_out_port.add_history('FalsePositiveModule', history) self.m_snr_out_port.close_port() class MCMCsamplingModule(ProcessingModule): """ Pipeline module to measure the separation, position angle, and contrast of a planet with injection of negative artificial planets and sampling of the posterior distribution with ``emcee``, an affine invariant Markov chain Monte Carlo (MCMC) ensemble sampler. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, psf_in_tag: str, chain_out_tag: str, param: Tuple[float, float, float], bounds: Tuple[Tuple[float, float], Tuple[float, float], Tuple[float, float]], nwalkers: int = 100, nsteps: int = 200, psf_scaling: float = -1., pca_number: int = 20, aperture: Union[float, Tuple[int, int, float]] = 0.1, mask: Optional[Union[Tuple[float, float], Tuple[None, float], Tuple[float, None], Tuple[None, None]]] = None, extra_rot: float = 0., merit: str = 'gaussian', residuals: str = 'median', resume: bool = False, **kwargs: Union[float, Tuple[float, float, float]]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Database tag with the science images. psf_in_tag : str Database tag with the reference PSF that is used as artificial planet. The dataset can be either a single image, or a stack of images with the dimensions equal to ``image_in_tag``. chain_out_tag : str Database tag were the posterior samples will be stored. The shape of the array is ``(nsteps, nwalkers, 3)``. The mean acceptance fraction and the integrated autocorrelation time are stored as attributes. 
param : tuple(float, float, float) The approximate separation (arcsec), angle (deg), and contrast (mag), for example obtained with the :class:`~pynpoint.processing.fluxposition.SimplexMinimizationModule`. The angle is measured in counterclockwise direction with respect to the upward direction (i.e., East of North). The separation and angle are also used as (fixed) position for the aperture if ``aperture`` contains a float (i.e. the radius). bounds : tuple(tuple(float, float), tuple(float, float), tuple(float, float)) The prior boundaries for the separation (arcsec), angle (deg), and contrast (mag). Each set of boundaries is specified as a tuple. nwalkers : int Number of walkers. nsteps : int Number of steps per walker. psf_scaling : float Additional scaling factor of the planet flux (e.g. to correct for a neutral density filter or difference in exposure time). The value should be negative in order to inject negative fake planets. pca_number : int Number of principal components used for the PSF subtraction. aperture : float, tuple(int, int, float) Either the aperture radius (arcsec) at the position of ``param`` or tuple with the position and aperture radius (arcsec) as ``(pos_x, pos_y, radius)``. mask : tuple(float, float), None Inner and outer mask radius (arcsec) for the PSF subtraction. Both elements of the tuple can be set to ``None``. Masked pixels are excluded from the PCA computation, resulting in a smaller runtime. Masking is done after the artificial planet is injected. extra_rot : float Additional rotation angle of the images (deg). merit : str Figure of merit for the minimization ('hessian', 'gaussian', or 'poisson'). Either the determinant of the Hessian matrix is minimized ('hessian') or the flux of each pixel ('gaussian' or 'poisson'). For the latter case, the estimate noise is assumed to follow a Poisson (see Wertz et al. 2017) or Gaussian distribution (see Wertz et al. 2017 and Stolker et al. 2020). residuals : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). resume : bool Resume from the last state of the chain that was stored by the backend of ``emcee``. Set to ``True`` for continuing with the samples from a previous run, for example when it was interrupted or to create more steps for the walkers. The backend data of ``emcee`` is stored with the tag ``[chain_out_tag]_backend`` in the HDF5 database. Keyword arguments ----------------- sigma : tuple(float, float, float) The standard deviations that randomly initializes the start positions of the walkers in a small ball around the a priori preferred position. The tuple should contain a value for the separation (arcsec), position angle (deg), and contrast (mag). The default is set to ``(1e-5, 1e-3, 1e-3)``. 
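        A hedged configuration sketch; the tag names and numbers are
        illustrative assumptions rather than recommended values::

            from pynpoint import MCMCsamplingModule

            module = MCMCsamplingModule(name_in='mcmc',
                                        image_in_tag='science',
                                        psf_in_tag='psf',
                                        chain_out_tag='mcmc_chain',
                                        param=(0.45, 120., 8.),
                                        bounds=((0.35, 0.55), (100., 140.), (6., 10.)),
                                        nwalkers=100,
                                        nsteps=500,
                                        psf_scaling=-1.,
                                        pca_number=20,
                                        aperture=0.1,
                                        mask=(0.1, None))

            pipeline.add_module(module)
            pipeline.run_module('mcmc')

            # shape (nsteps, nwalkers, 3): separation (arcsec), angle (deg), contrast (mag)
            samples = pipeline.get_data('mcmc_chain')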
Returns ------- NoneType None """ if 'sigma' in kwargs: self.m_sigma = kwargs['sigma'] else: self.m_sigma = (1e-5, 1e-3, 1e-3) super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) if psf_in_tag == image_in_tag: self.m_psf_in_port = self.m_image_in_port else: self.m_psf_in_port = self.add_input_port(psf_in_tag) self.m_chain_out_port = self.add_output_port(chain_out_tag) self.m_param = param self.m_bounds = bounds self.m_nwalkers = nwalkers self.m_nsteps = nsteps self.m_psf_scaling = psf_scaling self.m_pca_number = pca_number self.m_aperture = aperture self.m_extra_rot = extra_rot self.m_merit = merit self.m_residuals = residuals self.m_resume = resume if mask is None: self.m_mask = (None, None) else: self.m_mask = mask @typechecked def run(self) -> None: """ Run method of the module. The posterior distributions of the separation, position angle, and flux contrast are sampled with the affine invariant Markov chain Monte Carlo (MCMC) ensemble sampler ``emcee``. At each step, a negative copy of the PSF template is injected and the likelihood function is evaluated at the approximate position of the planet. Returns ------- NoneType None """ print('Input parameters:') print(f' - Number of principal components: {self.m_pca_number}') print(f' - Figure of merit: {self.m_merit}') ndim = 3 cpu = self._m_config_port.get_attribute('CPU') work_place = self._m_config_port.get_attribute('WORKING_PLACE') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') parang = self.m_image_in_port.get_attribute('PARANG') images = self.m_image_in_port.get_all() psf = self.m_psf_in_port.get_all() im_shape = self.m_image_in_port.get_shape()[-2:] self.m_image_in_port.close_port() self.m_psf_in_port.close_port() if psf.shape[0] != 1 and psf.shape[0] != images.shape[0]: raise ValueError('The number of frames in psf_in_tag does not match with the number of ' 'frames in image_in_tag. The DerotateAndStackModule can be used to ' 'average the PSF frames (without derotating) before applying the ' 'MCMCsamplingModule.') if self.m_mask[0] is not None: self.m_mask = (self.m_mask[0]/pixscale, self.m_mask[1]) if self.m_mask[1] is not None: self.m_mask = (self.m_mask[0], self.m_mask[1]/pixscale) # create the mask and get the unmasked image indices mask = create_mask(im_shape[-2:], self.m_mask) indices = np.where(mask.reshape(-1) != 0.)[0] if isinstance(self.m_aperture, float): yx_pos = polar_to_cartesian(images, self.m_param[0]/pixscale, self.m_param[1]) aperture = (round(yx_pos[0]), round(yx_pos[1]), self.m_aperture/pixscale) elif isinstance(self.m_aperture, tuple): aperture = (self.m_aperture[1], self.m_aperture[0], self.m_aperture[2]/pixscale) print(f' - Aperture position (x, y): ({aperture[1]}, {aperture[0]})') print(f' - Aperture radius (pixels): {int(aperture[2])}') if self.m_merit == 'poisson': var_noise = None elif self.m_merit in ['gaussian', 'hessian']: var_noise = pixel_variance(var_type=self.m_merit, images=images, parang=parang, cent_size=self.m_mask[0], edge_size=self.m_mask[1], pca_number=self.m_pca_number, residuals=self.m_residuals, aperture=aperture, sigma=0.) 
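        # Report the noise estimate that enters the likelihood; var_noise
        # remains None when the 'poisson' figure of merit is selected.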
if self.m_merit == 'gaussian': print(f'Gaussian standard deviation (counts): {np.sqrt(var_noise):.2e}') elif self.m_merit == 'hessian': print(f'Hessian standard deviation: {np.sqrt(var_noise):.2e}') initial = np.zeros((self.m_nwalkers, ndim)) initial[:, 0] = self.m_param[0] + np.random.normal(0, self.m_sigma[0], self.m_nwalkers) initial[:, 1] = self.m_param[1] + np.random.normal(0, self.m_sigma[1], self.m_nwalkers) initial[:, 2] = self.m_param[2] + np.random.normal(0, self.m_sigma[2], self.m_nwalkers) backend = emcee.backends.HDFBackend( os.path.join(work_place, 'PynPoint_database.hdf5'), name=self.m_chain_out_port.tag+'_backend', read_only=False) if not self.m_resume: print('Reset backend of emcee...', end='', flush=True) backend.reset(self.m_nwalkers, ndim) print(' [DONE]') print('Sampling the posterior distributions with MCMC...') with Pool(processes=cpu) as pool: sampler = emcee.EnsembleSampler(self.m_nwalkers, ndim, lnprob, pool=pool, args=([self.m_bounds, images, psf, mask, parang, self.m_psf_scaling, pixscale, self.m_pca_number, self.m_extra_rot, aperture, indices, self.m_merit, self.m_residuals, var_noise]), backend=backend) sampler.run_mcmc(initial, self.m_nsteps, progress=True) samples = sampler.get_chain() self.m_image_in_port._check_status_and_activate() self.m_chain_out_port._check_status_and_activate() self.m_chain_out_port.set_all(samples) print(f'Number of samples stored: {samples.shape[0]*samples.shape[1]}') burnin = int(0.2*samples.shape[0]) samples = samples[burnin:, :, :].reshape((-1, ndim)) sep_percen = np.percentile(samples[:, 0], [16., 50., 84.]) ang_percen = np.percentile(samples[:, 1], [16., 50., 84.]) mag_percen = np.percentile(samples[:, 2], [16., 50., 84.]) print('Median and uncertainties (20% removed as burnin):') print(f'Separation (mas) = {1e3*sep_percen[1]:.2f} ' f'(-{1e3*sep_percen[1]-1e3*sep_percen[0]:.2f} ' f'+{1e3*sep_percen[2]-1e3*sep_percen[1]:.2f})') print(f'Position angle (deg) = {ang_percen[1]:.2f} ' f'(-{ang_percen[1]-ang_percen[0]:.2f} ' f'+{ang_percen[2]-ang_percen[1]:.2f})') print(f'Contrast (mag) = {mag_percen[1]:.2f} ' f'(-{mag_percen[1]-mag_percen[0]:.2f} ' f'+{mag_percen[2]-mag_percen[1]:.2f})') history = f'walkers = {self.m_nwalkers}, steps = {self.m_nsteps}' self.m_chain_out_port.copy_attributes(self.m_image_in_port) self.m_chain_out_port.add_history('MCMCsamplingModule', history) mean_accept = np.mean(sampler.acceptance_fraction) print(f'Mean acceptance fraction: {mean_accept:.3f}') self.m_chain_out_port.add_attribute('ACCEPTANCE', mean_accept, static=True) try: autocorr = emcee.autocorr.integrated_time(sampler.get_chain()) print(f'Integrated autocorrelation time = {autocorr}') except emcee.autocorr.AutocorrError: autocorr = [np.nan, np.nan, np.nan] print('The chain is too short to reliably estimate the autocorrelation time. [WARNING]') self.m_chain_out_port.add_attribute('AUTOCORR_0', autocorr[0], static=True) self.m_chain_out_port.add_attribute('AUTOCORR_1', autocorr[1], static=True) self.m_chain_out_port.add_attribute('AUTOCORR_2', autocorr[2], static=True) self.m_chain_out_port.close_port() class AperturePhotometryModule(ProcessingModule): """ Pipeline module for calculating the counts within a circular area. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, phot_out_tag: str, radius: float = 0.1, position: Optional[Tuple[float, float]] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. 
image_in_tag : str Tag of the database entry that is read as input. phot_out_tag : str Tag of the database entry with the photometry values that are written as output. radius : float Radius (arcsec) of the circular aperture. position : tuple(float, float), None Center position (pix) of the aperture, (x, y), with subpixel precision. The center of the image will be used if set to None. Python indexing starts at zero so the bottom left corner of the image has coordinates (-0.5, -0.5). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_phot_out_port = self.add_output_port(phot_out_tag) self.m_phot_in_port = None self.m_radius = radius self.m_position = position @typechecked def run(self) -> None: """ Run method of the module. Computes the flux within a circular aperture for each frame and saves the values in the database. Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_radius /= pixscale if self.m_position is None: # Returns the center position as (y, x) self.m_position = center_subpixel(self.m_image_in_port[0, ]) # Store the center position as (x, y) self.m_position = (self.m_position[1], self.m_position[0]) print(f'Aperture position (x, y) = ({self.m_position[0]:.1f}, {self.m_position[1]:.1f})') print(f'Aperture radius (pixels) = ({self.m_radius:.1f})') # Position in CircularAperture is defined as (x, y) aperture = CircularAperture((self.m_position[0], self.m_position[1]), self.m_radius) self.apply_function_to_images(photometry, self.m_image_in_port, self.m_phot_out_port, 'Aperture photometry', func_args=(aperture, )) self.m_phot_in_port = self.add_input_port(self.m_phot_out_port.tag) data = self.m_phot_in_port.get_all() print(f'Mean flux (counts) = {np.mean(data):.2f} +/- {np.std(data)/np.sqrt(data.size):.2f}') history = f'radius (pixels) = {self.m_radius:.3f}' self.m_phot_out_port.copy_attributes(self.m_image_in_port) self.m_phot_out_port.add_history('AperturePhotometryModule', history) self.m_phot_out_port.close_port() class SystematicErrorModule(ProcessingModule): """ Pipeline module for estimating the systematic error of the flux and position measurement. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, psf_in_tag: str, offset_out_tag: str, position: Tuple[float, float], magnitude: float, angles: Tuple[float, float, int] = (0., 359., 360), psf_scaling: float = 1., merit: str = 'gaussian', aperture: float = 0.1, tolerance: float = 0.01, pca_number: int = 10, mask: Optional[Union[Tuple[float, float], Tuple[None, float], Tuple[float, None], Tuple[None, None]]] = None, extra_rot: float = 0., residuals: str = 'median', offset: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the science images for which the systematic error is estimated. psf_in_tag : str Tag of the database entry with the PSF template that is used as fake planet. Can be either a single image or a stack of images equal in size to ``image_in_tag``. offset_out_tag : str Tag of the database entry at which the differences are stored between the injected and and retrieved values of the separation (arcsec), position angle (deg), contrast (mag), x position (pix), and y position (pix). position : tuple(float, float) Separation (arcsec) and position angle (deg) that are used to remove the planet signal. 
The separation is also used to estimate the systematic error. magnitude : float Magnitude that is used to remove the planet signal and estimate the systematic error. angles : tuple(float, float, int) The start, end, and number of the position angles (linearly sampled) that are used to estimate the systematic errors (default: 0., 359., 360). The endpoint is also included. psf_scaling : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). Should be a positive value. merit : str Figure of merit for the minimization ('hessian', 'gaussian', or 'poisson'). Either the determinant of the Hessian matrix is minimized ('hessian') or the flux of each pixel ('gaussian' or 'poisson'). For the latter case, the estimate noise is assumed to follow a Poisson (see Wertz et al. 2017) or Gaussian distribution (see Wertz et al. 2017 and Stolker et al. 2020). aperture : float Aperture radius (arcsec) that is used for measuring the figure of merit. tolerance : float Absolute error on the input parameters, position (pix) and contrast (mag), that is used as acceptance level for convergence. Note that only a single value can be specified which is used for both the position and flux so tolerance=0.1 will give a precision of 0.1 mag and 0.1 pix. The tolerance on the output (i.e., the chi-square value) is set to np.inf so the condition is always met. pca_number : int Number of principal components (PCs) used for the PSF subtraction. mask : tuple(float, float), None Inner and outer mask radius (arcsec) which is applied before the PSF subtraction. Both elements of the tuple can be set to None. extra_rot : float Additional rotation angle of the images in clockwise direction (deg). residuals : str Method for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). offset : float, None Offset (pixels) by which the negative PSF may deviate from the positive injected PSF. No constraint on the position is applied if set to None. Only the contrast is optimized and the position is fixed to the injected value if ``offset=0``. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_tag = image_in_tag self.m_psf_in_tag = psf_in_tag self.m_image_in_port = self.add_input_port(image_in_tag) self.m_offset_out_port = self.add_output_port(offset_out_tag) self.m_position = position self.m_magnitude = magnitude self.m_angles = angles self.m_psf_scaling = psf_scaling self.m_merit = merit self.m_aperture = aperture self.m_tolerance = tolerance self.m_mask = mask self.m_extra_rot = extra_rot self.m_residuals = residuals self.m_pca_number = pca_number self.m_offset = offset @typechecked def run(self) -> None: """ Run method of the module. Removes the planet signal, then artificial planets are injected (one at a time) at equally separated position angles and their position and contrast is determined with the :class:`~pynpoint.processing.fluxposition.SimplexMinimizationModule`. The differences between the injected and retrieved separation, position angle, and contrast are then stored as output. 
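        A hedged example of a typical setup; the tag names and values are
        placeholder assumptions::

            from pynpoint import SystematicErrorModule

            module = SystematicErrorModule(name_in='syst_error',
                                           image_in_tag='science',
                                           psf_in_tag='psf',
                                           offset_out_tag='offsets',
                                           position=(0.45, 120.),
                                           magnitude=8.,
                                           angles=(0., 350., 36),
                                           psf_scaling=1.,
                                           aperture=0.1,
                                           pca_number=15,
                                           mask=(0.1, None))

            pipeline.add_module(module)
            pipeline.run_module('syst_error')

            # columns: offsets in separation (arcsec), position angle (deg),
            # contrast (mag), x position (pix), and y position (pix)
            offsets = pipeline.get_data('offsets')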
Returns ------- NoneType None """ print('Input parameters:') print(f' - Number of principal components = {self.m_pca_number}') print(f' - Figure of merit = {self.m_merit}') print(f' - Residuals type = {self.m_residuals}') print(f' - Absolute tolerance (pixels/mag) = {self.m_tolerance}') print(f' - Maximum offset = {self.m_offset}') print(f' - Aperture radius (arcsec) = {self.m_aperture}') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') image = self.m_image_in_port[0, ] module = FakePlanetModule(name_in=f'{self._m_name}_fake', image_in_tag=self.m_image_in_tag, psf_in_tag=self.m_psf_in_tag, image_out_tag=f'{self._m_name}_empty', position=(self.m_position[0], self.m_position[1]+self.m_extra_rot), magnitude=self.m_magnitude, psf_scaling=-self.m_psf_scaling) module.connect_database(self._m_data_base) module._m_output_ports[f'{self._m_name}_empty'].del_all_data() module._m_output_ports[f'{self._m_name}_empty'].del_all_attributes() module.run() sep = float(self.m_position[0]) angles = np.linspace(self.m_angles[0], self.m_angles[1], self.m_angles[2], endpoint=True) print('Testing the following parameters:') print(f' - Contrast (mag) = {self.m_magnitude:.2f}') print(f' - Separation (mas) = {sep*1e3:.1f}') print(f' - Position angle range (deg) = {angles[0]} - {angles[-1]}') if angles.size > 1: print(f' in steps of {np.mean(np.diff(angles)):.2f} deg') # Image center (y, x) with subpixel accuracy im_center = center_subpixel(image) for i, ang in enumerate(angles): print(f'\nProcessing position angle: {ang} deg...') # Convert the polar coordiantes of the separation and position angle that is tested # into cartesian coordinates (y, x) planet_pos_yx = polar_to_cartesian(image, sep/pixscale, ang) planet_pos_xy = (planet_pos_yx[1], planet_pos_yx[0]) # Convert the planet position to polar coordinates planet_sep_ang = cartesian_to_polar(im_center, planet_pos_yx[0], planet_pos_yx[1]) # Change the separation units to arcsec planet_sep_ang = (planet_sep_ang[0]*pixscale, planet_sep_ang[1]) # Inject the artifical planet module = FakePlanetModule(position=(planet_sep_ang[0], planet_sep_ang[1]+self.m_extra_rot), magnitude=self.m_magnitude, psf_scaling=self.m_psf_scaling, name_in=f'{self._m_name}_fake_{i}', image_in_tag=f'{self._m_name}_empty', psf_in_tag=self.m_psf_in_tag, image_out_tag=f'{self._m_name}_fake') module.connect_database(self._m_data_base) module._m_output_ports[f'{self._m_name}_fake'].del_all_data() module._m_output_ports[f'{self._m_name}_fake'].del_all_attributes() module.run() # Retrieve the position and contrast of the artificial planet module = SimplexMinimizationModule(position=planet_pos_xy, magnitude=self.m_magnitude, psf_scaling=-self.m_psf_scaling, name_in=f'{self._m_name}_fake_{i}', image_in_tag=f'{self._m_name}_fake', psf_in_tag=self.m_psf_in_tag, res_out_tag=f'{self._m_name}_simplex', flux_position_tag=f'{self._m_name}_fluxpos', merit=self.m_merit, aperture=self.m_aperture, sigma=0., tolerance=self.m_tolerance, pca_number=self.m_pca_number, cent_size=self.m_mask[0], edge_size=self.m_mask[1], extra_rot=self.m_extra_rot, residuals=self.m_residuals, offset=self.m_offset) module.connect_database(self._m_data_base) module._m_output_ports[f'{self._m_name}_simplex'].del_all_data() module._m_output_ports[f'{self._m_name}_simplex'].del_all_attributes() module._m_output_ports[f'{self._m_name}_fluxpos'].del_all_data() module._m_output_ports[f'{self._m_name}_fluxpos'].del_all_attributes() module.run() # Add the input port to collect the results of SimplexMinimizationModule fluxpos_out_port = 
self.add_input_port(f'{self._m_name}_fluxpos') # Create a list with the offset between the injected and retrieved values of the # separation (arcsec), position angle (deg), contrast (mag), x position (pixels), # and y position (pixels). data = [planet_sep_ang[0] - fluxpos_out_port[-1, 2], # Separation (arcsec) planet_sep_ang[1] - fluxpos_out_port[-1, 3], # Position angle (deg) self.m_magnitude - fluxpos_out_port[-1, 4], # Contrast (mag) planet_pos_xy[0] - fluxpos_out_port[-1, 0], # Position x (pixels) planet_pos_xy[1] - fluxpos_out_port[-1, 1]] # Position y (pixels) if data[1] > 180.: data[1] -= 360. elif data[1] < -180.: data[1] += 360. print(f'Offset: {data[0]*1e3:.2f} mas, {data[1]:.2f} deg, {data[2]:.2f} mag') self.m_offset_out_port.append(data, data_dim=2) offset_in_port = self.add_input_port(self.m_offset_out_port.tag) offsets = offset_in_port.get_all() sep_percen = np.percentile(offsets[:, 0], [16., 50., 84.]) ang_percen = np.percentile(offsets[:, 1], [16., 50., 84.]) mag_percen = np.percentile(offsets[:, 2], [16., 50., 84.]) x_pos_percen = np.percentile(offsets[:, 3], [16., 50., 84.]) y_pos_percen = np.percentile(offsets[:, 4], [16., 50., 84.]) print('\nMedian offset and uncertainties:') print(f' - Position x (pixels) = {x_pos_percen[1]:.2f} ' f'(-{x_pos_percen[1]-x_pos_percen[0]:.2f} ' f'+{x_pos_percen[2]-x_pos_percen[1]:.2f})') print(f' - Position y (pixels) = {y_pos_percen[1]:.2f} ' f'(-{y_pos_percen[1]-y_pos_percen[0]:.2f} ' f'+{y_pos_percen[2]-y_pos_percen[1]:.2f})') print(f' - Separation (mas) = {1e3*sep_percen[1]:.2f} ' f'(-{1e3*sep_percen[1]-1e3*sep_percen[0]:.2f} ' f'+{1e3*sep_percen[2]-1e3*sep_percen[1]:.2f})') print(f' - Position angle (deg) = {ang_percen[1]:.2f} ' f'(-{ang_percen[1]-ang_percen[0]:.2f} ' f'+{ang_percen[2]-ang_percen[1]:.2f})') print(f' - Contrast (mag) = {mag_percen[1]:.2f} ' f'(-{mag_percen[1]-mag_percen[0]:.2f} ' f'+{mag_percen[2]-mag_percen[1]:.2f})') history = f'sep = {self.m_position[0]:.3f}, ' \ f'pa = {self.m_position[1]:.1f}, ' \ f'mag = {self.m_magnitude:.1f}' self.m_offset_out_port.copy_attributes(self.m_image_in_port) self.m_offset_out_port.add_history('SystematicErrorModule', history) self.m_offset_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/frameselection.py000066400000000000000000001216561450275315200227760ustar00rootroot00000000000000""" Pipeline modules for frame selection. """ import time import math import warnings import multiprocessing as mp from typing import Optional, Tuple, Union import numpy as np from typeguard import typechecked from skimage.metrics import structural_similarity, mean_squared_error from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import image_stat from pynpoint.util.image import crop_image, pixel_distance, center_pixel from pynpoint.util.module import progress from pynpoint.util.remove import write_selected_data, write_selected_attributes from pynpoint.util.star import star_positions class RemoveFramesModule(ProcessingModule): """ Pipeline module for removing images by their index number. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, selected_out_tag: str, removed_out_tag: str, frames: Union[str, range, list, np.ndarray]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. selected_out_tag : str Tag of the database entry with the remaining images after removing the specified images. 
Should be different from *image_in_tag*. removed_out_tag : str Tag of the database entry with the images that are removed. Should be different from *image_in_tag*. frames : str, list, range, numpy.ndarray The frame indices that have to be removed or a database tag pointing to a list of frame indices. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_selected_out_port = self.add_output_port(selected_out_tag) self.m_removed_out_port = self.add_output_port(removed_out_tag) if isinstance(frames, str): self.m_index_in_port = self.add_input_port(frames) else: self.m_index_in_port = None if isinstance(frames, (range, list)): self.m_frames = np.asarray(frames, dtype=int) elif isinstance(frames, np.ndarray): self.m_frames = frames @typechecked def run(self) -> None: """ Run method of the module. Removes the frames and corresponding attributes, updates the NFRAMES attribute, and saves the data and attributes. Returns ------- NoneType None """ if self.m_index_in_port is not None: self.m_frames = self.m_index_in_port.get_all() if np.size(np.where(self.m_frames >= self.m_image_in_port.get_shape()[0])) > 0: raise ValueError(f'Some values in \'frames\' are larger than the total number of ' f'available frames, {self.m_image_in_port.get_shape()[0]}.') write_selected_data(memory=self._m_config_port.get_attribute('MEMORY'), indices=self.m_frames, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port) write_selected_attributes(indices=self.m_frames, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port, module_type='RemoveFramesModule', history=f'frames removed = {np.size(self.m_frames)}') self.m_image_in_port.close_port() class FrameSelectionModule(ProcessingModule): """ Pipeline module for applying a frame selection. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, selected_out_tag: str, removed_out_tag: str, index_out_tag: Optional[str] = None, method: str = 'median', threshold: Union[float, Tuple[float, float]] = 4., fwhm: Optional[float] = 0.1, aperture: Union[Tuple[str, float], Tuple[str, float, float]] = ('circular', 0.2), position: Optional[Union[Tuple[int, int, float], Tuple[None, None, float], Tuple[int, int, None]]] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. selected_out_tag : str Tag of the database entry with the selected images that are written as output. Should be different from ``image_in_tag``. removed_out_tag : str Tag of the database entry with the removed images that are written as output. Should be different from ``image_in_tag``. index_out_tag : str, None Tag of the database entry with the list of frames indices that are removed with the frames selection. No data is written when set to ``None``. method : str Method used for the frame selection. Either sigma clipping is applied with respect to the median (``method='median'``) or maximum (``method='max'``) aperture flux or fluxes outside a specified range (``method='range'``) are removed. In the latter case, the ``threshold`` argument should be a tuple with the minimum and maximum flux value. threshold : float, tuple(float, float) Threshold in units of sigma for the frame selection in case ``method='median'`` or ``method='max'``. 
All images that are a ``threshold`` number of sigmas away from the median/maximum aperture flux will be removed. In case ``method='range'``, the argument should be a tuple with the minimum and maximum flux. Aperture fluxes within this range will be selected and stored at ``selected_out_tag``. fwhm : float, None The FWHM (arcsec) of the Gaussian kernel that is used to smooth the images before the brightest pixel is located. Recommended to be similar in size to the FWHM of the stellar PSF. No smoothing is applied if set to ``None``. aperture : tuple(str, float), tuple(str, float, float) Tuple with the aperture properties for measuring the photometry around the location of the brightest pixel. The first element contains the aperture type ('circular', 'annulus', or 'ratio'). For a circular aperture, the second element contains the aperture radius (arcsec). For the other two types, the second and third element are the inner and outer radii (arcsec) of the aperture. position : tuple(int, int, float), None Subframe that is selected to search for the star. The tuple contains the center (pix) and size (arcsec) (pos_x, pos_y, size). Setting ``position`` to None will use the full image to search for the star. If `position=(None, None, size)` then the center of the image will be used. If ``position=(pos_x, pos_y, None)`` then a fixed position is used for the aperture. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_selected_out_port = self.add_output_port(selected_out_tag) self.m_removed_out_port = self.add_output_port(removed_out_tag) if index_out_tag is None: self.m_index_out_port = None else: self.m_index_out_port = self.add_output_port(index_out_tag) self.m_method = method self.m_fwhm = fwhm self.m_aperture = aperture self.m_threshold = threshold self.m_position = position @staticmethod @typechecked def aperture_phot(image: np.ndarray, position: np.ndarray, aperture: Union[Tuple[str, float, float], Tuple[str, None, float]]) -> np.float64: """ Parameters ---------- image : np.ndarray Input image (2D). position : np.ndarray Center position (y, x) of the aperture. aperture : tuple(str, float, float) Tuple with the aperture properties for measuring the photometry around the location of the brightest pixel. The first element contains the aperture type ('circular', 'annulus', or 'ratio'). For a circular aperture, the second element is empty and the third element contains the aperture radius (pix). For the other two types, the second and third element are the inner and outer radii (pix) of the aperture. Returns ------- np.float64 Photometry value. """ check_pos_in = any(np.floor(position[:]-aperture[2]) < 0.) check_pos_out = any(np.ceil(position[:]+aperture[2]) > image.shape[0]) if check_pos_in or check_pos_out: phot = np.nan else: im_crop = crop_image(image, tuple(position), 2*int(math.ceil(aperture[2]))) npix = im_crop.shape[0] x_grid = y_grid = np.linspace(-(npix-1)/2, (npix-1)/2, npix) xx_grid, yy_grid = np.meshgrid(x_grid, y_grid) rr_grid = np.sqrt(xx_grid**2 + yy_grid**2) if aperture[0] == 'circular': phot = np.sum(im_crop[rr_grid < aperture[2]]) elif aperture[0] == 'annulus': phot = np.sum(im_crop[(rr_grid > aperture[1]) & (rr_grid < aperture[2])]) elif aperture[0] == 'ratio': phot = np.sum(im_crop[rr_grid < aperture[1]]) / \ np.sum(im_crop[(rr_grid > aperture[1]) & (rr_grid < aperture[2])]) return phot @typechecked def run(self) -> None: """ Run method of the module. 
Smooths the images with a Gaussian kernel, locates the brightest pixel in each image, measures the integrated flux around the brightest pixel, calculates the median and standard deviation of the photometry, and applies sigma clipping to remove low quality images. Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') nimages = self.m_image_in_port.get_shape()[0] if self.m_fwhm is not None: self.m_fwhm = int(math.ceil(self.m_fwhm/pixscale)) if self.m_position is not None and self.m_position[2] is not None: self.m_position = (self.m_position[0], self.m_position[1], int(math.ceil(self.m_position[2]/pixscale))) if len(self.m_aperture) == 2: self.m_aperture = (self.m_aperture[0], None, self.m_aperture[1]/pixscale) elif len(self.m_aperture) == 3: self.m_aperture = (self.m_aperture[0], self.m_aperture[1]/pixscale, self.m_aperture[2]/pixscale) starpos = star_positions(self.m_image_in_port, self.m_fwhm, self.m_position) phot = np.zeros(nimages) start_time = time.time() for i in range(nimages): progress(i, nimages, 'Aperture photometry...', start_time) phot[i] = self.aperture_phot(self.m_image_in_port[i, ], starpos[i, :], self.m_aperture) if self.m_method == 'median': phot_ref = np.nanmedian(phot) print(f'Median = {phot_ref:.2f}') elif self.m_method == 'max': phot_ref = np.nanmax(phot) print(f'Maximum = {phot_ref:.2f}') elif self.m_method == 'range': phot_ref = np.nanmedian(phot) print(f'Median = {phot_ref:.2f}') phot_std = np.nanstd(phot) print(f'Standard deviation = {phot_std:.2f}') if self.m_method in ['median', 'max']: index_del = np.logical_or((phot > phot_ref + self.m_threshold*phot_std), (phot < phot_ref - self.m_threshold*phot_std)) elif self.m_method == 'range': index_del = np.logical_or((phot < self.m_threshold[0]), (phot > self.m_threshold[1])) index_del[np.isnan(phot)] = True index_del = np.where(index_del)[0] index_sel = np.ones(nimages, dtype=bool) index_sel[index_del] = False write_selected_data(memory=self._m_config_port.get_attribute('MEMORY'), indices=index_del, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port) history = f'frames removed = {np.size(index_del)}' if self.m_index_out_port is not None: self.m_index_out_port.set_all(index_del, data_dim=1) self.m_index_out_port.copy_attributes(self.m_image_in_port) self.m_index_out_port.add_attribute('STAR_POSITION', starpos, static=False) self.m_index_out_port.add_history('FrameSelectionModule', history) write_selected_attributes(indices=index_del, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port, module_type='FrameSelectionModule', history=history) self.m_selected_out_port.add_attribute('STAR_POSITION', starpos[index_sel], static=False) self.m_selected_out_port.add_history('FrameSelectionModule', history) self.m_removed_out_port.add_attribute('STAR_POSITION', starpos[index_del], static=False) self.m_removed_out_port.add_history('FrameSelectionModule', history) self.m_image_in_port.close_port() class RemoveLastFrameModule(ProcessingModule): """ Pipeline module for removing every NDIT+1 image from a NACO dataset obtained in cube mode. This frame contains the average pixel values of the cube. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. 
image_out_tag : str Tag of the database entry that is written as output. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) @typechecked def run(self) -> None: """ Run method of the module. Removes every NDIT+1 frame and saves the data and attributes. Returns ------- NoneType None """ ndit = self.m_image_in_port.get_attribute('NDIT') nframes = self.m_image_in_port.get_attribute('NFRAMES') index = self.m_image_in_port.get_attribute('INDEX') nframes_new = [] index_new = [] start_time = time.time() for i, item in enumerate(ndit): progress(i, len(ndit), 'Removing the last image of each FITS cube...', start_time) if nframes[i] != item+1: warnings.warn(f'Number of frames ({nframes[i]}) is not equal to NDIT+1.') frame_start = np.sum(nframes[0:i]) frame_end = np.sum(nframes[0:i+1]) - 1 nframes_new.append(nframes[i]-1) index_new.extend(index[frame_start:frame_end]) self.m_image_out_port.append(self.m_image_in_port[frame_start:frame_end, ]) nframes_new = np.asarray(nframes_new, dtype=int) index_new = np.asarray(index_new, dtype=int) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_attribute('NFRAMES', nframes_new, static=False) self.m_image_out_port.add_attribute('INDEX', index_new, static=False) history = 'frames removed = NDIT+1' self.m_image_out_port.add_history('RemoveLastFrameModule', history) self.m_image_out_port.close_port() class RemoveStartFramesModule(ProcessingModule): """ Pipeline module for removing a fixed number of images at the beginning of each cube. This can be useful for NACO data in which the background is higher at the beginning of the cube. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, frames: int = 1) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. frames : int Number of frames that are removed at the beginning of each cube. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_frames = int(frames) @typechecked def run(self) -> None: """ Run method of the module. Removes a constant number of images at the beginning of each cube and saves the data and attributes. 
Returns ------- NoneType None """ if self.m_image_out_port.tag == self.m_image_in_port.tag: raise ValueError('Input and output port should have a different tag.') nframes = self.m_image_in_port.get_attribute('NFRAMES') index = self.m_image_in_port.get_attribute('INDEX') index_new = [] if 'PARANG' in self.m_image_in_port.get_all_non_static_attributes(): parang = self.m_image_in_port.get_attribute('PARANG') parang_new = [] else: parang = None if 'STAR_POSITION' in self.m_image_in_port.get_all_non_static_attributes(): star = self.m_image_in_port.get_attribute('STAR_POSITION') star_new = [] else: star = None start_time = time.time() for i, _ in enumerate(nframes): progress(i, len(nframes), 'Removing images at the begin of each cube...', start_time) frame_start = np.sum(nframes[0:i]) + self.m_frames frame_end = np.sum(nframes[0:i+1]) if frame_start >= frame_end: raise ValueError('The number of frames in the original data cube is equal or ' 'smaller than the number of frames that have to be removed.') index_new.extend(index[frame_start:frame_end]) if parang is not None: parang_new.extend(parang[frame_start:frame_end]) if star is not None: star_new.extend(star[frame_start:frame_end]) self.m_image_out_port.append(self.m_image_in_port[frame_start:frame_end, ]) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_attribute('NFRAMES', nframes-self.m_frames, static=False) self.m_image_out_port.add_attribute('INDEX', index_new, static=False) if parang is not None: self.m_image_out_port.add_attribute('PARANG', parang_new, static=False) if star is not None: self.m_image_out_port.add_attribute('STAR_POSITION', np.asarray(star_new), static=False) history = 'frames removed = '+str(self.m_frames) self.m_image_out_port.add_history('RemoveStartFramesModule', history) self.m_image_out_port.close_port() class ImageStatisticsModule(ProcessingModule): """ Pipeline module for calculating image statistics for the full images or a subsection of the images. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, stat_out_tag: str, position: Optional[Union[Tuple[int, int, float], Tuple[None, None, float]]] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the images that are read as input. stat_out_tag : str Tag of the database entry with the statistical results that are written as output. The result is stored in the following order: minimum, maximum, sum, mean, median, and standard deviation. position : tuple(int, int, float) Position (x, y) (pix) and radius (arcsec) of the circular area in which the statistics are calculated. The full image is used if set to None. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_stat_out_port = self.add_output_port(stat_out_tag) self.m_position = position @typechecked def run(self) -> None: """ Run method of the module. Calculates the minimum, maximum, sum, mean, median, and standard deviation of the pixel values of each image separately. NaNs are ignored for each calculation. The values are calculated for either the full images or a circular subsection of the images. 
Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') nimages = self.m_image_in_port.get_shape()[0] im_shape = self.m_image_in_port.get_shape()[1:] if self.m_position is None: indices = None else: if self.m_position[0] is None and self.m_position[1] is None: center = center_pixel(self.m_image_in_port[0, ]) self.m_position = (center[0], # y position center[1], # x position self.m_position[2]/pixscale) # radius (pix) else: self.m_position = (int(self.m_position[1]), # y position int(self.m_position[0]), # x position self.m_position[2]/pixscale) # radius (pix) rr_grid, _, _ = pixel_distance(im_shape, position=self.m_position[0:2]) rr_reshape = np.reshape(rr_grid, (rr_grid.shape[0]*rr_grid.shape[1])) indices = np.where(rr_reshape <= self.m_position[2])[0] self.apply_function_to_images(image_stat, self.m_image_in_port, self.m_stat_out_port, 'Calculating image statistics', func_args=(indices, )) history = f'number of images = {nimages}' self.m_stat_out_port.copy_attributes(self.m_image_in_port) self.m_stat_out_port.add_history('ImageStatisticsModule', history) self.m_stat_out_port.close_port() class FrameSimilarityModule(ProcessingModule): """ Pipeline module for measuring the similarity between frames. """ __author__ = 'Benedikt Schmidhuber, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_tag: str, method: str = 'MSE', mask_radius: Tuple[float, float] = (0., 5.), window_size: float = 0.1, temporal_median: str = 'full') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_tag : str Tag of the database entry that is read as input. method : str Method for the similarity measure. There are three measures available: - `MSE` - Mean Squared Error - `PCC` - Pearson Correlation Coefficient - `SSIM` - Structural Similarity These measures compare each image to the temporal median of the image set. mask_radius : tuple(float, float) Inner and outer radius (arcsec) of the mask that is applied to the images. The inner radius is actually not used so can be set to zero. window_size : float Size (arcsec) of the sliding window that is used when the SSIM similarity is calculated. temporal_median : str Option to calculate the temporal median for each position ('full') or as a constant value ('constant') for the entire set. The latter is computationally less expensive. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_tag) self.m_image_out_port = self.add_output_port(image_tag) if method not in ('MSE', 'PCC', 'SSIM'): raise ValueError(f'The chosen method \'{method}\' is not available. Please ensure ' f'that you have selected one of \'MSE\', \'PCC\', or \'SSIM\'.') if temporal_median not in ('full', 'constant'): raise ValueError(f'The chosen temporal_median \'{temporal_median}\' is not ' f'available. Please ensure that you have selected one of \'full\', ' f'\'constant\'.') self.m_method = method self.m_temporal_median = temporal_median self.m_mask_radii = mask_radius self.m_window_size = window_size @staticmethod @typechecked def _similarity(images: np.ndarray, reference_index: int, mode: str, window_size: int, temporal_median: Union[bool, np.ndarray] = False) -> Tuple[int, float]: """ Internal function to compute the MSE as defined by Ruane et al. 2019. """ @typechecked def _temporal_median(reference_index: int, images: np.ndarray) -> np.ndarray: """ Internal function to calculate the temporal median for all frames, except the one with the ``reference_index``. 
""" image_m = np.concatenate((images[:reference_index], images[reference_index+1:])) return np.median(image_m, axis=0) image_x_i = images[reference_index] if isinstance(temporal_median, bool): image_m = _temporal_median(reference_index, images=images) else: image_m = temporal_median if mode == 'MSE': return reference_index, mean_squared_error(image_x_i, image_m) if mode == 'PCC': # calculate the covariance matrix of the flattened images cov_mat = np.cov(image_x_i.flatten(), image_m.flatten(), ddof=1) # the variances are stored in the diagonal, therefore take the sqrt to obtain std std = np.sqrt(np.diag(cov_mat)) # does not matter whether [0, 1] or [1, 0] as cov_mat is symmetric return reference_index, cov_mat[0, 1] / (std[0] * std[1]) if mode == 'SSIM': # winsize needs to be odd if int(window_size) % 2 == 0: winsize = int(window_size) + 1 else: winsize = int(window_size) # TODO Unclear what value to pass to data_range # Previously the argument was not requires data_range = np.amax(image_x_i)-np.amin(image_x_i) struc_sim = structural_similarity(im1=image_x_i, im2=image_m, win_size=winsize, data_range=data_range) return reference_index, struc_sim @typechecked def run(self) -> None: """ Run method of the module. Computes the similarity between frames based on the Mean Squared Error (MSE), the Pearson Correlation Coefficient (PCC), or the Structural Similarity (SSIM). The correlation values are stored as non-static attribute (``MSE``, ``PCC``, or ``SSIM``) to the input data. After running this module, the :class:`~pynpoint.processing.frameselection.SelectByAttributeModule` can be used to select the images with the highest correlation. Returns ------- NoneType None """ cpu = self._m_config_port.get_attribute('CPU') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') # get number of images nimages = self.m_image_in_port.get_shape()[0] # convert arcsecs to pixels self.m_mask_radii = (math.floor(self.m_mask_radii[0] / pixscale), math.floor(self.m_mask_radii[1] / pixscale)) self.m_window_size = int(self.m_window_size / pixscale) # overlay the same mask over all images images = self.m_image_in_port.get_all() # close the port during the calculations self.m_image_out_port.close_port() # Change the radius to the image size images = crop_image(images, None, int(2.*self.m_mask_radii[1])) if self.m_temporal_median == 'constant': temporal_median = np.median(images, axis=0) else: temporal_median = False # compare images and store similarity similarities = np.zeros(nimages) pool = mp.Pool(cpu) async_results = [] for i in range(nimages): async_results.append(pool.apply_async(FrameSimilarityModule._similarity, args=(images, i, self.m_method, self.m_window_size, temporal_median))) pool.close() start_time = time.time() # wait for all processes to finish while mp.active_children(): # number of finished processes nfinished = sum([i.ready() for i in async_results]) progress(nfinished, nimages, 'Calculating image similarity...', start_time) # check if new processes have finished every 5 seconds time.sleep(5) if nfinished != nimages: print('\r ') print('\rCalculating image similarity... 
[DONE]') # get the results for every async_result object for async_result in async_results: reference, similarity = async_result.get() similarities[reference] = similarity pool.terminate() # reopen the port after the calculation self.m_image_out_port.open_port() self.m_image_out_port.add_attribute(f'{self.m_method}', similarities, static=False) self.m_image_out_port.close_port() class SelectByAttributeModule(ProcessingModule): """ Pipeline module for selecting frames based on attribute values. """ __author__ = 'Benedikt Schmidhuber, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, selected_out_tag: str, removed_out_tag: str, attribute_tag: str, number_frames: int = 100, order: str = 'descending') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. selected_out_tag : str Tag of the database entry to which the selected frames are written. removed_out_tag : str Tag of the database entry to which the removed frames are written. attribute_tag : str Name of the attribute which is used to sort and select the frames. number_frames : int Number of frames that are selected. order : str Order in which the frames are selected. Can be either 'descending' (will select the highest attribute values) or 'ascending' (will select the lowest attribute values). Returns ------- NoneType None Examples -------- The example below selects the first 100 frames in ascending order of the ``INDEX`` values that are stored to the 'im_arr' dataset:: SelectByAttributeModule(name_in='frame_selection', image_in_tag='im_arr', attribute_tag='INDEX', number_frames=100, order='ascending', selected_out_tag='im_arr_selected', removed_out_tag='im_arr_removed') The example below selects the 200 frames with the largest ``SSIM`` values that are stored to the 'im_arr' dataset:: SelectByAttributeModule(name_in='frame_selection', image_in_tag='im_arr', attribute_tag='SSIM', number_frames=200, order='descending', selected_out_tag='im_arr_selected', removed_out_tag='im_arr_removed') """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_selected_out_port = self.add_output_port(selected_out_tag) self.m_removed_out_port = self.add_output_port(removed_out_tag) if order not in ('ascending', 'descending'): raise ValueError('The selected order is not available. The available options are ' '\'ascending\' or \'descending\'.') self.m_attribute_tag = attribute_tag self.m_number_frames = number_frames self.m_order = order @typechecked def run(self) -> None: """ Run method of the module. Selects images according to a specified attribute tag and ordering, e.g. the highest 150 ``INDEX`` frames, or the lowest 50 ``PCC`` frames. The order of the selected images is determined by the `descending` or `ascending` attribute values. To sort the images again by their original order, the :class:`~pynpoint.processing.psfpreparation.SortParangModule` can be used. Returns ------- NoneType None """ nimages = self.m_image_in_port.get_shape()[0] attribute = self.m_image_in_port.get_attribute(f'{self.m_attribute_tag}') if nimages != attribute.size: raise ValueError(f'The attribute {self.m_attribute_tag} does not have the same ' f'length ({len(attribute)}) as the tag has images ({nimages}). 
' f'Please check the attribute you have chosen for selection.') if self.m_order == 'descending': # sort attribute in descending order sorting_order = np.argsort(attribute)[::-1] else: # sort attribute in ascending order sorting_order = np.argsort(attribute) index_del = sorting_order[self.m_number_frames:] write_selected_data(memory=self._m_config_port.get_attribute('MEMORY'), indices=index_del, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port) write_selected_attributes(indices=index_del, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port, module_type='SelectByAttributeModule', history=f'selected tag = {self.m_attribute_tag}') self.m_image_in_port.close_port() class ResidualSelectionModule(ProcessingModule): """ Pipeline module for applying a frame selection on the residuals of the PSF subtraction. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, selected_out_tag: str, removed_out_tag: str, percentage: float = 10., annulus_radii: Tuple[float, float] = (0.1, 0.2)) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. selected_out_tag : str Tag of the database entry with the selected images that are written as output. removed_out_tag : str Tag of the database entry with the removed images that are written as output. percentage : float The percentage of best frames that is selected. annulus_radii : tuple(float, float) Inner and outer radius of the annulus (arcsec). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_selected_out_port = self.add_output_port(selected_out_tag) self.m_removed_out_port = self.add_output_port(removed_out_tag) self.m_percentage = percentage self.m_annulus_radii = annulus_radii @typechecked def run(self) -> None: """ Run method of the module. Applies a frame selection on the derotated residuals from the PSF subtraction. The pixels within an annulus (e.g. at the separation of an expected planet) are selected and the standard deviation is calculated. The chosen percentage of images with the lowest standard deviation are stored as output. Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') nimages = self.m_image_in_port.get_shape()[0] npix = self.m_image_in_port.get_shape()[-1] rr_grid, _, _ = pixel_distance((npix, npix), position=None) pixel_select = np.where((rr_grid > self.m_annulus_radii[0]/pixscale) & (rr_grid < self.m_annulus_radii[1]/pixscale)) start_time = time.time() phot_annulus = np.zeros(nimages) for i in range(nimages): progress(i, nimages, 'Aperture photometry...', start_time) phot_annulus[i] = np.sum(np.abs(self.m_image_in_port[i][pixel_select])) print(f'Minimum, maximum = {np.amin(phot_annulus):.2f}, {np.amax(phot_annulus):.2f}') print(f'Mean, median = {np.nanmean(phot_annulus):.2f}, {np.nanmedian(phot_annulus):.2f}') print(f'Standard deviation = {np.nanstd(phot_annulus):.2f}') n_select = int(nimages*self.m_percentage/100.) 
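# Rank the frames by their summed absolute flux in the annulus: the first n_select indices (lowest residual flux) are the selected frames, the remaining indices are marked for removal below.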
index_del = np.argsort(phot_annulus)[n_select:] write_selected_data(memory=self._m_config_port.get_attribute('MEMORY'), indices=index_del, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port) write_selected_attributes(indices=index_del, image_in_port=self.m_image_in_port, selected_out_port=self.m_selected_out_port, removed_out_port=self.m_removed_out_port, module_type='ResidualSelectionModule', history=f'frames removed = {index_del.size}') self.m_image_in_port.close_port() PynPoint-0.11.0/pynpoint/processing/limits.py000066400000000000000000000533321450275315200212720ustar00rootroot00000000000000""" Pipeline modules for estimating detection limits. """ import os import math import time import warnings import multiprocessing as mp from typing import List, Optional, Tuple import numpy as np from scipy.interpolate import griddata from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.image import create_mask from pynpoint.util.limits import contrast_limit from pynpoint.util.module import progress from pynpoint.util.psf import pca_psf_subtraction from pynpoint.util.residuals import combine_residuals class ContrastCurveModule(ProcessingModule): """ Pipeline module to calculate contrast limits for a given sigma level or false positive fraction, with a correction for small sample statistics. Positions are processed in parallel if ``CPU`` is set to a value larger than 1 in the configuration file. """ __author__ = 'Tomas Stolker, Jasper Jonker, Benedikt Schmidhuber' @typechecked def __init__(self, name_in: str, image_in_tag: str, psf_in_tag: str, contrast_out_tag: str, separation: Tuple[float, float, float] = (0.1, 1., 0.01), angle: Tuple[float, float, float] = (0., 360., 60.), threshold: Tuple[str, float] = ('sigma', 5.), psf_scaling: float = 1., aperture: float = 0.05, pca_number: int = 20, cent_size: Optional[float] = None, edge_size: Optional[float] = None, extra_rot: float = 0., residuals: str = 'mean', snr_inject: float = 100., **kwargs: float) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that contains the stack with images. psf_in_tag : str Tag of the database entry that contains the reference PSF that is used as fake planet. Can be either a single image or a stack of images equal in size to *image_in_tag*. contrast_out_tag : str Tag of the database entry that contains the separation, azimuthally averaged contrast limits, the azimuthal variance of the contrast limits, and the threshold of the false positive fraction associated with sigma. separation : tuple(float, float, float) Range of separations (arcsec) where the contrast is calculated. Should be specified as (lower limit, upper limit, step size). Apertures that fall within the mask radius or beyond the image size are removed. angle : tuple(float, float, float) Range of position angles (deg) where the contrast is calculated. Should be specified as (lower limit, upper limit, step size), measured counterclockwise with respect to the vertical image axis, i.e. East of North. threshold : tuple(str, float) Detection threshold for the contrast curve, either in terms of 'sigma' or the false positive fraction (FPF). The value is a tuple, for example provided as ('sigma', 5.) or ('fpf', 1e-6). Note that when sigma is fixed, the false positive fraction will change with separation. 
Also, sigma only corresponds to the standard deviation of a normal distribution at large separations (i.e., large number of samples). psf_scaling : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). Should have a positive value. aperture : float Aperture radius (arcsec). pca_number : int Number of principal components used for the PSF subtraction. cent_size : float, None Central mask radius (arcsec). No mask is used when set to None. edge_size : float, None Outer edge radius (arcsec) beyond which pixels are masked. No outer mask is used when set to None. If the value is larger than half the image size then it will be set to half the image size. extra_rot : float Additional rotation angle of the images in clockwise direction (deg). residuals : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). snr_inject : float Signal-to-noise ratio of the injected planet signal that is used to measure the amount of self-subtraction. Returns ------- NoneType None """ super().__init__(name_in) if 'sigma' in kwargs: warnings.warn('The \'sigma\' parameter has been deprecated. Please use the ' '\'threshold\' parameter instead.', DeprecationWarning) if 'norm' in kwargs: warnings.warn('The \'norm\' parameter has been deprecated. It is not recommended to ' 'normalize the images before PSF subtraction.', DeprecationWarning) if 'accuracy' in kwargs: warnings.warn('The \'accuracy\' parameter has been deprecated. The parameter is no ' 'longer required.', DeprecationWarning) if 'magnitude' in kwargs: warnings.warn('The \'magnitude\' parameter has been deprecated. The parameter is no ' 'longer required.', DeprecationWarning) if 'ignore' in kwargs: warnings.warn('The \'ignore\' parameter has been deprecated. The parameter is no ' 'longer required.', DeprecationWarning) self.m_image_in_port = self.add_input_port(image_in_tag) if psf_in_tag == image_in_tag: self.m_psf_in_port = self.m_image_in_port else: self.m_psf_in_port = self.add_input_port(psf_in_tag) self.m_contrast_out_port = self.add_output_port(contrast_out_tag) self.m_separation = separation self.m_angle = angle self.m_psf_scaling = psf_scaling self.m_threshold = threshold self.m_aperture = aperture self.m_pca_number = pca_number self.m_cent_size = cent_size self.m_edge_size = edge_size self.m_extra_rot = extra_rot self.m_residuals = residuals self.m_snr_inject = snr_inject if self.m_angle[0] < 0. or self.m_angle[0] > 360. or self.m_angle[1] < 0. or \ self.m_angle[1] > 360. or self.m_angle[2] < 0. or self.m_angle[2] > 360.: raise ValueError('The angular positions of the fake planets should lie between ' '0 deg and 360 deg.') @typechecked def run(self) -> None: """ Run method of the module. An artificial planet is injected (based on the noise level) at a given separation and position angle. The amount of self-subtraction is then determined and the contrast limit is calculated for a given sigma level or false positive fraction. A correction for small sample statistics is applied for both cases. Note that if the sigma level is fixed, the false positive fraction changes with separation, following the Student's t-distribution (see Mawet et al. 2014 for details). Returns ------- NoneType None """ images = self.m_image_in_port.get_all() psf = self.m_psf_in_port.get_all() if psf.shape[0] != 1 and psf.shape[0] != images.shape[0]: raise ValueError(f'The number of frames in psf_in_tag {psf.shape} does not match with ' f'the number of frames in image_in_tag {images.shape}. 
The ' f'DerotateAndStackModule can be used to average the PSF frames ' f'(without derotating) before applying the ContrastCurveModule.') cpu = self._m_config_port.get_attribute('CPU') working_place = self._m_config_port.get_attribute('WORKING_PLACE') parang = self.m_image_in_port.get_attribute('PARANG') pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.m_image_in_port.close_port() self.m_psf_in_port.close_port() if self.m_cent_size is not None: self.m_cent_size /= pixscale if self.m_edge_size is not None: self.m_edge_size /= pixscale self.m_aperture /= pixscale pos_r = np.arange(self.m_separation[0]/pixscale, self.m_separation[1]/pixscale, self.m_separation[2]/pixscale) pos_t = np.arange(self.m_angle[0]+self.m_extra_rot, self.m_angle[1]+self.m_extra_rot, self.m_angle[2]) if self.m_cent_size is None: index_del = np.argwhere(pos_r-self.m_aperture <= 0.) else: index_del = np.argwhere(pos_r-self.m_aperture <= self.m_cent_size) pos_r = np.delete(pos_r, index_del) if self.m_edge_size is None or self.m_edge_size > images.shape[1]/2.: index_del = np.argwhere(pos_r+self.m_aperture >= images.shape[1]/2.) else: index_del = np.argwhere(pos_r+self.m_aperture >= self.m_edge_size) pos_r = np.delete(pos_r, index_del) positions = [] for sep in pos_r: for ang in pos_t: positions.append((sep, ang)) result = [] async_results = [] # Create temporary files tmp_im_str = os.path.join(working_place, 'tmp_images.npy') tmp_psf_str = os.path.join(working_place, 'tmp_psf.npy') np.save(tmp_im_str, images) np.save(tmp_psf_str, psf) mask = create_mask(images.shape[-2:], (self.m_cent_size, self.m_edge_size)) _, im_res = pca_psf_subtraction(images=images*mask, angles=-1.*parang+self.m_extra_rot, pca_number=self.m_pca_number) noise = combine_residuals(method=self.m_residuals, res_rot=im_res) pool = mp.Pool(cpu) for pos in positions: async_results.append(pool.apply_async(contrast_limit, args=(tmp_im_str, tmp_psf_str, noise, mask, parang, self.m_psf_scaling, self.m_extra_rot, self.m_pca_number, self.m_threshold, self.m_aperture, self.m_residuals, self.m_snr_inject, pos))) pool.close() start_time = time.time() # wait for all processes to finish while mp.active_children(): # number of finished processes nfinished = sum([i.ready() for i in async_results]) progress(nfinished, len(positions), 'Calculating detection limits...', start_time) # check if new processes have finished every 5 seconds time.sleep(5) if nfinished != len(positions): print('\r ') print('\rCalculating detection limits... 
[DONE]') # get the results for every async_result object for item in async_results: result.append(item.get()) pool.terminate() os.remove(tmp_im_str) os.remove(tmp_psf_str) result = np.asarray(result) # Sort the results first by separation and then by angle indices = np.lexsort((result[:, 1], result[:, 0])) result = result[indices] result = result.reshape((pos_r.size, pos_t.size, 4)) result[np.isinf(result)] = np.nan mag_mean = np.nanmean(result, axis=1)[:, 2] mag_var = np.nanvar(result, axis=1)[:, 2] res_fpf = result[:, 0, 3] limits = np.column_stack((pos_r*pixscale, mag_mean, mag_var, res_fpf)) self.m_image_in_port._check_status_and_activate() self.m_contrast_out_port._check_status_and_activate() self.m_contrast_out_port.set_all(limits, data_dim=2) history = f'{self.m_threshold[0]} = {self.m_threshold[1]}' self.m_contrast_out_port.add_history('ContrastCurveModule', history) self.m_contrast_out_port.copy_attributes(self.m_image_in_port) self.m_contrast_out_port.close_port() class MassLimitsModule(ProcessingModule): """ Pipeline module to calculate mass limits from the contrast limits and any isochrones model grid downloaded from https://phoenix.ens-lyon.fr/Grids/. """ __author__ = 'Benedikt Schmidhuber, Tomas Stolker' @typechecked def __init__(self, name_in: str, contrast_in_tag: str, mass_out_tag: str, model_file: str, star_prop: dict, instr_filter: str = 'L\'') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. contrast_in_tag : str Tag of the database entry that contains the contrast curve data, as computed with the :class:`~pynpoint.processing.limits.ContrastCurveModule`. mass_out_tag : str Tag of the database entry with the output data containing the separation, the mass limits, and the upper and lower one sigma deviation as calculated for the azimuthal variance on the contrast limits. model_file: str Path to the file containing the model data. Must be in the same format as the grids found on https://phoenix.ens-lyon.fr/Grids/. Any of the isochrones files from this website can be used. star_prop : dict Dictionary containing host star properties. Must have the following keys: - ``magnitude`` - Apparent magnitude, in the same band as the `instr_filter`. - ``distance`` - Distance in parsec. - ``age`` - Age of the system in the Myr. instr_filter: str Instrument filter in the same format as listed in the `model_file`. Returns ------- NoneType None """ super().__init__(name_in) self.m_star_age = star_prop['age']/1000. # [Myr] self.m_star_abs = star_prop['magnitude'] - 5.*math.log10(star_prop['distance']/10.) self.m_instr_filter = instr_filter self.m_model_file = model_file if not os.path.exists(self.m_model_file): raise ValueError('The path does not appear to be an existing file. Please check the' 'path. If you are unsure about the path, pass the absolute path to the' 'model file.') self.m_contrast_in_port = self.add_input_port(contrast_in_tag) self.m_mass_out_port = self.add_output_port(mass_out_tag) @staticmethod @typechecked def read_model(model_file_path: str) -> Tuple[List[float], List[np.ndarray], List[str]]: """ Reads the data from the model file and structures it. Returns an array of available model ages and a list of model data for each age. Parameters ------- model_file: str Path to the file containing the model data. Returns ------- list(float) List with all the ages from the model grid. list(np.ndarray) List with all the isochrone data, so the length is the same as the number of ages. list(str) List with all the column names from the model grid. 
""" # read in all the data, selecting out empty lines or '---' lines data = [] with open(model_file_path) as file: for line in file: if ('---' in line) or line == '\n': continue else: data += [list(filter(None, line.rstrip().split(' ')))] # initialize list of ages ages = [] # initialize the header header = [] # initialize a new data list, where the data is separated by age isochrones = [] k = -1 for _line in data: if '(Gyr)' in _line: # get time line ages += [float(_line[-1])] isochrones += [[]] k += 1 elif 'lg(g)' in _line: # get header line header = ['M/Ms', 'Teff(K)'] + _line[1:] else: # save the data isochrones[k] += [_line] for index, _ in enumerate(isochrones): isochrones[index] = np.array(isochrones[index], dtype=float) return ages, isochrones, header @staticmethod @typechecked def interpolate_model(age_eval: np.ndarray, mag_eval: np.ndarray, filter_index: int, model_age: List[float], model_data: List[np.ndarray]) -> np.ndarray: """ Interpolates the grid based model data. Parameters ---------- age_eval : np.ndarray Age at which the system is evaluated. Must be of the same shape as `mag_eval`. mag_eval : np.ndarray Absolute magnitude for which the system is evaluated. Must be of the same shape as `age_eval`. filter_index: int Column index where the filter is located. model_age: list(float) List of ages which are given by the model. model_data: list(np.ndarray) List of arrays containing the model data. Returns ------- griddata : np.ndarray Interpolated values for the given evaluation points (age_eval, mag_eval). Has the same shape as age_eval and mag_eval. """ grid_points = np.array([]) grid_values = np.array([]) # create array of available points for age_index, age_item in enumerate(model_age): iso_mag = model_data[age_index][:, filter_index] iso_age = np.ones_like(iso_mag) * age_item iso_mass = model_data[age_index][:, 0] grid_points = np.append(grid_points, np.column_stack((iso_age, iso_mag))) grid_values = np.append(grid_values, iso_mass) grid_points = grid_points.reshape(-1, 2) interp = np.column_stack((age_eval, mag_eval)) return griddata(grid_points, grid_values, interp, method='cubic', rescale=True) @typechecked def run(self) -> None: """ Run method of the module. Calculates the mass limits from the contrast limits (as calculated with the :class:`~pynpoint.processing.limits.ContrastCurveModule`) and the isochrones of an evolutionary model. The age and the absolute magnitude of the isochrones are linearly interpolated such that the mass limits can be calculated for a given contrast limits (which is converted in an absolute magnitude with the apparent magnitude and distance of the central star). Returns ------- NoneType None """ model_age, model_data, model_header = self.read_model(self.m_model_file) assert self.m_instr_filter in model_header, 'The selected filter was not found in the ' \ 'list of available filters from the model.' # find the column index of the filter # simple argwhere gives empty list?! 
filter_index = np.argwhere([self.m_instr_filter == j for j in model_header])[0] filter_index = int(filter_index) contrast_data = self.m_contrast_in_port.get_all() separation = contrast_data[:, 0] contrast = contrast_data[:, 1] contrast_std = np.sqrt(contrast_data[:, 2]) age_eval = self.m_star_age*np.ones_like(contrast) mag_eval = self.m_star_abs+contrast print('Interpolating isochrones...', end='') mass = self.interpolate_model(age_eval=age_eval, mag_eval=mag_eval, filter_index=filter_index, model_age=model_age, model_data=model_data) mass_upper = self.interpolate_model(age_eval=age_eval, mag_eval=mag_eval-contrast_std, filter_index=filter_index, model_age=model_age, model_data=model_data) - mass mass_lower = self.interpolate_model(age_eval=age_eval, mag_eval=mag_eval+contrast_std, filter_index=filter_index, model_age=model_age, model_data=model_data) - mass mass_limits = np.column_stack((separation, mass, mass_upper, mass_lower)) self.m_mass_out_port.set_all(mass_limits, data_dim=2) print(' [DONE]') history = f'filter = {self.m_instr_filter}' self.m_mass_out_port.add_history('MassLimitsModule', history) self.m_mass_out_port.copy_attributes(self.m_contrast_in_port) self.m_mass_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/pcabackground.py000066400000000000000000001004541450275315200225720ustar00rootroot00000000000000""" Pipeline modules for PCA-based background subtraction. """ import math import time import warnings from typing import List, Optional, Tuple, Union import numpy as np from scipy.optimize import curve_fit from scipy.sparse.linalg import svds from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.processing.psfpreparation import SortParangModule from pynpoint.processing.resizing import CropImagesModule from pynpoint.processing.stacksubset import CombineTagsModule from pynpoint.util.module import memory_frames, progress from pynpoint.util.star import locate_star class PCABackgroundPreparationModule(ProcessingModule): """ Pipeline module for preparing the images for a PCA-based background subtraction. """ __author__ = 'Tomas Stolker, Silvan Hunziker' @typechecked def __init__(self, name_in: str, image_in_tag: str, star_out_tag: str, subtracted_out_tag: str, background_out_tag: str, dither: Union[Tuple[int, int, int], Tuple[int, None, Tuple[float, float]]], combine: str = 'mean') -> None: """ Parameters ---------- name_in : str Unique name of the pipeline module instance. image_in_tag : str Database tag with the images that are read as input. star_out_tag : str Database tag to store the images that contain the star. subtracted_out_tag : str Database tag to store the mean/median background subtracted images with the star. background_out_tag : str Database tag to store the images that contain only background emission. dither : tuple(int, int, int), tuple(int, None, tuple(float, float)) Tuple with the parameters for separating the star and background frames. The tuple should contain three values, ``(positions, cubes, first)``, with ``positions`` the number of unique dithering position, ``cubes`` the number of consecutive cubes per dithering position, and ``first`` the index value of the first cube which contains the star (Python indexing starts at zero). Sorting is based on the ``DITHER_X`` and ``DITHER_Y`` attributes when ``cubes`` is set to None. In that case, the ``first`` value should be a tuple with the ``DITHER_X`` and ``DITHER_Y`` values in which the star appears first. 
combine : str Method for combining the background images ('mean' or 'median'). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_star_out_port = self.add_output_port(star_out_tag) self.m_subtracted_out_port = self.add_output_port(subtracted_out_tag) self.m_background_out_port = self.add_output_port(background_out_tag) if len(dither) != 3: raise ValueError('The \'dither\' argument should contain three values.') self.m_dither = dither self.m_combine = combine @typechecked def _prepare(self) -> Tuple[np.ndarray, np.ndarray]: nframes = self.m_image_in_port.get_attribute('NFRAMES') cube_mean = np.zeros((nframes.shape[0], self.m_image_in_port.get_shape()[2], self.m_image_in_port.get_shape()[1])) count = 0 for i, item in enumerate(nframes): if self.m_combine == 'mean': cube_mean[i, ] = np.mean(self.m_image_in_port[count:count+item, ], axis=0) elif self.m_combine == 'median': cube_mean[i, ] = np.median(self.m_image_in_port[count:count+item, ], axis=0) count += item if self.m_dither[1] is None: dither_x = self.m_image_in_port.get_attribute('DITHER_X') dither_y = self.m_image_in_port.get_attribute('DITHER_Y') star = np.logical_and(dither_x == self.m_dither[2][0], dither_y == self.m_dither[2][1]) bg_frames = np.invert(star) else: bg_frames = np.ones(nframes.shape[0], dtype=bool) for i in range(self.m_dither[2]*self.m_dither[1], np.size(nframes), self.m_dither[1]*self.m_dither[0]): bg_frames[i:i+self.m_dither[1]] = False return bg_frames, cube_mean @typechecked def _separate(self, bg_frames: np.ndarray, bg_indices: np.ndarray, parang: Optional[np.ndarray], cube_mean: np.ndarray) -> Tuple[np.array, Optional[np.ndarray], np.ndarray, np.ndarray, Optional[np.ndarray], np.ndarray]: @typechecked def _initialize() -> Tuple[np.array, Optional[np.ndarray], np.ndarray, np.ndarray, Optional[np.ndarray], np.ndarray]: background_nframes = np.empty(0, dtype=np.int64) star_nframes = np.empty(0, dtype=np.int64) background_index = np.empty(0, dtype=np.int64) star_index = np.empty(0, dtype=np.int64) if parang is None: background_parang = None star_parang = None else: background_parang = np.empty(0, dtype=np.float64) star_parang = np.empty(0, dtype=np.float64) return star_index, star_parang, star_nframes, background_index, \ background_parang, background_nframes @typechecked def _select_background(i: int) -> np.ndarray: # Previous background cube if np.size(bg_indices[bg_indices < i]) > 0: index_prev = np.amax(bg_indices[bg_indices < i]) bg_prev = cube_mean[index_prev, ] else: bg_prev = None # Next background cube if np.size(bg_indices[bg_indices > i]) > 0: index_next = np.amin(bg_indices[bg_indices > i]) bg_next = cube_mean[index_next, ] else: bg_next = None # Select background: previous, next, or mean of previous and next if bg_prev is None and bg_next is not None: background = bg_next elif bg_prev is not None and bg_next is None: background = bg_prev elif bg_prev is not None and bg_next is not None: background = (bg_prev+bg_next)/2. else: raise ValueError('Neither previous nor next background frames are found.') return background nframes = self.m_image_in_port.get_attribute('NFRAMES') index = self.m_image_in_port.get_attribute('INDEX') star_index, star_parang, star_nframes, background_index, background_parang, \ background_nframes = _initialize() # Separate star and background cubes. Subtract mean background. 
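# Loop over the data cubes: cubes taken at a background dither position are appended unchanged to the background output, while cubes that contain the star are written both unchanged (star output) and with the adjacent mean background (previous, next, or their average) subtracted (subtracted output).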
count = 0 start_time = time.time() for i, item in enumerate(nframes): progress(i, len(nframes), 'Preparing PCA background subtraction...', start_time) im_tmp = self.m_image_in_port[count:count+item, ] # Background frames if bg_frames[i]: self.m_background_out_port.append(im_tmp) background_nframes = np.append(background_nframes, nframes[i]) background_index = np.append(background_index, index[count:count+item]) if parang is not None: background_parang = np.append(background_parang, parang[count:count+item]) # Star frames else: background = _select_background(i) self.m_star_out_port.append(im_tmp) self.m_subtracted_out_port.append(im_tmp-background) star_nframes = np.append(star_nframes, nframes[i]) star_index = np.append(star_index, index[count:count+item]) if parang is not None: star_parang = np.append(star_parang, parang[count:count+item]) count += item return star_index, star_parang, star_nframes, background_index, \ background_parang, background_nframes @typechecked def run(self) -> None: """ Run method of the module. Separates the star and background frames, subtracts the mean or median background from both the star and background frames, and writes the star and background frames separately to their respective output ports. Returns ------- NoneType None """ if 'PARANG' in self.m_image_in_port.get_all_non_static_attributes(): parang = self.m_image_in_port.get_attribute('PARANG') else: parang = None bg_frames, cube_mean = self._prepare() bg_indices = np.nonzero(bg_frames)[0] star_index, star_parang, star_nframes, background_index, background_parang, \ background_nframes = self._separate(bg_frames, bg_indices, parang, cube_mean) history = f'frames = {sum(star_nframes)}, {len(background_nframes)}' self.m_star_out_port.copy_attributes(self.m_image_in_port) self.m_star_out_port.add_history('PCABackgroundPreparationModule', history) self.m_star_out_port.add_attribute('NFRAMES', star_nframes, static=False) self.m_star_out_port.add_attribute('INDEX', star_index, static=False) if parang is not None: self.m_star_out_port.add_attribute('PARANG', star_parang, static=False) self.m_subtracted_out_port.copy_attributes(self.m_image_in_port) self.m_subtracted_out_port.add_history('PCABackgroundPreparationModule', history) self.m_subtracted_out_port.add_attribute('NFRAMES', star_nframes, static=False) self.m_subtracted_out_port.add_attribute('INDEX', star_index, static=False) if parang is not None: self.m_subtracted_out_port.add_attribute('PARANG', star_parang, static=False) self.m_background_out_port.copy_attributes(self.m_image_in_port) self.m_background_out_port.add_history('PCABackgroundPreparationModule', history) self.m_background_out_port.add_attribute('NFRAMES', background_nframes, static=False) self.m_background_out_port.add_attribute('INDEX', background_index, static=False) if parang is not None: self.m_background_out_port.add_attribute('PARANG', background_parang, static=False) self.m_star_out_port.close_port() class PCABackgroundSubtractionModule(ProcessingModule): """ Pipeline module applying a PCA-based background subtraction (see Hunziker et al. 2018). """ __author__ = 'Tomas Stolker, Silvan Hunziker' @typechecked def __init__(self, name_in: str, star_in_tag: str, background_in_tag: str, residuals_out_tag: str, fit_out_tag: Optional[str] = None, mask_out_tag: Optional[str] = None, pca_number: int = 60, mask_star: float = 0.7, subframe: Optional[float] = None, gaussian: float = 0.15, **kwargs) -> None: """ Parameters ---------- name_in : str Unique name of the pipeline module instance. 
star_in_tag : str Database tag with the input images that contain the star. background_in_tag : str Database tag with the input images that contain only background emission. residuals_out_tag : str Database tag to store the background-subtracted images of the star. fit_out_tag : str, None Database tag to store the modeled background images. The data is not stored if the arguments is set to None. mask_out_tag : str, None Database tag to store the mask. The data is not stored if the argument is set to None. pca_number : int Number of principal components that is used to model the background emission. mask_star : float Radius of the central mask (arcsec). gaussian : float Full width at half maximum (arcsec) of the Gaussian kernel that is used to smooth the image before the star is located. subframe : float, None Size (arcsec) of the subframe that is used to find the star. Cropping of the subframe is done around the center of the image. The full images is used if set to None. Returns ------- NoneType None """ super().__init__(name_in) if 'subtract_mean' in kwargs: warnings.warn('The \'subtract_mean\' parameter has been deprecated. Subtracting of ' 'the mean is no longer optional so subtract_mean=True.', DeprecationWarning) self.m_star_in_port = self.add_input_port(star_in_tag) self.m_background_in_port = self.add_input_port(background_in_tag) self.m_residuals_out_port = self.add_output_port(residuals_out_tag) if fit_out_tag is None: self.m_fit_out_port = None else: self.m_fit_out_port = self.add_output_port(fit_out_tag) if mask_out_tag is None: self.m_mask_out_port = None else: self.m_mask_out_port = self.add_output_port(mask_out_tag) self.m_pca_number = pca_number self.m_mask_star = mask_star self.m_gaussian = gaussian self.m_subframe = subframe @typechecked def run(self) -> None: """ Run method of the module. Creates a PCA basis set of the background frames after subtracting the mean background frame from both the star and background frames, masks the PSF of the star, projects the star frames onto the principal components, and stores the residuals of the background subtracted images. Returns ------- NoneType None """ @typechecked def _create_mask(radius: float, position: np.ndarray, nimages: int) -> np.ndarray: """ Method for creating a circular mask at the star or planet position. """ npix = self.m_star_in_port.get_shape()[1] x_grid = np.arange(0, npix, 1) y_grid = np.arange(0, npix, 1) xx_grid, yy_grid = np.meshgrid(x_grid, y_grid) mask = np.ones((nimages, npix, npix)) cent_x = position[:, 1] cent_y = position[:, 0] for i in range(nimages): rr_grid = np.sqrt((xx_grid - cent_x[i])**2 + (yy_grid - cent_y[i])**2) mask[i, ][rr_grid < radius] = 0. return mask @typechecked def _create_basis(images: np.ndarray, pca_number: int) -> np.ndarray: """ Method for calculating the principal components for a stack of background images. Parameters ---------- images : np.ndarray Background images with the mean subtracted from all images. pca_number : int Number of principal components that is used to model the background emission. Returns ------- np.ndarray Principal components with the second and third dimension reshaped to ``images``. """ _, _, v_svd = svds(images.reshape(images.shape[0], images.shape[1]*images.shape[2]), k=pca_number) v_svd = v_svd[::-1, ] return v_svd.reshape(v_svd.shape[0], images.shape[1], images.shape[2]) @typechecked def _model_background(basis: np.ndarray, im_arr: np.ndarray, mask: np.ndarray) -> np.ndarray: """ Method for creating a model of the background. 
""" @typechecked def _dot_product(x_dot: np.ndarray, *p: np.float64) -> np.ndarray: return np.dot(p, x_dot) fit_im_chi = np.zeros(im_arr.shape) # fit_coeff_chi = np.zeros((im_arr.shape[0], basis.shape[0])) basis_reshaped = basis.reshape(basis.shape[0], -1) for i in range(im_arr.shape[0]): basis_reshaped_masked = (basis*mask[i]).reshape(basis.shape[0], -1) data_to_fit = im_arr[i, ] init = np.ones(basis_reshaped_masked.shape[0]) fitted = curve_fit(_dot_product, basis_reshaped_masked, data_to_fit.reshape(-1), init) fit_im = np.dot(fitted[0], basis_reshaped) fit_im = fit_im.reshape(data_to_fit.shape[0], data_to_fit.shape[1]) fit_im_chi[i, ] = fit_im # fit_coeff_chi[i, ] = fitted[0] return fit_im_chi memory = self._m_config_port.get_attribute('MEMORY') pixscale = self.m_star_in_port.get_attribute('PIXSCALE') nimages = self.m_star_in_port.get_shape()[0] frames = memory_frames(memory, nimages) self.m_mask_star /= pixscale self.m_gaussian = int(math.ceil(self.m_gaussian/pixscale)) if self.m_subframe is not None: self.m_subframe /= pixscale self.m_subframe = int(math.ceil(self.m_subframe)) bg_mean = np.mean(self.m_background_in_port.get_all(), axis=0) star = np.zeros((nimages, 2)) for i, _ in enumerate(star): star[i, :] = locate_star(image=self.m_star_in_port[i, ] - bg_mean, center=None, width=self.m_subframe, fwhm=self.m_gaussian) print('Creating PCA basis set...', end='') basis_pca = _create_basis(self.m_background_in_port.get_all() - bg_mean, self.m_pca_number) print(' [DONE]') start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Calculating background model...', start_time) # Subtract the mean background from the star frames im_star = self.m_star_in_port[frames[i]:frames[i+1], ] - bg_mean mask = _create_mask(self.m_mask_star, star[frames[i]:frames[i+1], ], int(frames[i+1]-frames[i])) fit_im = _model_background(basis_pca, im_star*mask, mask) self.m_residuals_out_port.append(im_star-fit_im) if self.m_fit_out_port is not None: self.m_fit_out_port.append(fit_im) if self.m_mask_out_port is not None: self.m_mask_out_port.append(mask) history = f'PC number = {self.m_pca_number}' self.m_residuals_out_port.copy_attributes(self.m_star_in_port) self.m_residuals_out_port.add_history('PCABackgroundSubtractionModule', history) self.m_residuals_out_port.add_attribute('STAR_POSITION', star, static=False) if self.m_fit_out_port is not None: self.m_fit_out_port.copy_attributes(self.m_star_in_port) self.m_fit_out_port.add_history('PCABackgroundSubtractionModule', history) if self.m_mask_out_port is not None: self.m_mask_out_port.copy_attributes(self.m_star_in_port) self.m_mask_out_port.add_history('PCABackgroundSubtractionModule', history) self.m_residuals_out_port.close_port() class DitheringBackgroundModule(ProcessingModule): """ Pipeline module for PCA-based background subtraction of dithering data. This is a wrapper that applies the processing modules for either a mean or the PCA-based background subtraction. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, center: Optional[List[Tuple[int, int]]] = None, cubes: Optional[int] = None, size: float = 2., gaussian: float = 0.15, subframe: Optional[float] = None, pca_number: Optional[int] = 5, mask_star: float = 0.7, **kwargs) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Database tag with input images. image_out_tag : str Database tag to store the background subtracted images. 
center : list(tuple(int, int)), None Tuple with the centers of the dithering positions, e.g. ((x0, y0), (x1, y1)). The order of the coordinates should correspond to the order in which the star is present. If ``center`` and ``cubes`` are both set to None then sorting and subtracting of the background frames is based on ``DITHER_X`` and ``DITHER_Y``. If ``center`` is specified and ``cubes`` is set to None then the ``DITHER_X`` and ``DITHER_Y`` attributes will be used for sorting and subtracting of the background but not for selecting the dither positions. cubes : int, None Number of consecutive cubes per dither position. If ``cubes`` is set to None then sorting and subtracting of the background frames is based on ``DITHER_X`` and ``DITHER_Y``. size : float Cropped image size (arcsec). gaussian : float Full width at half maximum (arcsec) of the Gaussian kernel that is used to smooth the image before the star is located. subframe : float, None Size (arcsec) of the subframe that is used to search for the star. Cropping of the subframe is done around the center of the dithering position. The full image size (i.e. ``size``) will be used if set to None then. pca_number : int, None Number of principal components that is used to model the background emission. The PCA background subtraction is skipped if the argument is set to None. In that case, the mean background subtracted images are written toe ``image_out_tag``. mask_star : float Radius of the central mask (arcsec) that is used to exclude the star when fitting the principal components. The region behind the mask is included when subtracting the PCA background model. Returns ------- NoneType None """ if 'mask_planet' in kwargs: warnings.warn('The \'mask_planet\' parameter has been deprecated.', DeprecationWarning) if 'crop' in kwargs: warnings.warn('The \'crop\' parameter has been deprecated. The step to crop the ' 'images is no longer optional so crop=True.', DeprecationWarning) if 'prepare' in kwargs: warnings.warn('The \'prepare\' parameter has been deprecated. The preparation step ' 'is no longer optional so prepare=True.', DeprecationWarning) if 'pca_background' in kwargs: warnings.warn('The \'pca_background\' parameter has been deprecated. The PCA ' 'background is no longer optional when combine=\'pca\' so ' 'pca_background=True.', DeprecationWarning) if 'subtract_mean' in kwargs: warnings.warn('The \'subtract_mean\' parameter has been deprecated. Subtracting of ' 'the mean is no longer optional so subtract_mean=True.', DeprecationWarning) if 'combine' in kwargs: warnings.warn('The \'combine\' parameter has been deprecated. 
To write the mean ' 'background subtracted images to image_out_tag is done by setting ' 'pca_number=None.', DeprecationWarning) super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_center = center self.m_cubes = cubes self.m_size = size self.m_gaussian = gaussian self.m_pca_number = pca_number self.m_mask_star = mask_star self.m_subframe = subframe self.m_image_in_tag = image_in_tag self.m_image_out_tag = image_out_tag @typechecked def _initialize(self) -> Tuple[int, np.ndarray]: if self.m_cubes is None: dither_x = self.m_image_in_port.get_attribute('DITHER_X') dither_y = self.m_image_in_port.get_attribute('DITHER_Y') dither_xy = np.zeros((dither_x.shape[0], 2)) dither_xy[:, 0] = dither_x dither_xy[:, 1] = dither_y _, index = np.unique(dither_xy, axis=0, return_index=True) dither = dither_xy[np.sort(index)] npix = self.m_image_in_port.get_shape()[1] # Compute center from dither and make sure all positions are actually Python integers if self.m_center is None: self.m_center = np.copy(dither) + float(npix) / 2. self.m_center = tuple(zip(map(int, self.m_center[:, 0]), map(int, self.m_center[:, 1]))) else: if np.size(dither, axis=0) != np.size(self.m_center, axis=0): raise ValueError('Number of specified center positions should be equal to the ' 'number of unique dithering positions.') n_dither = np.size(self.m_center, 0) if self.m_cubes is None: star_pos = np.copy(dither) else: star_pos = np.arange(0, n_dither, 1) return n_dither, star_pos @typechecked def run(self) -> None: """ Run method of the module. Cuts out the detector sections at the different dither positions, prepares the PCA background subtraction, locates the star in each image, runs the PCA background subtraction, combines the output from the different dither positions is written to a single database tag. 
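# ----------------------------------------------------------------------
# Hedged usage sketch (added for illustration, not part of the original
# source): how this wrapper might be attached to a pipeline, assuming the
# standard Pypeline interface, the pynpoint.processing.background module
# path, and a database tag 'science' that holds dithered images. All tag
# names and paths are placeholders.
from pynpoint.core.pypeline import Pypeline
from pynpoint.processing.background import DitheringBackgroundModule

pipeline = Pypeline(working_place_in='.', input_place_in='.', output_place_in='.')

module = DitheringBackgroundModule(name_in='dither_background',
                                   image_in_tag='science',
                                   image_out_tag='science_bg_sub',
                                   center=None,   # use DITHER_X/DITHER_Y
                                   cubes=None,
                                   size=2.,       # cropped image size (arcsec)
                                   pca_number=5,
                                   mask_star=0.7)

pipeline.add_module(module)
pipeline.run_module('dither_background')
# ----------------------------------------------------------------------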
Returns ------- NoneType None """ @typechecked def _admin_start(count: int, n_dither: int, position: Tuple[int, int], star_pos: Union[np.ndarray, np.int64]) -> None: print(f'Processing dither position {count+1} out of {n_dither}...') print(f'Center position = {position}') if self.m_cubes is None and self.m_center is not None: print(f'DITHER_X, DITHER_Y = {tuple(star_pos)}') @typechecked def _admin_end(count: int) -> None: if self.m_pca_number is None: tags.append(f'{self.m_image_in_tag}_dither_mean{count+1}') else: tags.append(f'{self.m_image_in_tag}_dither_pca_res{count+1}') n_dither, star_pos = self._initialize() tags = [] for i, position in enumerate(self.m_center): _admin_start(i, n_dither, position, star_pos[i]) im_out_tag = f'{self.m_image_in_tag}_dither_crop{i+1}' module = CropImagesModule(name_in=f'crop{i}', image_in_tag=self.m_image_in_tag, image_out_tag=im_out_tag, size=self.m_size, center=(int(math.ceil(position[0])), int(math.ceil(position[1])))) module.connect_database(self._m_data_base) module._m_output_ports[im_out_tag].del_all_data() module._m_output_ports[im_out_tag].del_all_attributes() module.run() if self.m_cubes is None: dither_val = (n_dither, self.m_cubes, tuple(star_pos[i])) else: dither_val = (n_dither, self.m_cubes, int(star_pos[i])) im_in_tag = f'{self.m_image_in_tag}_dither_crop{i+1}' star_out_tag = f'{self.m_image_in_tag}_dither_star{i+1}' sub_out_tag = f'{self.m_image_in_tag}_dither_mean{i+1}' back_out_tag = f'{self.m_image_in_tag}_dither_background{i+1}' module = PCABackgroundPreparationModule(name_in=f'prepare{i}', image_in_tag=im_in_tag, star_out_tag=star_out_tag, subtracted_out_tag=sub_out_tag, background_out_tag=back_out_tag, dither=dither_val, combine='mean') module.connect_database(self._m_data_base) module._m_output_ports[star_out_tag].del_all_data() module._m_output_ports[star_out_tag].del_all_attributes() module._m_output_ports[sub_out_tag].del_all_data() module._m_output_ports[sub_out_tag].del_all_attributes() module._m_output_ports[back_out_tag].del_all_data() module._m_output_ports[back_out_tag].del_all_attributes() module.run() if self.m_pca_number is not None: star_in_tag = f'{self.m_image_in_tag}_dither_star{i+1}' back_in_tag = f'{self.m_image_in_tag}_dither_background{i+1}' res_out_tag = f'{self.m_image_in_tag}_dither_pca_res{i+1}' fit_out_tag = f'{self.m_image_in_tag}_dither_pca_fit{i+1}' mask_out_tag = f'{self.m_image_in_tag}_dither_pca_mask{i+1}' module = PCABackgroundSubtractionModule(name_in=f'pca_background{i}', star_in_tag=star_in_tag, background_in_tag=back_in_tag, residuals_out_tag=res_out_tag, fit_out_tag=fit_out_tag, mask_out_tag=mask_out_tag, pca_number=self.m_pca_number, mask_star=self.m_mask_star, subframe=self.m_subframe, gaussian=self.m_gaussian) module.connect_database(self._m_data_base) module._m_output_ports[res_out_tag].del_all_data() module._m_output_ports[res_out_tag].del_all_attributes() module._m_output_ports[fit_out_tag].del_all_data() module._m_output_ports[fit_out_tag].del_all_attributes() module._m_output_ports[mask_out_tag].del_all_data() module._m_output_ports[mask_out_tag].del_all_attributes() module.run() _admin_end(i) module = CombineTagsModule(name_in='combine', check_attr=True, index_init=False, image_in_tags=tags, image_out_tag=self.m_image_in_tag+'_dither_combine') module.connect_database(self._m_data_base) module._m_output_ports[self.m_image_in_tag+'_dither_combine'].del_all_data() module._m_output_ports[self.m_image_in_tag+'_dither_combine'].del_all_attributes() module.run() module = 
SortParangModule(name_in='sort', image_in_tag=self.m_image_in_tag+'_dither_combine', image_out_tag=self.m_image_out_tag) module.connect_database(self._m_data_base) module._m_output_ports[self.m_image_out_tag].del_all_data() module._m_output_ports[self.m_image_out_tag].del_all_attributes() module.run() PynPoint-0.11.0/pynpoint/processing/psfpreparation.py000066400000000000000000000647201450275315200230310ustar00rootroot00000000000000""" Pipeline modules to prepare the data for the PSF subtraction. """ import time import warnings from typing import Optional, Tuple import numpy as np from astropy.coordinates import EarthLocation from astropy.time import Time from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.module import progress, memory_frames from pynpoint.util.image import create_mask, scale_image, shift_image class PSFpreparationModule(ProcessingModule): """ Module to prepare the data for PSF subtraction with PCA. The preparation steps include masking and an optional normalization. """ __author__ = 'Markus Bonse, Tomas Stolker, Timothy Gebhard, Sven Kiefer' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, mask_out_tag: Optional[str] = None, norm: bool = False, resize: Optional[float] = None, cent_size: Optional[float] = None, edge_size: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry with images that is written as output. mask_out_tag : str, None, optional Tag of the database entry with the mask that is written as output. If set to None, no mask array is saved. norm : bool Normalize each image by its Frobenius norm. Only supported for 3D datasets (i.e. regular imaging). resize : float, None DEPRECATED. This parameter is currently ignored by the module and will be removed in a future version of PynPoint. cent_size : float, None, optional Radius of the central mask (in arcsec). No mask is used when set to None. edge_size : float, None, optional Outer radius (in arcsec) beyond which pixels are masked. No outer mask is used when set to None. If the value is larger than half the image size then it will be set to half the image size. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) if mask_out_tag is None: self.m_mask_out_port = None else: self.m_mask_out_port = self.add_output_port(mask_out_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_cent_size = cent_size self.m_edge_size = edge_size self.m_norm = norm # Raise a DeprecationWarning if the resize argument is used if resize is not None: warnings.warn('The \'resize\' parameter has been deprecated. Its value is currently ' 'being ignored, and the argument will be removed in a future version ' 'of PynPoint.', DeprecationWarning) @typechecked def run(self) -> None: """ Run method of the module. Masks and normalizes the images. 
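# ----------------------------------------------------------------------
# Hedged usage sketch (added for illustration, not part of the original
# source): masking the images before PSF subtraction, assuming the standard
# Pypeline interface and an existing 'science' tag. Tag names and the mask
# radii are placeholders.
from pynpoint.core.pypeline import Pypeline
from pynpoint.processing.psfpreparation import PSFpreparationModule

pipeline = Pypeline(working_place_in='.', input_place_in='.', output_place_in='.')

module = PSFpreparationModule(name_in='prep',
                              image_in_tag='science',
                              image_out_tag='science_prep',
                              mask_out_tag=None,
                              norm=False,
                              cent_size=0.15,   # inner mask radius (arcsec)
                              edge_size=1.1)    # outer mask radius (arcsec)

pipeline.add_module(module)
pipeline.run_module('prep')
# ----------------------------------------------------------------------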
Returns ------- NoneType None """ # Get the PIXSCALE and MEMORY attributes pixscale = self.m_image_in_port.get_attribute('PIXSCALE') memory = self._m_config_port.get_attribute('MEMORY') # Get the numnber of dimensions and shape ndim = self.m_image_in_port.get_ndim() im_shape = self.m_image_in_port.get_shape() if ndim == 3: # Number of images nimages = im_shape[-3] # Split into batches to comply with memory constraints frames = memory_frames(memory, nimages) elif ndim == 4: # Process all wavelengths per exposure at once frames = np.linspace(0, im_shape[-3], im_shape[-3]+1) if self.m_norm and ndim == 4: warnings.warn('The \'norm\' parameter does not support 4D datasets and will therefore ' 'be ignored.') # Convert m_cent_size and m_edge_size from arcseconds to pixels if self.m_cent_size is not None: self.m_cent_size /= pixscale if self.m_edge_size is not None: self.m_edge_size /= pixscale # Create 2D disk mask which will be applied to every frame mask = create_mask((int(im_shape[-2]), int(im_shape[-1])), (self.m_cent_size, self.m_edge_size)).astype(bool) # Keep track of the normalization vectors in case we are normalizing the images (if # we are not normalizing, this list will remain empty) norms = list() start_time = time.time() # Run the PSFpreparationModule for each subset of frames for i in range(frames[:-1].size): # Print progress to command line progress(i, len(frames[:-1]), 'Preparing images for PSF subtraction...', start_time) if ndim == 3: # Get the images and ensure they have the correct 3D shape with the following # three dimensions: (batch_size, height, width) images = self.m_image_in_port[frames[i]:frames[i+1], ] if images.ndim == 2: warnings.warn('The input data has 2 dimensions whereas 3 dimensions are ' 'required. An extra dimension has been added.') images = images[np.newaxis, ...] elif ndim == 4: # Process all wavelengths per exposure at once images = self.m_image_in_port[:, i, ] # Apply the mask, i.e., set all pixels to 0 where the mask is False images[:, ~mask] = 0. # If desired, normalize the images using the Frobenius norm if self.m_norm and ndim == 3: im_norm = np.linalg.norm(images, ord='fro', axis=(1, 2)) images /= im_norm[:, np.newaxis, np.newaxis] norms.append(im_norm) # Write processed images to output port if ndim == 3: self.m_image_out_port.append(images, data_dim=3) elif ndim == 4: self.m_image_out_port.append(images, data_dim=4) # Store information about mask if self.m_mask_out_port is not None: self.m_mask_out_port.set_all(mask) self.m_mask_out_port.copy_attributes(self.m_image_in_port) # Copy attributes from input port self.m_image_out_port.copy_attributes(self.m_image_in_port) # If the norms list is not empty (i.e., if we have computed the norm for every image), # we can also save the corresponding norm vector as an additional attribute if norms: self.m_image_out_port.add_attribute(name='norm', value=np.hstack(norms), static=False) # Save cent_size and edge_size as attributes to the output port if self.m_cent_size is not None: self.m_image_out_port.add_attribute(name='cent_size', value=self.m_cent_size * pixscale, static=True) if self.m_edge_size is not None: self.m_image_out_port.add_attribute(name='edge_size', value=self.m_edge_size * pixscale, static=True) class AngleInterpolationModule(ProcessingModule): """ Module for calculating the parallactic angle values by interpolating between the begin and end value of a data cube. 
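# ----------------------------------------------------------------------
# Illustration (added for clarity, not part of the original source) of the
# per-cube interpolation that this module applies; the start/end angles and
# the number of frames below are made-up values.
import numpy as np

parang_start, parang_end, nframes = -35.2, -33.8, 5

# Apply a 360 deg correction when the angles change sign at +/-180 deg
if parang_start < -170. and parang_end > 170.:
    parang_start += 360.
elif parang_end < -170. and parang_start > 170.:
    parang_end += 360.

parang = np.linspace(parang_start, parang_end, num=nframes)
# -> array([-35.2 , -34.85, -34.5 , -34.15, -33.8 ])
# ----------------------------------------------------------------------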
""" __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry for which the parallactic angles are written as attributes. Returns ------- NoneType None """ super().__init__(name_in) self.m_data_in_port = self.add_input_port(data_tag) self.m_data_out_port = self.add_output_port(data_tag) @typechecked def run(self) -> None: """ Run method of the module. Calculates the parallactic angles of each frame by linearly interpolating between the start and end values of the data cubes. The values are written as attributes to *data_tag*. A correction of 360 deg is applied when the start and end values of the angles change sign at +/-180 deg. Returns ------- NoneType None """ parang_start = self.m_data_in_port.get_attribute('PARANG_START') parang_end = self.m_data_in_port.get_attribute('PARANG_END') steps = self.m_data_in_port.get_attribute('NFRAMES') if 'NDIT' in self.m_data_in_port.get_all_non_static_attributes(): ndit = self.m_data_in_port.get_attribute('NDIT') if not np.all(ndit == steps): warnings.warn('There is a mismatch between the NDIT and NFRAMES values. The ' 'parallactic angles are calculated with a linear interpolation by ' 'using NFRAMES steps. A frame selection should be applied after ' 'the parallactic angles are calculated.') new_angles = [] start_time = time.time() for i, _ in enumerate(parang_start): progress(i, len(parang_start), 'Interpolating parallactic angles...', start_time) if parang_start[i] < -170. and parang_end[i] > 170.: parang_start[i] += 360. elif parang_end[i] < -170. and parang_start[i] > 170.: parang_end[i] += 360. if steps[i] == 1: new_angles = np.append(new_angles, [(parang_start[i] + parang_end[i])/2.]) elif steps[i] != 1: new_angles = np.append(new_angles, np.linspace(parang_start[i], parang_end[i], num=steps[i])) self.m_data_out_port.add_attribute('PARANG', new_angles, static=False) class SortParangModule(ProcessingModule): """ Module to sort the images and attributes with increasing ``INDEX``. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Database tag with the input data. image_out_tag : str Database tag where the output data will be stored. Should be different from ``image_in_tag``. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) @typechecked def run(self) -> None: """ Run method of the module. Sorts the images and attributes with increasing ``INDEX``. Therefore, the images are sorted by there original (usually chronological) order. 
Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') index = self.m_image_in_port.get_attribute('INDEX') ndim = self.m_image_in_port.get_ndim() nimages = self.m_image_in_port.get_shape()[-3] index_new = np.zeros(index.shape, dtype=int) if 'PARANG' in self.m_image_in_port.get_all_non_static_attributes(): parang = self.m_image_in_port.get_attribute('PARANG') parang_new = np.zeros(parang.shape) else: parang_new = None if 'STAR_POSITION' in self.m_image_in_port.get_all_non_static_attributes(): star = self.m_image_in_port.get_attribute('STAR_POSITION') star_new = np.zeros(star.shape) else: star_new = None index_sort = np.argsort(index) frames = memory_frames(memory, nimages) start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Sorting images in time...', start_time) index_new[frames[i]:frames[i+1]] = index[index_sort[frames[i]:frames[i+1]]] if parang_new is not None: parang_new[frames[i]:frames[i+1]] = parang[index_sort[frames[i]:frames[i+1]]] if star_new is not None: star_new[frames[i]:frames[i+1]] = star[index_sort[frames[i]:frames[i+1]]] # HDF5 indexing elements must be in increasing order for item in index_sort[frames[i]:frames[i+1]]: if ndim == 3: self.m_image_out_port.append(self.m_image_in_port[item, ], data_dim=3) elif ndim == 4: self.m_image_out_port.append(self.m_image_in_port[:, item, ], data_dim=4) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('SortParangModule', 'sorted by INDEX') self.m_image_out_port.add_attribute('INDEX', index_new, static=False) if parang_new is not None: self.m_image_out_port.add_attribute('PARANG', parang_new, static=False) if star_new is not None: self.m_image_out_port.add_attribute('STAR_POSITION', star_new, static=False) self.m_image_out_port.close_port() class AngleCalculationModule(ProcessingModule): """ Module for calculating the parallactic angles. The start time of the observation is taken and multiples of the exposure time are added to derive the parallactic angle of each frame inside the cube. Instrument specific overheads are included. """ __author__ = 'Alexander Bohn, Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, instrument: str = 'NACO') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry for which the parallactic angles are written as attributes. instrument : str Instrument name ('NACO', 'SPHERE/IRDIS', or 'SPHERE/IFS'). Returns ------- NoneType None """ super().__init__(name_in) # Parameters self.m_instrument = instrument # Set parameters according to choice of instrument if self.m_instrument == 'NACO': # pupil offset in degrees self.m_pupil_offset = 0. # No offset here # no overheads in cube mode, since cube is read out after all individual exposures # see NACO manual page 62 (v102) self.m_O_START = 0. self.m_DIT_DELAY = 0. self.m_ROT = 0. # rotator offset in degrees self.m_rot_offset = 89.44 # According to NACO manual page 65 (v102) elif self.m_instrument == 'SPHERE/IRDIS': # pupil offset in degrees self.m_pupil_offset = -135.99 # According to SPHERE manual page 64 (v102) # overheads in cube mode (several NDITS) in hours self.m_O_START = 0.3 / 3600. # According to SPHERE manual page 90/91 (v102) self.m_DIT_DELAY = 0.1 / 3600. # According to SPHERE manual page 90/91 (v102) self.m_ROT = 0.838 / 3600. # According to SPHERE manual page 90/91 (v102) # rotator offset in degrees self.m_rot_offset = 0. 
# no offset here elif self.m_instrument == 'SPHERE/IFS': # pupil offset in degrees self.m_pupil_offset = -135.99 - 100.48 # According to SPHERE manual page 64 (v102) # overheads in cube mode (several NDITS) in hours self.m_O_START = 0.3 / 3600. # According to SPHERE manual page 90/91 (v102) self.m_DIT_DELAY = 0.2 / 3600. # According to SPHERE manual page 90/91 (v102) self.m_ROT = 1.65 / 3600. # According to SPHERE manual page 90/91 (v102) # rotator offset in degrees self.m_rot_offset = 0. # no offset here else: raise ValueError('The instrument argument should be set to either \'NACO\', ' '\'SPHERE/IRDIS\', or \'SPHERE/IFS\'.') self.m_data_in_port = self.add_input_port(data_tag) self.m_data_out_port = self.add_output_port(data_tag) @typechecked def _attribute_check(self, ndit: np.ndarray, steps: np.ndarray) -> None: if not np.all(ndit == steps): warnings.warn('There is a mismatch between the NDIT and NFRAMES values. A frame ' 'selection should be applied after the parallactic angles are ' 'calculated.') if self.m_instrument == 'SPHERE/IFS': warnings.warn('AngleCalculationModule has not been tested for SPHERE/IFS data.') if self.m_instrument in ('SPHERE/IRDIS', 'SPHERE/IFS'): if self._m_config_port.get_attribute('RA') != 'ESO INS4 DROT2 RA': warnings.warn('For SPHERE data it is recommended to use the header keyword ' '\'ESO INS4 DROT2 RA\' to specify the object\'s right ascension. ' 'The input will be parsed accordingly. Using the regular ' '\'RA\' keyword will lead to wrong parallactic angles.') if self._m_config_port.get_attribute('DEC') != 'ESO INS4 DROT2 DEC': warnings.warn('For SPHERE data it is recommended to use the header keyword ' '\'ESO INS4 DROT2 DEC\' to specify the object\'s declination. ' 'The input will be parsed accordingly. Using the regular ' '\'DEC\' keyword will lead to wrong parallactic angles.') @typechecked def run(self) -> None: """ Run method of the module. Calculates the parallactic angles from the position of the object on the sky and the telescope location on earth. The start of the observation is used to extrapolate for the observation time of each individual image of a data cube. The values are written as PARANG attributes to *data_tag*. Returns ------- NoneType None """ # Load cube sizes steps = self.m_data_in_port.get_attribute('NFRAMES') ndit = self.m_data_in_port.get_attribute('NDIT') self._attribute_check(ndit, steps) # Load exposure time [hours] exptime = self.m_data_in_port.get_attribute('DIT')/3600. # Load telescope location tel_lat = self.m_data_in_port.get_attribute('LATITUDE') tel_lon = self.m_data_in_port.get_attribute('LONGITUDE') # Load temporary target position tmp_ra = self.m_data_in_port.get_attribute('RA') tmp_dec = self.m_data_in_port.get_attribute('DEC') # Parse to degree depending on instrument if 'SPHERE' in self.m_instrument: # get sign of declination tmp_dec_sign = np.sign(tmp_dec) tmp_dec = np.abs(tmp_dec) # parse RA tmp_ra_s = tmp_ra % 100 tmp_ra_m = ((tmp_ra - tmp_ra_s) / 1e2) % 100 tmp_ra_h = ((tmp_ra - tmp_ra_s - tmp_ra_m * 1e2) / 1e4) # parse DEC tmp_dec_s = tmp_dec % 100 tmp_dec_m = ((tmp_dec - tmp_dec_s) / 1e2) % 100 tmp_dec_d = ((tmp_dec - tmp_dec_s - tmp_dec_m * 1e2) / 1e4) # get RA and DEC in degree ra = (tmp_ra_h + tmp_ra_m / 60. + tmp_ra_s / 3600.) * 15. dec = tmp_dec_sign * (tmp_dec_d + tmp_dec_m / 60. + tmp_dec_s / 3600.) 
else: ra = tmp_ra dec = tmp_dec # Load start times of exposures obs_dates = self.m_data_in_port.get_attribute('DATE') # Load pupil positions during observations if self.m_instrument == 'NACO': pupil_pos = self.m_data_in_port.get_attribute('PUPIL') elif self.m_instrument == 'SPHERE/IRDIS': pupil_pos = np.zeros(steps.shape) elif self.m_instrument == 'SPHERE/IFS': pupil_pos = np.zeros(steps.shape) new_angles = np.array([]) pupil_pos_arr = np.array([]) start_time = time.time() # Calculate parallactic angles for each cube for i, tmp_steps in enumerate(steps): progress(i, len(steps), 'Calculating parallactic angles...', start_time) t = Time(obs_dates[i].decode('utf-8'), location=EarthLocation(lat=tel_lat, lon=tel_lon)) sid_time = t.sidereal_time('apparent').value # Extrapolate sideral times from start time of the cube for each frame of it sid_time_arr = np.linspace(sid_time+self.m_O_START, (sid_time+self.m_O_START) + (exptime+self.m_DIT_DELAY + self.m_ROT)*(tmp_steps-1), tmp_steps) # Convert to degrees sid_time_arr_deg = sid_time_arr * 15. # Calculate hour angle in degrees hour_angle = sid_time_arr_deg - ra[i] # Conversion to radians: hour_angle_rad = np.deg2rad(hour_angle) dec_rad = np.deg2rad(dec[i]) lat_rad = np.deg2rad(tel_lat) p_angle = np.arctan2(np.sin(hour_angle_rad), (np.cos(dec_rad)*np.tan(lat_rad) - np.sin(dec_rad)*np.cos(hour_angle_rad))) new_angles = np.append(new_angles, np.rad2deg(p_angle)) pupil_pos_arr = np.append(pupil_pos_arr, np.ones(tmp_steps)*pupil_pos[i]) # Correct for rotator (SPHERE) or pupil offset (NACO) if self.m_instrument == 'NACO': # See NACO manual page 65 (v102) new_angles_corr = new_angles - (90. + (self.m_rot_offset-pupil_pos_arr)) elif self.m_instrument == 'SPHERE/IRDIS': # See SPHERE manual page 64 (v102) new_angles_corr = new_angles - self.m_pupil_offset elif self.m_instrument == 'SPHERE/IFS': # See SPHERE manual page 64 (v102) new_angles_corr = new_angles - self.m_pupil_offset indices = np.where(new_angles_corr < -180.)[0] if indices.size > 0: new_angles_corr[indices] += 360. indices = np.where(new_angles_corr > 180.)[0] if indices.size > 0: new_angles_corr[indices] -= 360. self.m_data_out_port.add_attribute('PARANG', new_angles_corr, static=False) class SDIpreparationModule(ProcessingModule): """ Module for preparing continuum frames for dual-band simultaneous differential imaging. """ __author__ = 'Gabriele Cugno, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, wavelength: Tuple[float, float], width: Tuple[float, float]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. Should be different from *image_in_tag*. wavelength : tuple(float, float) The central wavelengths of the line and continuum filter, (line, continuum), in arbitrary but identical units. width : tuple(float, float) The equivalent widths of the line and continuum filter, (line, continuum), in arbitrary but identical units. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_line_wvl = wavelength[0] self.m_cnt_wvl = wavelength[1] self.m_line_width = width[0] self.m_cnt_width = width[1] @typechecked def run(self) -> None: """ Run method of the module. 
Normalizes the images for the different filter widths, upscales the images, and crops the images to the initial image shape in order to align the PSF patterns. Returns ------- NoneType None """ wvl_factor = self.m_line_wvl/self.m_cnt_wvl width_factor = self.m_line_width/self.m_cnt_width nimages = self.m_image_in_port.get_shape()[0] start_time = time.time() for i in range(nimages): progress(i, nimages, 'Preparing images for dual-band SDI...', start_time) image = self.m_image_in_port[i, ] im_scale = width_factor * scale_image(image, wvl_factor, wvl_factor) if i == 0: npix_del = im_scale.shape[-1] - image.shape[-1] if npix_del % 2 == 0: npix_del_a = int(npix_del/2) npix_del_b = int(npix_del/2) else: npix_del_a = int((npix_del-1)/2) npix_del_b = int((npix_del+1)/2) im_crop = im_scale[npix_del_a:-npix_del_b, npix_del_a:-npix_del_b] if npix_del % 2 == 1: im_crop = shift_image(im_crop, (-0.5, -0.5), interpolation='spline') self.m_image_out_port.append(im_crop, data_dim=3) history = f'(line, continuum) = ({self.m_line_wvl}, {self.m_cnt_wvl})' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('SDIpreparationModule', history) self.m_image_in_port.close_port() PynPoint-0.11.0/pynpoint/processing/psfsubtraction.py000066400000000000000000001020061450275315200230300ustar00rootroot00000000000000""" Pipeline modules for PSF subtraction. """ import time import math import warnings from copy import deepcopy from typing import List, Optional, Tuple, Union import numpy as np from scipy.ndimage import rotate from sklearn.decomposition import PCA from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import subtract_psf from pynpoint.util.module import progress from pynpoint.util.multipca import PcaMultiprocessingCapsule from pynpoint.util.residuals import combine_residuals from pynpoint.util.postproc import postprocessor from pynpoint.util.sdi import scaling_factors class PcaPsfSubtractionModule(ProcessingModule): """ Pipeline module for PSF subtraction with principal component analysis (PCA). The module can be used for ADI, RDI (see ``subtract_mean`` parameter), SDI, and ASDI. The residuals are calculated in parallel for the selected numbers of principal components. This may require a large amount of memory in case the stack of input images is very large. The number of processes can therefore be set with the ``CPU`` keyword in the configuration file. """ __author__ = 'Markus Bonse, Tomas Stolker, Sven Kiefer' @typechecked def __init__(self, name_in: str, images_in_tag: str, reference_in_tag: str, res_mean_tag: Optional[str] = None, res_median_tag: Optional[str] = None, res_weighted_tag: Optional[str] = None, res_rot_mean_clip_tag: Optional[str] = None, res_arr_out_tag: Optional[str] = None, basis_out_tag: Optional[str] = None, pca_numbers: Union[range, List[int], np.ndarray, Tuple[range, range], Tuple[List[int], List[int]], Tuple[np.ndarray, np.ndarray]] = range(1, 21), extra_rot: float = 0., subtract_mean: bool = True, processing_type: str = 'ADI') -> None: """ Parameters ---------- name_in : str Name tag of the pipeline module. images_in_tag : str Database entry with the images from which the PSF model will be subtracted. reference_in_tag : str Database entry with the reference images from which the PSF model is created. Usually ``reference_in_tag`` is the same as ``images_in_tag``, but a different dataset can be used as reference images in case of RDI. 
res_mean_tag : str, None Database entry where the mean-collapsed residuals will be stored. The residuals are not calculated and stored if set to None. res_median_tag : str, None Database entry where the median-collapsed residuals will be stored. The residuals are not calculated and stored if set to None. res_weighted_tag : str, None Database entry where the noise-weighted residuals will be stored (see Bottom et al. 2017). The residuals are not calculated and stored if set to None. res_rot_mean_clip_tag : str, None Tag of the database entry of the clipped mean residuals. Not calculated if set to None. res_arr_out_tag : str, None Database entry where the derotated, but not collapsed, residuals are stored. The number of principal components that was used is appended to the ``res_arr_out_tag``. The residuals are not stored if set to None. This parameter is not supported with multiprocessing (i.e. ``CPU`` > 1). For IFS data and if the processing type is either ADI+SDI or SDI+ADI the residuals can only be calculated if exactly 1 principal component for each ADI and SDI is given with the pca_numbers parameter. basis_out_tag : str, None Database entry where the principal components are stored. The data is not stored if set to None. Only supported for imaging data with ``processing_type='ADI'``. pca_numbers : range, list(int), np.ndarray, tuple(range, range), tuple(list(int), list(int)), tuple(np.ndarray, np.ndarray) Number of principal components that are used for the PSF model. With ADI or SDI, a single list/range/array needs to be provided while for SDI+ADI or ADI+SDI a tuple is required with twice a list/range/array. extra_rot : float Additional rotation angle of the images (deg). subtract_mean : bool The mean of the science and reference images is subtracted from the corresponding stack, before the PCA basis is constructed and fitted. Set the argument to ``False`` for RDI, that is, in case ``reference_in_tag`` is different from ``images_in_tag`` and there is no or limited field rotation. The parameter is only supported with ``processing_type='ADI'``. processing_type : str Post-processing type: - ADI: Angular differential imaging. Can be used both on imaging and IFS datasets. This argument is also used for RDI, in which case the ``PARANG`` attribute should contain zeros as derotation angles (e.g. with :func:`~pynpoint.core.pypeline.Pypeline.set_attribute` or :class:`~pynpoint.readwrite.attr_writing.ParangWritingModule`). The collapsed residuals are stored as a 3D dataset with one image per principal component. - SDI: Spectral differential imaging. Can only be applied on IFS datasets. The collapsed residuals are stored as a 4D dataset with one image per wavelength and principal component. - SDI+ADI: Spectral and angular differential imaging. Can only be applied on IFS datasets. The collapsed residuals are stored as 5D datasets with one image per wavelength and each of the principal components. - ADI+SDI: Angular and spectral differential imaging. Can only be applied on IFS datasets. The collapsed residuals are stored as 5D datasets with one image per wavelength and each of the principal components. - CODI: Perform ADI and SDI simultaneously. Usually requires many more principal components than the other techniques. The collapsed residuals are stored as 4D datasets with one image per wavelength and each of the principal components. 
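# ----------------------------------------------------------------------
# Hedged usage sketch (added for illustration, not part of the original
# source): a plain ADI reduction over a range of principal components,
# assuming the standard Pypeline interface and a 'science_prep' tag whose
# images carry a PARANG attribute. Tag names are placeholders.
from pynpoint.core.pypeline import Pypeline
from pynpoint.processing.psfsubtraction import PcaPsfSubtractionModule

pipeline = Pypeline(working_place_in='.', input_place_in='.', output_place_in='.')

module = PcaPsfSubtractionModule(name_in='pca',
                                 images_in_tag='science_prep',
                                 reference_in_tag='science_prep',
                                 res_median_tag='residuals_median',
                                 pca_numbers=range(1, 21),
                                 processing_type='ADI')

pipeline.add_module(module)
pipeline.run_module('pca')
# ----------------------------------------------------------------------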
Returns ------- NoneType None """ super().__init__(name_in) self.m_pca_numbers = pca_numbers if isinstance(pca_numbers, tuple): self.m_components = (np.sort(np.atleast_1d(pca_numbers[0])), np.sort(np.atleast_1d(pca_numbers[1]))) else: self.m_components = np.sort(np.atleast_1d(pca_numbers)) self.m_pca = PCA(n_components=np.amax(self.m_components), svd_solver='arpack') self.m_extra_rot = extra_rot self.m_subtract_mean = subtract_mean self.m_processing_type = processing_type self.m_reference_in_port = self.add_input_port(reference_in_tag) self.m_star_in_port = self.add_input_port(images_in_tag) if res_mean_tag is None: self.m_res_mean_out_port = None else: self.m_res_mean_out_port = self.add_output_port(res_mean_tag) if res_median_tag is None: self.m_res_median_out_port = None else: self.m_res_median_out_port = self.add_output_port(res_median_tag) if res_weighted_tag is None: self.m_res_weighted_out_port = None else: self.m_res_weighted_out_port = self.add_output_port(res_weighted_tag) if res_rot_mean_clip_tag is None: self.m_res_rot_mean_clip_out_port = None else: self.m_res_rot_mean_clip_out_port = self.add_output_port(res_rot_mean_clip_tag) if res_arr_out_tag is None: self.m_res_arr_out_ports = None else: if isinstance(self.m_components, tuple): self.m_res_arr_out_ports = self.add_output_port(res_arr_out_tag) else: self.m_res_arr_out_ports = {} for pca_number in self.m_components: self.m_res_arr_out_ports[pca_number] = self.add_output_port( res_arr_out_tag + str(pca_number)) if basis_out_tag is None: self.m_basis_out_port = None else: self.m_basis_out_port = self.add_output_port(basis_out_tag) if self.m_processing_type in ['ADI', 'SDI', 'CODI']: if not isinstance(self.m_components, (range, list, np.ndarray)): raise ValueError(f'The post-processing type \'{self.m_processing_type}\' requires ' f'a single range/list/array as argument for \'pca_numbers\'.') elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: if not isinstance(self.m_components, tuple): raise ValueError(f'The post-processing type \'{self.m_processing_type}\' requires ' f'a tuple for with twice a range/list/array as argument for ' f'\'pca_numbers\'.') if res_arr_out_tag is not None and len(self.m_components[0]) + \ len(self.m_components[1]) != 2: raise ValueError(f'If the post-processing type \'{self.m_processing_type}\' ' 'is selected, residuals can only be calculated if no more than ' '1 principal component for ADI and SDI is given.') else: raise ValueError('Please select a valid post-processing type.') @typechecked def _run_multi_processing(self, star_reshape: np.ndarray, im_shape: tuple, indices: Optional[np.ndarray]) -> None: """ Internal function to create the residuals, derotate the images, and write the output using multiprocessing. """ cpu = self._m_config_port.get_attribute('CPU') parang = -1.*self.m_star_in_port.get_attribute('PARANG') + self.m_extra_rot if self.m_ifs_data: if 'WAVELENGTH' in self.m_star_in_port.get_all_non_static_attributes(): wavelength = self.m_star_in_port.get_attribute('WAVELENGTH') else: raise ValueError('The wavelengths are not found. 
These should be stored ' 'as the \'WAVELENGTH\' attribute.') scales = scaling_factors(wavelength) else: scales = None if self.m_processing_type in ['ADI', 'SDI', 'CODI']: pca_first = self.m_components pca_secon = [-1] # Not used elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: pca_first = self.m_components[0] pca_secon = self.m_components[1] if self.m_ifs_data: if self.m_processing_type in ['ADI', 'SDI', 'CODI']: res_shape = (len(pca_first), len(wavelength), im_shape[-2], im_shape[-1]) elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: res_shape = (len(pca_first), len(pca_secon), len(wavelength), im_shape[-2], im_shape[-1]) else: res_shape = (len(self.m_components), im_shape[1], im_shape[2]) tmp_output = np.zeros(res_shape) if self.m_res_mean_out_port is not None: self.m_res_mean_out_port.set_all(tmp_output, keep_attributes=False) if self.m_res_median_out_port is not None: self.m_res_median_out_port.set_all(tmp_output, keep_attributes=False) if self.m_res_weighted_out_port is not None: self.m_res_weighted_out_port.set_all(tmp_output, keep_attributes=False) if self.m_res_rot_mean_clip_out_port is not None: self.m_res_rot_mean_clip_out_port.set_all(tmp_output, keep_attributes=False) self.m_star_in_port.close_port() self.m_reference_in_port.close_port() if self.m_res_mean_out_port is not None: self.m_res_mean_out_port.close_port() if self.m_res_median_out_port is not None: self.m_res_median_out_port.close_port() if self.m_res_weighted_out_port is not None: self.m_res_weighted_out_port.close_port() if self.m_res_rot_mean_clip_out_port is not None: self.m_res_rot_mean_clip_out_port.close_port() if self.m_basis_out_port is not None: self.m_basis_out_port.close_port() capsule = PcaMultiprocessingCapsule(self.m_res_mean_out_port, self.m_res_median_out_port, self.m_res_weighted_out_port, self.m_res_rot_mean_clip_out_port, cpu, deepcopy(self.m_components), deepcopy(self.m_pca), deepcopy(star_reshape), deepcopy(parang), deepcopy(scales), im_shape, indices, self.m_processing_type) capsule.run() @typechecked def _run_single_processing(self, star_reshape: np.ndarray, im_shape: tuple, indices: Optional[np.ndarray]) -> None: """ Internal function to create the residuals, derotate the images, and write the output using a single process. """ start_time = time.time() # Get the parallactic angles parang = -1.*self.m_star_in_port.get_attribute('PARANG') + self.m_extra_rot if self.m_ifs_data: # Get the wavelengths if 'WAVELENGTH' in self.m_star_in_port.get_all_non_static_attributes(): wavelength = self.m_star_in_port.get_attribute('WAVELENGTH') else: raise ValueError('The wavelengths are not found. 
These should be stored ' 'as the \'WAVELENGTH\' attribute.') # Calculate the wavelength ratios scales = scaling_factors(wavelength) else: scales = None if self.m_processing_type in ['ADI', 'SDI', 'CODI']: pca_first = self.m_components pca_secon = [-1] # Not used n_pca_total = len(pca_first) elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: pca_first = self.m_components[0] pca_secon = self.m_components[1] n_pca_total = len(pca_first) + len(pca_secon) # Setup output arrays out_array_res = np.zeros(im_shape) if self.m_ifs_data: if self.m_processing_type in ['ADI', 'SDI', 'CODI']: res_shape = (len(pca_first), len(wavelength), im_shape[-2], im_shape[-1]) elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: res_shape = (len(pca_first), len(pca_secon), len(wavelength), im_shape[-2], im_shape[-1]) else: res_shape = (len(pca_first), im_shape[-2], im_shape[-1]) out_array_mean = np.zeros(res_shape) out_array_medi = np.zeros(res_shape) out_array_weig = np.zeros(res_shape) out_array_clip = np.zeros(res_shape) # loop over all different combination of pca_numbers and applying the reductions for i, pca_1 in enumerate(pca_first): for j, pca_2 in enumerate(pca_secon): progress(i+j, n_pca_total, 'Creating residuals...', start_time) # process images residuals, res_rot = postprocessor(images=star_reshape, angles=parang, scales=scales, pca_number=(pca_1, pca_2), pca_sklearn=self.m_pca, im_shape=im_shape, indices=indices, processing_type=self.m_processing_type) # 1.) derotated residuals if self.m_res_arr_out_ports is not None: if not self.m_ifs_data: self.m_res_arr_out_ports[pca_1].set_all(res_rot) self.m_res_arr_out_ports[pca_1].copy_attributes(self.m_star_in_port) self.m_res_arr_out_ports[pca_1].add_history( 'PcaPsfSubtractionModule', f'max PC number = {pca_first}') else: out_array_res = residuals # 2.) mean residuals if self.m_res_mean_out_port is not None: if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: out_array_mean[i, j] = combine_residuals(method='mean', res_rot=res_rot, angles=parang) else: out_array_mean[i] = combine_residuals(method='mean', res_rot=res_rot, angles=parang) # 3.) median residuals if self.m_res_median_out_port is not None: if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: out_array_medi[i, j] = combine_residuals(method='median', res_rot=res_rot, angles=parang) else: out_array_medi[i] = combine_residuals(method='median', res_rot=res_rot, angles=parang) # 4.) noise-weighted residuals if self.m_res_weighted_out_port is not None: if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: out_array_weig[i, j] = combine_residuals(method='weighted', res_rot=res_rot, residuals=residuals, angles=parang) else: out_array_weig[i] = combine_residuals(method='weighted', res_rot=res_rot, residuals=residuals, angles=parang) # 5.) clipped mean residuals if self.m_res_rot_mean_clip_out_port is not None: if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']: out_array_clip[i, j] = combine_residuals(method='clipped', res_rot=res_rot, angles=parang) else: out_array_clip[i] = combine_residuals(method='clipped', res_rot=res_rot, angles=parang) # Configurate data output according to the processing type # 1.) derotated residuals if self.m_res_arr_out_ports is not None and self.m_ifs_data: if pca_secon[0] == -1: history = f'max PC number = {pca_first}' else: history = f'max PC number = {pca_first} / {pca_secon}' # squeeze out_array_res to reduce dimensionallity as the residuals of # SDI+ADI and ADI+SDI are always of the form (1, 1, ...) 
squeezed = np.squeeze(out_array_res) if isinstance(self.m_components, tuple): self.m_res_arr_out_ports.set_all(squeezed, data_dim=squeezed.ndim) self.m_res_arr_out_ports.copy_attributes(self.m_star_in_port) self.m_res_arr_out_ports.add_history('PcaPsfSubtractionModule', history) else: for i, pca in enumerate(self.m_components): self.m_res_arr_out_ports[pca].append(squeezed[i]) self.m_res_arr_out_ports[pca].add_history('PcaPsfSubtractionModule', history) # 2.) mean residuals if self.m_res_mean_out_port is not None: self.m_res_mean_out_port.set_all(out_array_mean, data_dim=out_array_mean.ndim) # 3.) median residuals if self.m_res_median_out_port is not None: self.m_res_median_out_port.set_all(out_array_medi, data_dim=out_array_medi.ndim) # 4.) noise-weighted residuals if self.m_res_weighted_out_port is not None: self.m_res_weighted_out_port.set_all(out_array_weig, data_dim=out_array_weig.ndim) # 5.) clipped mean residuals if self.m_res_rot_mean_clip_out_port is not None: self.m_res_rot_mean_clip_out_port.set_all(out_array_clip, data_dim=out_array_clip.ndim) @typechecked def run(self) -> None: """ Run method of the module. Subtracts the mean of the image stack from all images, reshapes the stack of images into a 2D array, uses singular value decomposition to construct the orthogonal basis set, calculates the PCA coefficients for each image, subtracts the PSF model, and writes the residuals as output. Returns ------- NoneType None """ print('Input parameters:') print(f' - Post-processing type: {self.m_processing_type}') print(f' - Number of principal components: {self.m_pca_numbers}') print(f' - Subtract mean: {self.m_subtract_mean}') print(f' - Extra rotation (deg): {self.m_extra_rot}') cpu = self._m_config_port.get_attribute('CPU') if cpu > 1 and self.m_res_arr_out_ports is not None: warnings.warn('Multiprocessing not possible if \'res_arr_out_tag\' is not set ' 'to None.') # Read the data star_data = self.m_star_in_port.get_all() im_shape = star_data.shape # Parse input processing types to internal processing types if star_data.ndim == 3: self.m_ifs_data = False elif star_data.ndim == 4: self.m_ifs_data = True else: raise ValueError(f'The input data has {star_data.ndim} dimensions while only 3 or 4 ' f' are supported by the pipeline module.') if self.m_processing_type == 'ADI' and not self.m_ifs_data: # select the first image and get the unmasked image indices im_star = star_data[0, ].reshape(-1) indices = np.where(im_star != 0.)[0] # reshape the star data and select the unmasked pixels star_reshape = star_data.reshape(im_shape[0], im_shape[1]*im_shape[2]) star_reshape = star_reshape[:, indices] if self.m_reference_in_port.tag == self.m_star_in_port.tag: ref_reshape = deepcopy(star_reshape) else: ref_data = self.m_reference_in_port.get_all() ref_shape = ref_data.shape if ref_shape[-2:] != im_shape[-2:]: raise ValueError('The image size of the science data and the reference data ' 'should be identical.') # reshape reference data and select the unmasked pixels ref_reshape = ref_data.reshape(ref_shape[0], ref_shape[1]*ref_shape[2]) ref_reshape = ref_reshape[:, indices] # subtract mean from science data, if required if self.m_subtract_mean: mean_star = np.mean(star_reshape, axis=0) star_reshape -= mean_star # subtract mean from reference data mean_ref = np.mean(ref_reshape, axis=0) ref_reshape -= mean_ref # create the PCA basis print('Constructing PSF model...', end='') self.m_pca.fit(ref_reshape) # add mean of reference array as 1st PC and orthogonalize it with respect to # the other principal 
components if not self.m_subtract_mean: mean_ref_reshape = mean_ref.reshape((1, mean_ref.shape[0])) q_ortho, _ = np.linalg.qr(np.vstack((mean_ref_reshape, self.m_pca.components_[:-1, ])).T) self.m_pca.components_ = q_ortho.T print(' [DONE]') if self.m_basis_out_port is not None: pc_size = self.m_pca.components_.shape[0] basis = np.zeros((pc_size, im_shape[1]*im_shape[2])) basis[:, indices] = self.m_pca.components_ basis = basis.reshape((pc_size, im_shape[1], im_shape[2])) self.m_basis_out_port.set_all(basis) else: # This setup is used for SDI processes. No preparations are possible because SDI/ADI # combinations are case specific and need to be conducted in pca_psf_subtraction. self.m_pca = None indices = None star_reshape = star_data # Running a single processing PCA analysis if cpu == 1 or self.m_res_arr_out_ports is not None: self._run_single_processing(star_reshape, im_shape, indices) # Running multiprocessed PCA analysis else: print('Creating residuals', end='') self._run_multi_processing(star_reshape, im_shape, indices) print(' [DONE]') # write history if isinstance(self.m_components, tuple): history = f'max PC number = {np.amax(self.m_components[0])} / ' \ f'{np.amax(self.m_components[1])}' else: history = f'max PC number = {np.amax(self.m_components)}' # save history for all other ports if self.m_res_mean_out_port is not None: self.m_res_mean_out_port.copy_attributes(self.m_star_in_port) self.m_res_mean_out_port.add_history('PcaPsfSubtractionModule', history) if self.m_res_median_out_port is not None: self.m_res_median_out_port.copy_attributes(self.m_star_in_port) self.m_res_median_out_port.add_history('PcaPsfSubtractionModule', history) if self.m_res_weighted_out_port is not None: self.m_res_weighted_out_port.copy_attributes(self.m_star_in_port) self.m_res_weighted_out_port.add_history('PcaPsfSubtractionModule', history) if self.m_res_rot_mean_clip_out_port is not None: self.m_res_rot_mean_clip_out_port.copy_attributes(self.m_star_in_port) self.m_res_rot_mean_clip_out_port.add_history('PcaPsfSubtractionModule', history) self.m_star_in_port.close_port() class ClassicalADIModule(ProcessingModule): """ Pipeline module for PSF subtraction with classical ADI by subtracting a median-combined reference image. A rotation threshold can be set for a fixed separation to prevent self-subtraction. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, res_out_tag: str, stack_out_tag: str, threshold: Optional[Tuple[float, float, float]], nreference: Optional[int] = None, residuals: str = 'median', extra_rot: float = 0.) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry with the science images that are read as input. res_out_tag : str Tag of the database entry with the residuals of the PSF subtraction that are written as output. stack_out_tag : str Tag of the database entry with the stacked residuals that are written as output. threshold : tuple(float, float, float), None Tuple with the separation for which the angle threshold is optimized (arcsec), FWHM of the PSF (arcsec), and the threshold (FWHM) for the selection of the reference images. No threshold is used if set to None. nreference : int, None Number of reference images, closest in line to the science image. All images are used if *threshold* is None or *nreference* is None. residuals : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). 
extra_rot : float Additional rotation angle of the images (deg). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_res_out_port = self.add_output_port(res_out_tag) self.m_stack_out_port = self.add_output_port(stack_out_tag) self.m_threshold = threshold self.m_nreference = nreference self.m_extra_rot = extra_rot self.m_residuals = residuals @typechecked def run(self) -> None: """ Run method of the module. Selects for each image the reference images closest in line while taking into account a rotation threshold for a fixed separation, median-combines the references images, and subtracts the reference image from each image separately. Alternatively, a single, median-combined reference image can be created and subtracted from all images. All images are used if the rotation condition can not be met. Both the individual residuals (before derotation) and the stacked residuals are stored. Returns ------- NoneType None """ parang = -1.*self.m_image_in_port.get_attribute('PARANG') + self.m_extra_rot nimages = self.m_image_in_port.get_shape()[0] if self.m_threshold: parang_thres = 2.*math.atan2(self.m_threshold[2]*self.m_threshold[1], 2.*self.m_threshold[0]) parang_thres = math.degrees(parang_thres) reference = None else: parang_thres = None reference = self.m_image_in_port.get_all() reference = np.median(reference, axis=0) ang_diff = np.zeros((nimages, parang.shape[0])) for i in range(nimages): ang_diff[i, :] = np.abs(parang[i] - parang) self.apply_function_to_images(subtract_psf, self.m_image_in_port, self.m_res_out_port, 'Classical ADI', func_args=(parang_thres, self.m_nreference, reference, ang_diff, self.m_image_in_port)) self.m_res_in_port = self.add_input_port(self.m_res_out_port._m_tag) im_res = self.m_res_in_port.get_all() res_rot = np.zeros(im_res.shape) for i, item in enumerate(parang): res_rot[i, ] = rotate(im_res[i, ], item, reshape=False) stack = combine_residuals(self.m_residuals, res_rot, residuals=im_res, angles=parang) self.m_stack_out_port.set_all(stack) if self.m_threshold: history = f'threshold [deg] = {parang_thres:.2f}' else: history = 'threshold [deg] = None' self.m_res_out_port.copy_attributes(self.m_image_in_port) self.m_res_out_port.add_history('ClassicalADIModule', history) self.m_stack_out_port.copy_attributes(self.m_image_in_port) self.m_stack_out_port.add_history('ClassicalADIModule', history) self.m_res_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/resizing.py000066400000000000000000000310441450275315200216170ustar00rootroot00000000000000""" Pipeline modules for resizing of images. """ import math import time from typing import Tuple, Union import numpy as np from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import image_scaling from pynpoint.util.image import crop_image from pynpoint.util.module import memory_frames, progress class CropImagesModule(ProcessingModule): """ Pipeline module for cropping of images around a given position. """ @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, size: float, center: Union[Tuple[int, int], None]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. Should be different from *image_in_tag*. size : float New image size (arcsec). 
The same size will be used for both image dimensions. center : tuple(int, int), None Tuple (x0, y0) with the new image center. Python indexing starts at 0. The center of the input images will be used when *center* is set to *None*. Note that if the image is even-sized, it is not possible to a uniquely define a pixel position in the center of the image. The image center is determined (with pixel precision) with the :func:`~pynpoint.util.image.center_pixel` function. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_size = size self.m_center = center if self.m_center is not None: self.m_center = (self.m_center[1], self.m_center[0]) # (y, x) @typechecked def run(self) -> None: """ Run method of the module. Decreases the image size by cropping around an given position. The module always returns odd-sized images. Returns ------- NoneType None """ # Get memory and number of images to split the frames into chunks memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in_port.get_shape()[0] # Get the numnber of dimensions and shape ndim = self.m_image_in_port.get_ndim() im_shape = self.m_image_in_port.get_shape() if ndim == 3: # Number of images nimages = im_shape[-3] # Split into batches to comply with memory constraints frames = memory_frames(memory, nimages) elif ndim == 4: # Process all wavelengths per exposure at once frames = np.linspace(0, im_shape[-3], im_shape[-3]+1) # Convert size parameter from arcseconds to pixels pixscale = self.m_image_in_port.get_attribute('PIXSCALE') print(f'New image size (arcsec) = {self.m_size}') self.m_size = int(math.ceil(self.m_size / pixscale)) print(f'New image size (pixels) = {self.m_size}') if self.m_center is not None: print(f'New image center (x, y) = ({self.m_center[1]}, {self.m_center[0]})') # Crop images chunk by chunk start_time = time.time() for i in range(len(frames[:-1])): # Update progress bar progress(i, len(frames[:-1]), 'Cropping images...', start_time) # Select images in the current chunk if ndim == 3: images = self.m_image_in_port[frames[i]:frames[i+1], ] elif ndim == 4: # Process all wavelengths per exposure at once images = self.m_image_in_port[:, i, ] # crop images according to input parameters images = crop_image(images, self.m_center, self.m_size, copy=False) # Write processed images to output port if ndim == 3: self.m_image_out_port.append(images, data_dim=3) elif ndim == 4: self.m_image_out_port.append(images, data_dim=4) # Save history and copy attributes history = f'image size (pix) = {self.m_size}' self.m_image_out_port.add_history('CropImagesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() class ScaleImagesModule(ProcessingModule): """ Pipeline module for rescaling of an image. """ @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, scaling: Union[Tuple[float, float, float], Tuple[None, None, float], Tuple[float, float, None]], pixscale: bool = True) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. Should be different from ``image_in_tag``. scaling : tuple(float, float, float) Tuple with the scaling factors for the image size and flux, (scaling_x, scaling_y, scaling_flux). 
Upsampling and downsampling of the image corresponds to ``scaling_x/y`` > 1 and 0 < ``scaling_x/y`` < 1, respectively. pixscale : bool Adjust the pixel scale by the average scaling in x and y direction. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) if scaling[0] is None: self.m_scaling_x = 1. else: self.m_scaling_x = scaling[0] if scaling[1] is None: self.m_scaling_y = 1. else: self.m_scaling_y = scaling[1] if scaling[2] is None: self.m_scaling_flux = 1. else: self.m_scaling_flux = scaling[2] self.m_pixscale = pixscale @typechecked def run(self) -> None: """ Run method of the module. Rescales an image with a fifth order spline interpolation and a reflecting boundary condition. Returns ------- NoneType None """ pixscale = self.m_image_in_port.get_attribute('PIXSCALE') self.apply_function_to_images(image_scaling, self.m_image_in_port, self.m_image_out_port, 'Scaling images', func_args=(self.m_scaling_y, self.m_scaling_x, self.m_scaling_flux)) history = f'scaling = ({self.m_scaling_x:.2f}, {self.m_scaling_y:.2f}, ' \ f'{self.m_scaling_flux:.2f})' self.m_image_out_port.add_history('ScaleImagesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) if self.m_pixscale: mean_scaling = (self.m_scaling_x+self.m_scaling_y)/2. self.m_image_out_port.add_attribute('PIXSCALE', pixscale/mean_scaling) self.m_image_out_port.close_port() class AddLinesModule(ProcessingModule): """ Module to add lines of pixels to increase the size of an image. """ @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, lines: Tuple[int, int, int, int]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output, including the images with increased size. Should be different from *image_in_tag*. lines : tuple(int, int, int, int) The number of lines that are added in left, right, bottom, and top direction. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_lines = np.asarray(lines) @typechecked def run(self) -> None: """ Run method of the module. Adds lines of zero-value pixels to increase the size of an image. 
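        For illustration only (the tag names and the ``pipeline`` object are
        placeholders, assuming an existing
        :class:`~pynpoint.core.pypeline.Pypeline` instance), the module could be
        configured as::

            # pad each image with 10 zero-valued pixels on every side
            module = AddLinesModule(name_in='pad',
                                    image_in_tag='im_arr',   # assumed existing tag
                                    image_out_tag='im_pad',
                                    lines=(10, 10, 10, 10))

            pipeline.add_module(module)
            pipeline.run_module('pad')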
Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) shape_in = self.m_image_in_port.get_shape() shape_out = (shape_in[-2]+int(self.m_lines[2])+int(self.m_lines[3]), shape_in[-1]+int(self.m_lines[0])+int(self.m_lines[1])) self.m_lines[1] = shape_out[1] - self.m_lines[1] # right side of image self.m_lines[3] = shape_out[0] - self.m_lines[3] # top side of image start_time = time.time() for i in range(len(frames[:-1])): progress(i, len(frames[:-1]), 'Adding lines...', start_time) image_in = self.m_image_in_port[frames[i]:frames[i+1], ] image_out = np.zeros((frames[i+1]-frames[i], shape_out[0], shape_out[1])) image_out[:, int(self.m_lines[2]):int(self.m_lines[3]), int(self.m_lines[0]):int(self.m_lines[1])] = image_in self.m_image_out_port.append(image_out, data_dim=3) history = f'number of lines = {self.m_lines}' self.m_image_out_port.add_history('AddLinesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() class RemoveLinesModule(ProcessingModule): """ Module to decrease the dimensions of an image by removing lines of pixels. """ @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, lines: Tuple[int, int, int, int]) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output, including the images with decreased size. Should be different from *image_in_tag*. lines : tuple(int, int, int, int) The number of lines that are removed in left, right, bottom, and top direction. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_lines = lines @typechecked def run(self) -> None: """ Run method of the module. Removes the lines given by *lines* from each frame. Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') nimages = self.m_image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) start_time = time.time() for i in range(len(frames[:-1])): progress(i, len(frames[:-1]), 'Removing lines...', start_time) image_in = self.m_image_in_port[frames[i]:frames[i+1], ] image_out = image_in[:, int(self.m_lines[2]):image_in.shape[1]-int(self.m_lines[3]), int(self.m_lines[0]):image_in.shape[2]-int(self.m_lines[1])] self.m_image_out_port.append(image_out, data_dim=3) history = f'number of lines = {self.m_lines}' self.m_image_out_port.add_history('RemoveLinesModule', history) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/stacksubset.py000066400000000000000000000571421450275315200223270ustar00rootroot00000000000000""" Pipeline modules for stacking and subsampling of images. """ import time import warnings from typing import List, Optional, Tuple import numpy as np from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.module import progress, memory_frames, stack_angles, angle_average from pynpoint.util.image import rotate_images class StackAndSubsetModule(ProcessingModule): """ Pipeline module for stacking subsets of images and/or selecting a random sample of images. 
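    A minimal usage sketch (illustrative only; it assumes an existing
    :class:`~pynpoint.core.pypeline.Pypeline` instance called ``pipeline`` and a
    database tag ``im_arr`` that contains the science images)::

        # mean-combine every 100 consecutive images into a single frame
        module = StackAndSubsetModule(name_in='stack',
                                      image_in_tag='im_arr',
                                      image_out_tag='im_stack',
                                      random=None,
                                      stacking=100,
                                      combine='mean')

        pipeline.add_module(module)
        pipeline.run_module('stack')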
""" @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, random: Optional[int] = None, stacking: Optional[int] = None, combine: str = 'mean', max_rotation: Optional[float] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. Should be different from *image_in_tag*. random : int, None Number of random images. All images are used if set to None. stacking : int, None Number of stacked images per subset. No stacking is applied if set to None. combine : str Method for combining images ('mean' or 'median'). The angles are always mean-combined. max_rotation : float, None Maximum allowed field rotation (deg) throughout each subset of stacked images when `stacking` is not None. No restriction on the field rotation is applied if set to None. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_random = random self.m_stacking = stacking self.m_combine = combine self.m_max_rotation = max_rotation if self.m_stacking is None and self.m_random is None: warnings.warn('Both \'stacking\' and \'random\' are set to None.') @typechecked def run(self) -> None: """ Run method of the module. Stacks subsets of images and/or selects a random subset. Also the parallactic angles are mean-combined if images are stacked. Returns ------- NoneType None """ @typechecked def _stack_subsets(nimages: int, im_shape: Tuple[int, ...], parang: np.ndarray) -> Tuple[Tuple[int, ...], np.ndarray, np.ndarray]: im_new = None parang_new = None if self.m_stacking is not None: if self.m_max_rotation is not None: frames = stack_angles(self.m_stacking, parang, self.m_max_rotation) else: frames = memory_frames(self.m_stacking, nimages) nimages_new = np.size(frames)-1 if parang is None: parang_new = None else: parang_new = np.zeros(nimages_new) im_new = np.zeros((nimages_new, im_shape[1], im_shape[2])) start_time = time.time() for i in range(nimages_new): progress(i, nimages_new, 'Stacking subsets of images...', start_time) if parang is not None: # parang_new[i] = np.mean(parang[frames[i]:frames[i+1]]) parang_new[i] = angle_average(parang[frames[i]:frames[i+1]]) im_subset = self.m_image_in_port[frames[i]:frames[i+1], ] if self.m_combine == 'mean': im_new[i, ] = np.mean(im_subset, axis=0) elif self.m_combine == 'median': im_new[i, ] = np.median(im_subset, axis=0) im_shape = im_new.shape else: if parang is not None: parang_new = np.copy(parang) return im_shape, im_new, parang_new @typechecked def _random_subset(im_shape: Tuple[int, ...], im_new: np.ndarray, parang_new: np.ndarray) -> Tuple[int, np.ndarray, np.ndarray]: if self.m_random is not None: choice = np.random.choice(im_shape[0], self.m_random, replace=False) choice = list(np.sort(choice)) if parang_new is None: parang_new = None else: parang_new = parang_new[choice] if self.m_stacking is None: im_new = self.m_image_in_port[list(choice), ] else: im_new = im_new[choice, ] if self.m_random is None and self.m_stacking is None: nimages = 0 elif im_new.ndim == 2: nimages = 1 elif im_new.ndim == 3: nimages = im_new.shape[0] return nimages, im_new, parang_new non_static = self.m_image_in_port.get_all_non_static_attributes() im_shape = self.m_image_in_port.get_shape() nimages = im_shape[0] if self.m_random is not None: if self.m_stacking is None and 
im_shape[0] < self.m_random: raise ValueError('The number of images of the destination subset is larger than ' 'the number of images in the source.') if self.m_stacking is not None and \ int(float(im_shape[0])/float(self.m_stacking)) < self.m_random: raise ValueError('The number of images of the destination subset is larger than ' 'the number of images in the stacked source.') if 'PARANG' in non_static: parang = self.m_image_in_port.get_attribute('PARANG') else: parang = None im_shape, im_new, parang_new = _stack_subsets(nimages, im_shape, parang) nimages, im_new, parang_new = _random_subset(im_shape, im_new, parang_new) if self.m_random or self.m_stacking: self.m_image_out_port.set_all(im_new, keep_attributes=True) self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_attribute('INDEX', np.arange(0, nimages, 1), static=False) if parang_new is not None: self.m_image_out_port.add_attribute('PARANG', parang_new, static=False) if 'NFRAMES' in non_static: self.m_image_out_port.del_attribute('NFRAMES') history = f'stacking = {self.m_stacking}, random = {self.m_random}' self.m_image_out_port.add_history('StackAndSubsetModule', history) self.m_image_out_port.close_port() class StackCubesModule(ProcessingModule): """ Pipeline module for calculating the mean or median of each original data cube associated with a database tag. """ @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, combine: str = 'mean') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry with the mean or median collapsed images that are written as output. Should be different from *image_in_tag*. combine : str Method to combine the images ('mean' or 'median'). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_combine = combine @typechecked def run(self) -> None: """ Run method of the module. Uses the ``NFRAMES`` attribute to select the images of each cube, calculates the mean or median of each cube, and saves the data and attributes. 
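        For illustration (tag names and the ``pipeline`` object are placeholders
        for an existing Pypeline instance), collapsing each original FITS cube to
        its mean frame could look like::

            # one output image per cube, based on the NFRAMES attribute
            module = StackCubesModule(name_in='stack_cubes',
                                      image_in_tag='im_arr',
                                      image_out_tag='im_cube_mean',
                                      combine='mean')

            pipeline.add_module(module)
            pipeline.run_module('stack_cubes')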
Returns ------- NoneType None """ if self.m_image_in_port.tag == self.m_image_out_port.tag: raise ValueError('Input and output port should have a different tag.') non_static = self.m_image_in_port.get_all_non_static_attributes() nframes = self.m_image_in_port.get_attribute('NFRAMES') if 'PARANG' in non_static: parang = self.m_image_in_port.get_attribute('PARANG') else: parang = None current = 0 parang_new = [] start_time = time.time() for i, frames in enumerate(nframes): progress(i, len(nframes), 'Stacking images per FITS cube...', start_time) if self.m_combine == 'mean': im_stack = np.mean(self.m_image_in_port[current:current+frames, ], axis=0) elif self.m_combine == 'median': im_stack = np.median(self.m_image_in_port[current:current+frames, ], axis=0) self.m_image_out_port.append(im_stack, data_dim=3) if parang is not None: parang_new.append(np.mean(parang[current:current+frames])) current += frames nimages = np.size(nframes) self.m_image_out_port.copy_attributes(self.m_image_in_port) if 'INDEX' in non_static: index = np.arange(0, nimages, 1, dtype=int) self.m_image_out_port.add_attribute('INDEX', index, static=False) if 'NFRAMES' in non_static: nframes = np.ones(nimages, dtype=int) self.m_image_out_port.add_attribute('NFRAMES', nframes, static=False) if 'PARANG' in non_static: self.m_image_out_port.add_attribute('PARANG', parang_new, static=False) self.m_image_out_port.close_port() class DerotateAndStackModule(ProcessingModule): """ Pipeline module for derotating and/or stacking (i.e., taking the median or average) of the images, either along the time or the wavelengths dimension. """ @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, derotate: bool = True, stack: Optional[str] = None, extra_rot: float = 0., dimension: str = 'time') -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tag : str Tag of the database entry that is read as input. image_out_tag : str Tag of the database entry that is written as output. The shape of the output data is equal to the data from ``image_in_tag``. If the argument of ``stack`` is not None, then the size of the collapsed dimension is equal to 1. derotate : bool Derotate the images with the ``PARANG`` attribute. stack : str Type of stacking applied after optional derotation ('mean', 'median', or None for no stacking). extra_rot : float Additional rotation angle of the images in clockwise direction (deg). dimension : str Dimension along which the images are stacked. Can either be 'time' or 'wavelength'. If the ``image_in_tag`` has three dimensions then ``dimension`` is always fixed to 'time'. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_derotate = derotate self.m_stack = stack self.m_extra_rot = extra_rot self.m_dimension = dimension @typechecked def run(self) -> None: """ Run method of the module. Uses the ``PARANG`` attributes to derotate the images (if ``derotate`` is set to ``True``) and applies an optional mean or median stacking along the time or wavelengths dimension afterwards. 
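        A hypothetical example (assuming that a ``PARANG`` attribute is attached
        to the placeholder tag ``im_arr`` and that ``pipeline`` is an existing
        Pypeline instance)::

            # derotate all images and median-combine them along the time axis
            module = DerotateAndStackModule(name_in='derotate',
                                            image_in_tag='im_arr',
                                            image_out_tag='im_median',
                                            derotate=True,
                                            stack='median',
                                            extra_rot=0.)

            pipeline.add_module(module)
            pipeline.run_module('derotate')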
Returns ------- NoneType None """ @typechecked def _initialize(ndim: int, npix: int) -> Tuple[int, np.ndarray, Optional[np.ndarray], Optional[np.ndarray]]: if ndim == 2: nimages = 1 elif ndim == 3: nimages = self.m_image_in_port.get_shape()[-3] if self.m_stack == 'median': frames = np.array([0, nimages]) else: frames = memory_frames(memory, nimages) elif ndim == 4: nimages = self.m_image_in_port.get_shape()[-3] nwave = self.m_image_in_port.get_shape()[-4] if self.m_dimension == 'time': frames = np.linspace(0, nwave, nwave+1) elif self.m_dimension == 'wavelength': frames = np.linspace(0, nimages, nimages+1) else: raise ValueError('The dimension should be set to \'time\' or \'wavelength\'.') if self.m_stack == 'mean': if ndim == 4: if self.m_dimension == 'time': im_tot = np.zeros((nwave, npix, npix)) elif self.m_dimension == 'wavelength': im_tot = np.zeros((nimages, npix, npix)) else: im_tot = np.zeros((npix, npix)) else: im_tot = None if self.m_stack is None and ndim == 4: im_none = np.zeros((nwave, nimages, npix, npix)) else: im_none = None return nimages, frames, im_tot, im_none memory = self._m_config_port.get_attribute('MEMORY') if self.m_derotate: parang = self.m_image_in_port.get_attribute('PARANG') ndim = self.m_image_in_port.get_ndim() npix = self.m_image_in_port.get_shape()[-2] nimages, frames, im_tot, im_none = _initialize(ndim, npix) start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Derotating and/or stacking images...', start_time) if ndim == 3: # Get the images and ensure they have the correct 3D shape with the following # three dimensions: (batch_size, height, width) images = self.m_image_in_port[frames[i]:frames[i+1], ] elif ndim == 4: # Process all time frames per exposure at once if self.m_dimension == 'time': images = self.m_image_in_port[i, :, ] elif self.m_dimension == 'wavelength': images = self.m_image_in_port[:, i, ] if self.m_derotate: if ndim == 4: if self.m_dimension == 'time': angles = -1.*parang + self.m_extra_rot elif self.m_dimension == 'wavelength': n_wavel = self.m_image_in_port.get_shape()[-4] angles = np.full(n_wavel, -1.*parang[i]) + self.m_extra_rot else: angles = -parang[frames[i]:frames[i+1]]+self.m_extra_rot images = rotate_images(images, angles) if self.m_stack is None: if ndim == 2: self.m_image_out_port.set_all(images[np.newaxis, ...]) elif ndim == 3: self.m_image_out_port.append(images, data_dim=3) elif ndim == 4: if self.m_dimension == 'time': im_none[i] = images elif self.m_dimension == 'wavelength': im_none[:, i] = images elif self.m_stack == 'mean': if ndim == 4: im_tot[i] = np.sum(images, axis=0) else: im_tot += np.sum(images, axis=0) if self.m_stack == 'mean': if ndim == 4: im_stack = im_tot/float(im_tot.shape[0]) if self.m_dimension == 'time': self.m_image_out_port.set_all(im_stack[:, np.newaxis, ...]) elif self.m_dimension == 'wavelength': self.m_image_out_port.set_all(im_stack[np.newaxis, ...]) else: im_stack = im_tot/float(nimages) self.m_image_out_port.set_all(im_stack[np.newaxis, ...]) elif self.m_stack == 'median': if ndim == 4: images = self.m_image_in_port[:] if self.m_dimension == 'time': im_stack = np.median(images, axis=1) self.m_image_out_port.set_all(im_stack[:, np.newaxis, ...]) elif self.m_dimension == 'wavelength': im_stack = np.median(images, axis=0) self.m_image_out_port.set_all(im_stack[np.newaxis, ...]) else: im_stack = np.median(images, axis=0) self.m_image_out_port.set_all(im_stack[np.newaxis, ...]) elif self.m_stack is None and ndim == 4: if self.m_dimension == 'time': 
self.m_image_out_port.set_all(im_none) elif self.m_dimension == 'wavelength': self.m_image_out_port.set_all(im_none) if self.m_derotate or self.m_stack is not None: self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.close_port() class CombineTagsModule(ProcessingModule): """ Pipeline module for combining tags from multiple database entries into a single tag. """ @typechecked def __init__(self, name_in: str, image_in_tags: List[str], image_out_tag: str, check_attr: bool = True, index_init: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. image_in_tags : list(str, ) Tags of the database entries that are read as input and combined. image_out_tag : str Tag of the database entry that is written as output. Should not be present in *image_in_tags*. check_attr : bool Compare non-static attributes between the tags or combine all non-static attributes into the new database tag. index_init : bool Reinitialize the ``INDEX`` attribute. The frames are indexed in the order of tags names that are provided in *image_in_tags*. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_out_port = self.add_output_port(image_out_tag) if image_out_tag in image_in_tags: raise ValueError('The \'image_out_tag\' should not be present in \'image_in_tags\'.') if len(image_in_tags) < 2: raise ValueError('The \'image_in_tags\' should contain at least two tags.') self.m_image_in_tags = image_in_tags self.m_check_attr = check_attr self.m_index_init = index_init @typechecked def run(self) -> None: """ Run method of the module. Combines the frames of multiple tags into a single dataset and adds the static and non-static attributes. The values of the attributes are compared between the input tags to make sure that the input tags descent from the same data set. Returns ------- NoneType None """ memory = self._m_config_port.get_attribute('MEMORY') image_in_port = [] im_shape = [] for i, item in enumerate(self.m_image_in_tags): image_in_port.append(self.add_input_port(item)) im_shape.append(image_in_port[i].get_shape()[-2:]) if len(set(im_shape)) > 1: raise ValueError('The size of the images should be the same for all datasets.') count = 0 start_time = time.time() for i, item in enumerate(self.m_image_in_tags): progress(i, len(self.m_image_in_tags), 'Combining datasets...', start_time) nimages = image_in_port[i].get_shape()[0] frames = memory_frames(memory, nimages) for j, _ in enumerate(frames[:-1]): im_tmp = image_in_port[i][frames[j]:frames[j+1], ] self.m_image_out_port.append(im_tmp) if self.m_index_init: index = np.arange(frames[j], frames[j+1], 1) + count if i == 0 and j == 0: self.m_image_out_port.add_attribute('INDEX', index, static=False) else: for ind in index: self.m_image_out_port.append_attribute_data('INDEX', ind) static_attr = image_in_port[i].get_all_static_attributes() non_static_attr = image_in_port[i].get_all_non_static_attributes() for key in static_attr: status = self.m_image_out_port.check_static_attribute(key, static_attr[key]) if status == 1: self.m_image_out_port.add_attribute(key, static_attr[key], static=True) elif status == -1 and key[0:7] != 'History': warnings.warn(f'The static keyword {key} is already used but with a different ' f'value. 
It is advisable to only combine tags that descend from ' f'the same data set.') for key in non_static_attr: values = image_in_port[i].get_attribute(key) status = self.m_image_out_port.check_non_static_attribute(key, values) if key != 'INDEX' or (key == 'INDEX' and not self.m_index_init): if self.m_check_attr: if key in ('PARANG', 'STAR_POSITION', 'INDEX', 'NFRAMES'): if status == 1: self.m_image_out_port.add_attribute(key, values, static=False) else: for j in values: self.m_image_out_port.append_attribute_data(key, j) else: if status == 1: self.m_image_out_port.add_attribute(key, values, static=False) if status == -1: warnings.warn(f'The non-static keyword {key} is already used but ' f'with different values. It is advisable to only ' f'combine tags that descend from the same data set.') else: if status == 1: self.m_image_out_port.add_attribute(key, values, static=False) else: for j in values: self.m_image_out_port.append_attribute_data(key, j) count += nimages history = f'number of input tags = {np.size(self.m_image_in_tags)}' self.m_image_out_port.add_history('CombineTagsModule', history) self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/processing/timedenoising.py000066400000000000000000000200711450275315200226210ustar00rootroot00000000000000""" Continuous wavelet transform (CWT) and discrete wavelet transform (DWT) denoising for speckle suppression in the time domain. The module can be used as additional preprocessing step. See Bonse et al. (arXiv:1804.05063) more information. """ from typing import Union import pywt from typeguard import typechecked from pynpoint.core.processing import ProcessingModule from pynpoint.util.apply_func import cwt_denoise_line_in_time, dwt_denoise_line_in_time, \ normalization class CwtWaveletConfiguration: """ Configuration capsule for a CWT based time denoising. Standard configuration as in the original paper. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, wavelet: str = 'dog', wavelet_order: int = 2, keep_mean: bool = False, resolution: float = 0.5) -> None: """ Parameters ---------- wavelet : str Wavelet. wavelet_order : int Wavelet order. keep_mean : bool Keep mean. resolution : float Resolution. Returns ------- NoneType None """ if wavelet not in ['dog', 'morlet']: raise ValueError('CWT supports only \'dog\' and \'morlet\' wavelets.') self.m_wavelet = wavelet self.m_wavelet_order = wavelet_order self.m_keep_mean = keep_mean self.m_resolution = resolution class DwtWaveletConfiguration: """ Configuration capsule for a DWT based time denoising. A cheap alternative of the CWT based wavelet denoising. However, the supported wavelets should perform worse compared to the CWT DOG wavelet. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, wavelet: str = 'db8') -> None: """ Parameters ---------- wavelet : str Wavelet. Returns ------- NoneType None """ # create list of supported wavelets supported = [] for item in pywt.families(): supported += pywt.wavelist(item) # check if wavelet is supported if wavelet not in supported: raise ValueError(f'DWT supports only {supported} as input wavelet.') self.m_wavelet = wavelet class WaveletTimeDenoisingModule(ProcessingModule): """ Pipeline module for speckle subtraction in the time domain by using CWT or DWT wavelet shrinkage. See Bonse et al. (arXiv:1804.05063) for details. 
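    A minimal sketch of how the module could be combined with a CWT configuration
    (illustrative only; tag names and the ``pipeline`` object are placeholders for
    an existing Pypeline instance)::

        # DOG wavelet of order 2, the default configuration
        wavelet = CwtWaveletConfiguration(wavelet='dog',
                                          wavelet_order=2,
                                          keep_mean=False,
                                          resolution=0.5)

        module = WaveletTimeDenoisingModule(name_in='denoise',
                                            image_in_tag='im_arr',
                                            image_out_tag='im_denoised',
                                            wavelet_configuration=wavelet,
                                            padding='zero',
                                            median_filter=True,
                                            threshold_function='soft')

        pipeline.add_module(module)
        pipeline.run_module('denoise')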
""" __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str, wavelet_configuration: Union[CwtWaveletConfiguration, DwtWaveletConfiguration], padding: str = 'zero', median_filter: bool = False, threshold_function: str = 'soft') -> None: """ Parameters ---------- name_in : str Unique name for the pipeline module. image_in_tag : str Database tag with the input data. image_out_tag : str Database tag for the output data. wavelet_configuration : pynpoint.processing.timedenoising.CwtWaveletConfiguration or \ pynpoint.processing.timedenoising.DwtWaveletConfiguration Instance of :class:`~pynpoint.processing.timedenoising.DwtWaveletConfiguration` or :class:`~pynpoint.processing.timedenoising.CwtWaveletConfiguration` which contains the parameters for the wavelet transformation. padding : str Padding method (``'zero'``, ``'mirror'``, or ``'none'``). median_filter : bool Apply a median filter in time to remove outliers, for example due to cosmic rays. threshold_function : str Threshold function that is used for wavelet shrinkage in the wavelet space (``'soft'`` or ``'hard'``). Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) self.m_wavelet_configuration = wavelet_configuration self.m_median_filter = median_filter assert padding in ['zero', 'mirror', 'none'] self.m_padding = padding assert threshold_function in ['soft', 'hard'] self.m_threshold_function = threshold_function == 'soft' assert isinstance(wavelet_configuration, (DwtWaveletConfiguration, CwtWaveletConfiguration)) @typechecked def run(self) -> None: """ Run method of the module. Applies the time denoising for the lines in time in parallel. Returns ------- NoneType None """ if isinstance(self.m_wavelet_configuration, DwtWaveletConfiguration): if self.m_padding == 'const_mean': self.m_padding = 'constant' if self.m_padding == 'none': self.m_padding = 'periodic' self.apply_function_in_time(dwt_denoise_line_in_time, self.m_image_in_port, self.m_image_out_port, func_args=(self.m_threshold_function, self.m_padding, self.m_wavelet_configuration)) elif isinstance(self.m_wavelet_configuration, CwtWaveletConfiguration): self.apply_function_in_time(cwt_denoise_line_in_time, self.m_image_in_port, self.m_image_out_port, func_args=(self.m_threshold_function, self.m_padding, self.m_median_filter, self.m_wavelet_configuration)) if self.m_threshold_function: history = 'threshold_function = soft' else: history = 'threshold_function = hard' self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('WaveletTimeDenoisingModule', history) self.m_image_out_port.close_port() class TimeNormalizationModule(ProcessingModule): """ Pipeline module for normalization of global brightness variations of the detector. See Bonse et al. (arXiv:1804.05063) for details. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, image_in_tag: str, image_out_tag: str) -> None: """ Parameters ---------- name_in : str Unique name for the pipeline module. image_in_tag : str Database tag with the input data. image_out_tag : str Database tag for the output data. Returns ------- NoneType None """ super().__init__(name_in) self.m_image_in_port = self.add_input_port(image_in_tag) self.m_image_out_port = self.add_output_port(image_out_tag) @typechecked def run(self) -> None: """ Run method of the module. 
Returns ------- NoneType None """ self.apply_function_to_images(normalization, self.m_image_in_port, self.m_image_out_port, 'Time normalization') self.m_image_out_port.copy_attributes(self.m_image_in_port) self.m_image_out_port.add_history('TimeNormalizationModule', 'normalization = median') self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/readwrite/000077500000000000000000000000001450275315200172235ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/readwrite/__init__.py000066400000000000000000000000001450275315200213220ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/readwrite/attr_reading.py000066400000000000000000000244341450275315200222470ustar00rootroot00000000000000""" Modules for reading attributes from a FITS or ASCII file. """ import os import warnings from typing import Optional import numpy as np from astropy.io import fits from typeguard import typechecked from pynpoint.core.attributes import get_attributes from pynpoint.core.processing import ReadingModule class AttributeReadingModule(ReadingModule): """ Module for reading a list of values from a FITS or ASCII file and appending them as a non-static attributes to a dataset. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, file_name: str, attribute: str, input_dir: Optional[str] = None, overwrite: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry to which the attribute is written. file_name : str Name of the input file with the attribute value. Should be equal in size to the number of images in ``data_tag``. In case the ``file_name`` is ending with ``.fits``, then a FITS file is read. Otherwise, a single column of values is expected in an ASCII file. file_name : str Name of the input file with a list of values. attribute : str Name of the attribute as to be written in the database. input_dir : str, None Input directory where the input file is located. If not specified the Pypeline default directory is used. overwrite : bool Overwrite if the attribute is already exists. Returns ------- NoneType None """ super().__init__(name_in, input_dir=input_dir) self.m_data_port = self.add_output_port(data_tag) self.m_file_name = file_name self.m_attribute = attribute self.m_overwrite = overwrite @typechecked def run(self) -> None: """ Run method of the module. Reads a list of values from a FITS or ASCII file and writes them as non-static attribute to a dataset. Returns ------- NoneType None """ print('Reading attribute data...', end='') attributes = get_attributes() if self.m_attribute not in attributes: raise ValueError(f'\'{self.m_attribute}\' is not a valid attribute.') if self.m_file_name.endswith('fits'): values = fits.getdata(os.path.join(self.m_input_location, self.m_file_name)) else: values = np.loadtxt(os.path.join(self.m_input_location, self.m_file_name), dtype=attributes[self.m_attribute]['type']) if values.ndim != 1: raise ValueError(f'The input file {self.m_file_name} should contain a 1D list with ' f'attributes.') status = self.m_data_port.check_non_static_attribute(self.m_attribute, values) if status == 1: self.m_data_port.add_attribute(self.m_attribute, values, static=False) elif status == -1 and self.m_overwrite: self.m_data_port.add_attribute(self.m_attribute, values, static=False) elif status == -1 and not self.m_overwrite: warnings.warn(f'The attribute \'{self.m_attribute}\' is already present. 
Set the ' f'\'overwrite\' parameter to True in order to overwrite the values with ' f'{self.m_file_name}.') elif status == 0: warnings.warn(f'The \'{self.m_attribute}\' attribute is already present and ' f'contains the same values as are present in {self.m_file_name}.') print(' [DONE]') self.m_data_port.close_port() class ParangReadingModule(ReadingModule): """ Module for reading a list of parallactic angles from a FITS or ASCII file. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, file_name: str, input_dir: Optional[str] = None, overwrite: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry to which the ``PARANG`` attribute is written. file_name : str Name of the input file with the parallactic angles (deg). Should be equal in size to the number of images in ``data_tag``. In case the ``file_name`` is ending with ``.fits``, then a FITS file is read. Otherwise, a single column of values is expected in an ASCII file. input_dir : str, None Input directory where the input file is located. If not specified the Pypeline default directory is used. overwrite : bool Overwrite if the ``PARANG`` attribute already exists. Returns ------- NoneType None """ super().__init__(name_in, input_dir=input_dir) self.m_data_port = self.add_output_port(data_tag) self.m_file_name = file_name self.m_overwrite = overwrite @typechecked def run(self) -> None: """ Run method of the module. Reads the parallactic angles from a FITS or ASCII file and writes the values as non-static attribute (``PARANG``) to the database tag. Returns ------- NoneType None """ print('Reading parallactic angles...', end='') if self.m_file_name.endswith('fits'): parang = fits.getdata(os.path.join(self.m_input_location, self.m_file_name)) else: parang = np.loadtxt(os.path.join(self.m_input_location, self.m_file_name)) print(' [DONE]') if parang.ndim != 1: raise ValueError(f'The input file {self.m_file_name} should contain a 1D data set with ' f'the parallactic angles.') print(f'Number of angles: {parang.size}') print(f'Rotation range: {parang[0]:.2f} -> {parang[-1]:.2f} deg') status = self.m_data_port.check_non_static_attribute('PARANG', parang) if status == 1: self.m_data_port.add_attribute('PARANG', parang, static=False) elif status == -1 and self.m_overwrite: self.m_data_port.add_attribute('PARANG', parang, static=False) elif status == -1 and not self.m_overwrite: warnings.warn(f'The PARANG attribute is already present. Set the \'overwrite\' ' f'parameter to True in order to overwrite the values with ' f'{self.m_file_name}.') elif status == 0: warnings.warn(f'The PARANG attribute is already present and contains the same values ' f'as are present in {self.m_file_name}.') self.m_data_port.close_port() class WavelengthReadingModule(ReadingModule): """ Module for reading a list of wavelengths from a FITS or ASCII file. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, file_name: str, input_dir: Optional[str] = None, overwrite: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry to which the ``WAVELENGTH`` attribute is written. file_name : str Name of the input file with the wavelengths (a.u.). Should be equal in size to the number of images in ``data_tag``. In case the ``file_name`` is ending with ``.fits``, then a FITS file is read. 
Otherwise, a single column of values is expected in an ASCII file. input_dir : str, None Input directory where the input file is located. If not specified the Pypeline default directory is used. overwrite : bool Overwrite if the ``WAVELENGTH`` attribute already exists. Returns ------- NoneType None """ super().__init__(name_in, input_dir=input_dir) self.m_data_port = self.add_output_port(data_tag) self.m_file_name = file_name self.m_overwrite = overwrite @typechecked def run(self) -> None: """ Run method of the module. Reads the parallactic angles from a FITS or ASCII file and writes the values as non-static attribute (``WAVELENGTH``) to the database tag. Returns ------- NoneType None """ print('Reading wavelengths...', end='') if self.m_file_name.endswith('fits'): wavelength = fits.getdata(os.path.join(self.m_input_location, self.m_file_name)) else: wavelength = np.loadtxt(os.path.join(self.m_input_location, self.m_file_name)) print(' [DONE]') if wavelength.ndim != 1: raise ValueError(f'The input file {self.m_file_name} should contain a 1D data set with ' f'the wavelengths.') print(f'Number of wavelengths: {wavelength.size}') print(f'Wavelength range: {wavelength[0]:.2f} - {wavelength[-1]:.2f}') status = self.m_data_port.check_non_static_attribute('WAVELENGTH', wavelength) if status == 1: self.m_data_port.add_attribute('WAVELENGTH', wavelength, static=False) elif status == -1 and self.m_overwrite: self.m_data_port.add_attribute('WAVELENGTH', wavelength, static=False) elif status == -1 and not self.m_overwrite: warnings.warn(f'The WAVELENGTH attribute is already present. Set the \'overwrite\' ' f'parameter to True in order to overwrite the values with ' f'{self.m_file_name}.') elif status == 0: warnings.warn(f'The WAVELENGTH attribute is already present and contains the same ' f'values as are present in {self.m_file_name}.') self.m_data_port.close_port() PynPoint-0.11.0/pynpoint/readwrite/attr_writing.py000066400000000000000000000110551450275315200223140ustar00rootroot00000000000000""" Modules for writing data as text file. """ import os from typing import Optional import numpy as np from typeguard import typechecked from pynpoint.core.processing import WritingModule class AttributeWritingModule(WritingModule): """ Module for writing a 1D or 2D array of non-static attributes to a text file. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, attribute: str, file_name: str = 'attributes.dat', output_dir: Optional[str] = None, header: Optional[str] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry from which the ``PARANG`` attribute is read. attribute : str Name of the non-static attribute as given in the central database (e.g., 'INDEX' or 'STAR_POSITION'). file_name : str Name of the output file. output_dir : str, None Output directory where the text file will be stored. If no path is specified then the Pypeline default output location is used. header : str, None Header that is written at the top of the text file. Returns ------- NoneType None """ super().__init__(name_in, output_dir=output_dir) self.m_data_port = self.add_input_port(data_tag) self.m_file_name = file_name self.m_attribute = attribute self.m_header = header @typechecked def run(self) -> None: """ Run method of the module. Writes the non-static attributes (1D or 2D) to a a text file. 
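        A hypothetical configuration (tag and file names are placeholders and
        ``pipeline`` refers to an existing Pypeline instance) that exports the
        ``INDEX`` attribute to an ASCII file in the default output directory::

            module = AttributeWritingModule(name_in='write_index',
                                            data_tag='im_arr',
                                            attribute='INDEX',
                                            file_name='index.dat',
                                            header='Frame indices')

            pipeline.add_module(module)
            pipeline.run_module('write_index')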
Returns ------- NoneType None """ if self.m_header is None: self.m_header = '' print('Writing attribute data...', end='') out_name = os.path.join(self.m_output_location, self.m_file_name) if self.m_attribute not in self.m_data_port.get_all_non_static_attributes(): raise ValueError(f'The \'{self.m_attribute}\' attribute is not present in ' f'\'{self.m_data_port.tag}\'.') values = self.m_data_port.get_attribute(self.m_attribute) np.savetxt(out_name, values, header=self.m_header, comments='# ') print(' [DONE]') self.m_data_port.close_port() class ParangWritingModule(WritingModule): """ Module for writing a list of parallactic angles to a text file. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, file_name: str = 'parang.dat', output_dir: Optional[str] = None, header: Optional[str] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry from which the ``PARANG`` attribute is read. file_name : str Name of the output file. output_dir : str, None Output directory where the text file will be stored. If no path is specified then the Pypeline default output location is used. header : str, None Header that is written at the top of the text file. Returns ------- NoneType None """ super().__init__(name_in, output_dir=output_dir) self.m_data_port = self.add_input_port(data_tag) self.m_file_name = file_name self.m_header = header @typechecked def run(self) -> None: """ Run method of the module. Writes the parallactic angles from the ``PARANG`` attribute of the specified database tag to a a text file. Returns ------- NoneType None """ print('Writing parallactic angles...', end='') if self.m_header is None: self.m_header = '' out_name = os.path.join(self.m_output_location, self.m_file_name) if 'PARANG' not in self.m_data_port.get_all_non_static_attributes(): raise ValueError(f'The PARANG attribute is not present in \'{self.m_data_port.tag}\'.') parang = self.m_data_port.get_attribute('PARANG') np.savetxt(out_name, parang, header=self.m_header, comments='# ') print(' [DONE]') self.m_data_port.close_port() PynPoint-0.11.0/pynpoint/readwrite/fitsreading.py000066400000000000000000000241531450275315200221010ustar00rootroot00000000000000""" Module for reading FITS files. """ import os import time import warnings from typing import List, Optional, Tuple, Union import numpy as np from astropy.io import fits from typeguard import typechecked from pynpoint.core.processing import ReadingModule from pynpoint.util.attributes import set_static_attr, set_nonstatic_attr, set_extra_attr from pynpoint.util.module import progress class FitsReadingModule(ReadingModule): """ Reads FITS files from the given *input_dir* or the default directory of the Pypeline. The FITS files need to contain either single images (2D) or cubes of images (3D). Individual images should have the same shape and type. The header of the FITS is scanned for the required static attributes (should be identical for each FITS file) and non-static attributes. Static entries will be saved as HDF5 attributes while non-static attributes will be saved as separate data sets in a subfolder of the database named *header_* + image_tag. If the FITS files in the input directory have changing static attributes or the shape of the input images is changing a warning appears. FitsReadingModule overwrites by default all existing data with the same tags in the central database. 
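    A minimal end-to-end sketch (the folder paths and the tag name are
    placeholders; ``Pypeline`` is assumed to be importable from the top-level
    ``pynpoint`` package)::

        from pynpoint import Pypeline, FitsReadingModule

        pipeline = Pypeline(working_place_in='working_folder',
                            input_place_in='input_folder',
                            output_place_in='output_folder')

        # read all FITS files from the default input folder
        module = FitsReadingModule(name_in='read',
                                   image_tag='im_arr',
                                   overwrite=True,
                                   check=True)

        pipeline.add_module(module)
        pipeline.run_module('read')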
""" __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, input_dir: Optional[str] = None, image_tag: str = 'im_arr', overwrite: bool = True, check: bool = True, filenames: Optional[Union[str, List[str]]] = None, ifs_data: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. input_dir : str, None Input directory where the FITS files are located. If not specified the Pypeline default directory is used. image_tag : str Tag of the read data in the HDF5 database. Non static header information is stored with the tag: *header_* + image_tag / header_entry_name. overwrite : bool Overwrite existing data and header in the central database. check : bool Print a warning if certain attributes from the configuration file are not present in the FITS header. If set to `False`, attributes are still written to the dataset but there will be no warning if a keyword is not found in the FITS header. filenames : str, list(str, ), None If a string, then a path of a text file should be provided. This text file should contain a list of FITS files. If a list, then the paths of the FITS files should be provided directly. If set to None, the FITS files in the `input_dir` are read. All paths should be provided either relative to the Python working folder (i.e., the folder where Python is executed) or as absolute paths. ifs_data : bool Import IFS data which is stored as a 4D array with the wavelength and temporal dimensions as first and second dimension, respectively. If set to ``False`` (default), the data is imported as a 3D array with the temporal dimension as first dimension. Returns ------- NoneType None """ super().__init__(name_in, input_dir=input_dir) self.m_image_out_port = self.add_output_port(image_tag) self.m_overwrite = overwrite self.m_check = check self.m_filenames = filenames self.m_ifs_data = ifs_data @typechecked def read_single_file(self, fits_file: str, overwrite_tags: list) -> Tuple[fits.header.Header, tuple]: """ Function which reads a single FITS file and appends it to the database. The function gets a list of *overwriting_tags*. If a new key (header entry or image data) is found that is not on this list the old entry is overwritten if *self.m_overwrite* is active. After replacing the old entry the key is added to the *overwriting_tags*. This procedure guaranties that all previous database information, that does not belong to the new data set that is read by FitsReadingModule is replaced and the rest is kept. Parameters ---------- fits_file : str Absolute path and filename of the FITS file. overwrite_tags : list(str, ) The list of database tags that will not be overwritten. Returns ------- astropy.io.fits.header.Header FITS header. tuple(int, ) Image shape. """ hdu_list = fits.open(fits_file) if hdu_list[0].data is not None: images = hdu_list[0].data.byteswap().newbyteorder() elif len(hdu_list) > 1: for i, item in enumerate(hdu_list[1:]): if isinstance(item, fits.hdu.image.ImageHDU): warnings.simplefilter('always', UserWarning) warnings.warn(f"No data was found in the PrimaryHDU " f"so reading data from the ImageHDU " f"at number {i+1} instead.") images = hdu_list[i+1].data.byteswap().newbyteorder() break else: raise RuntimeError(f"No data was found in {fits_file}.") images = np.nan_to_num(images) if images.ndim == 4 and not self.m_ifs_data: raise ValueError('The input data is 4D but ifs_data is set to False. 
Reading in 4D ' 'data is only possible by setting the argument to True.') if images.ndim < 3 and self.m_ifs_data: raise ValueError('It is only possible to read 3D or 4D data when ifs_data is set to ' 'True.') if self.m_overwrite and self.m_image_out_port.tag not in overwrite_tags: overwrite_tags.append(self.m_image_out_port.tag) if self.m_ifs_data: self.m_image_out_port.set_all(images, data_dim=4) else: self.m_image_out_port.set_all(images, data_dim=3) self.m_image_out_port.del_all_attributes() else: if self.m_ifs_data: self.m_image_out_port.append(images, data_dim=4) else: self.m_image_out_port.append(images, data_dim=3) header = hdu_list[0].header fits_header = [] for key in header: fits_header.append(f'{key} = {header[key]}') hdu_list.close() header_out_port = self.add_output_port('fits_header/'+os.path.basename(fits_file)) header_out_port.set_all(fits_header) return header, images.shape @typechecked def _txt_file_list(self) -> list: """ Internal function to import a list of FITS files from a text file. """ with open(self.m_filenames) as file_obj: files = file_obj.readlines() # remove newlines files = [x.strip() for x in files] # remove of empty lines files = filter(None, files) return list(files) @typechecked def run(self) -> None: """ Run method of the module. Looks for all FITS files in the input directory and imports the images into the database. Note that previous database information is overwritten if ``overwrite=True``. The filenames are stored as attributes. Returns ------- NoneType None """ files = [] if isinstance(self.m_filenames, str): files = self._txt_file_list() for item in files: if not os.path.isfile(item): raise ValueError(f'The file {item} does not exist. Please check that the ' f'path is correct.') elif isinstance(self.m_filenames, list): files = self.m_filenames for item in files: if not os.path.isfile(item): raise ValueError(f'The file {item} does not exist. Please check that the ' f'path is correct.') elif isinstance(self.m_filenames, type(None)): for filename in os.listdir(self.m_input_location): if filename.endswith('.fits') and not filename.startswith('._'): files.append(os.path.join(self.m_input_location, filename)) assert files, 'No FITS files found in %s.' % self.m_input_location files.sort() overwrite_tags = [] first_index = 0 start_time = time.time() for i, fits_file in enumerate(files): progress(i, len(files), 'Reading FITS files...', start_time) header, shape = self.read_single_file(fits_file, overwrite_tags) if len(shape) == 2: nimages = 1 elif len(shape) == 3: if self.m_ifs_data: nimages = 1 else: nimages = shape[0] elif len(shape) == 4: nimages = shape[1] else: raise ValueError('Data read from FITS file has an invalid shape.') set_static_attr(fits_file=fits_file, header=header, config_port=self._m_config_port, image_out_port=self.m_image_out_port, check=self.m_check) set_nonstatic_attr(header=header, config_port=self._m_config_port, image_out_port=self.m_image_out_port, check=self.m_check) set_extra_attr(fits_file=fits_file, nimages=nimages, config_port=self._m_config_port, image_out_port=self.m_image_out_port, first_index=first_index) first_index += nimages self.m_image_out_port.flush() self.m_image_out_port.close_port() PynPoint-0.11.0/pynpoint/readwrite/fitswriting.py000066400000000000000000000127651450275315200221610ustar00rootroot00000000000000""" Module for exporting a dataset from the HDF5 database to a FITS file. 
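For illustration (tag and file names are placeholders, and ``pipeline`` refers to
an existing Pypeline instance), a dataset could be exported as::

    module = FitsWritingModule(name_in='write_fits',
                               data_tag='im_arr',
                               file_name='images.fits',
                               data_range=None,
                               overwrite=True)

    pipeline.add_module(module)
    pipeline.run_module('write_fits')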
""" import os import warnings from typing import Optional, Tuple from astropy.io import fits from typeguard import typechecked import numpy as np from pynpoint.core.processing import WritingModule from pynpoint.util.module import memory_frames class FitsWritingModule(WritingModule): """ Module for writing a dataset from the central HDF5 database to a FITS file. The static attributes will be stored as header information. The dataset is selected from the database by its tag name. :class:`~pynpoint.readwrite.fitswriting.FitsWritingModule` is a :class:`~pynpoint.core.processing.WritingModule` and uses either the default output directory of a :class:`~pynpoint.core.pypeline.Pypeline` or a specified location to store the FITS data. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, file_name: str, output_dir: Optional[str] = None, data_range: Optional[Tuple[int, int]] = None, overwrite: bool = True, subset_size: Optional[int] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry that has to be exported to a FITS file. file_name : str Name of the FITS output file. Requires the FITS extension. output_dir : str, None Output directory where the FITS file will be stored. If no folder is specified the Pypeline default is chosen. data_range : tuple, None A two element tuple which specifies a begin and end frame of the export. This can be used to save a subsets of a large dataset. The whole dataset will be exported if set to None. overwrite : bool Overwrite an existing FITS file with an identical filename. subset_size : int, None Size of the subsets that are created when storing the data. This can be useful if the dataset contains a large number of images. An increasing index value is appended to the FITS file names. All images are written to a single FITS file if set to None. Returns ------- NoneType None """ super().__init__(name_in, output_dir=output_dir) if not file_name.endswith('.fits'): raise ValueError('Output \'file_name\' requires the FITS extension.') self.m_file_name = file_name self.m_data_port = self.add_input_port(data_tag) self.m_range = data_range self.m_overwrite = overwrite self.m_subset_size = subset_size @typechecked def run(self) -> None: """ Run method of the module. Creates a FITS file and stores the data and the corresponding static attributes. Returns ------- NoneType None """ out_name = os.path.join(self.m_output_location, self.m_file_name) print('Writing FITS file...', end='') if os.path.isfile(out_name) and not self.m_overwrite: warnings.warn('Filename already present. Use overwrite=True to overwrite an existing ' 'FITS file.') else: header = fits.Header() attributes = self.m_data_port.get_all_static_attributes() for attr in attributes: if len(attr) > 8: # Check if the header keyword together with its value is # too long for the FITS format. If that is the case, raise # a warning and truncate the value to avoid a ValueError. key = 'hierarch ' + attr value = str(attributes[attr]) max_val_len = 75 - len(key) if len(key + value) > 75: warnings.warn(f'Key \'{key}\' with value \'{value}\' is too long for ' f'the FITS format. 
To avoid an error, the value was ' f'truncated to \'{value[:max_val_len]}\'.') header[key] = value[:max_val_len] else: header[attr] = attributes[attr] if self.m_subset_size is None: if self.m_range is None: frames = [0, self.m_data_port.get_shape()[0]] else: frames = [self.m_range[0], self.m_range[1]] else: if self.m_range is None: nimages = self.m_data_port.get_shape()[0] frames = memory_frames(self.m_subset_size, nimages) else: nimages = self.m_range[1] - self.m_range[0] frames = memory_frames(self.m_subset_size, nimages) frames = np.asarray(frames) + self.m_range[0] for i, _ in enumerate(frames[:-1]): data_select = self.m_data_port[frames[i]:frames[i+1], ] if len(frames) == 2: fits.writeto(out_name, data_select, header, overwrite=self.m_overwrite) else: filename = f'{out_name[:-5]}{i:03d}.fits' fits.writeto(filename, data_select, header, overwrite=self.m_overwrite) print(' [DONE]') self.m_data_port.close_port() PynPoint-0.11.0/pynpoint/readwrite/hdf5reading.py000066400000000000000000000120101450275315200217470ustar00rootroot00000000000000""" Module for reading HDF5 files that were created with the :class:`~pynpoint.readwrite.hdf5writing.Hdf5WritingModule`. """ import os import time import warnings from typing import Optional import h5py import numpy as np from typeguard import typechecked from pynpoint.core.processing import ReadingModule from pynpoint.util.module import progress class Hdf5ReadingModule(ReadingModule): """ Reads an HDF5 file from the given *input_dir* or the default directory of the Pypeline. A tag dictionary has to be set in order to choose the datasets which will be imported into the database. Also the static and non-static attributes are read from the HDF5 file and stored in the database with the corresponding data set. This module should only be used for reading HDF5 files that are created with the Hdf5WritingModule. Reading different type of HDF5 files may lead to inconsistencies in the central database. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, input_filename: Optional[str] = None, input_dir: Optional[str] = None, tag_dictionary: Optional[dict] = None): """ Parameters ---------- name_in : str Unique name of the module instance. input_filename : str, None The file name of the HDF5 input file. All files inside the input location will be imported if no filename is provided. input_dir : str, None The directory of the input HDF5 file. If no location is given, the default input location of the Pypeline is used. tag_dictionary : dict, None Dictionary of all data sets that will be imported. The dictionary format is {*tag_name_in_input_file*:*tag_name_in_database*, }. All data sets in the input HDF5 file that match one of the *tag_name_in_input_file* will be imported. The tag name inside the internal Pypeline database will be changed to *tag_name_in_database*. Returns ------- NoneType None """ super().__init__(name_in, input_dir=input_dir) if tag_dictionary is None: tag_dictionary = {} for out_tag in tag_dictionary.values(): self.add_output_port(out_tag) self.m_filename = input_filename self._m_tag_dictionary = tag_dictionary @typechecked def read_single_hdf5(self, file_in: str) -> None: """ Function which reads a single HDF5 file. Parameters ---------- file_in : str Path and name of the HDF5 file. 
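        As an aside, and purely as an illustration with placeholder names, the
        module itself is typically configured with a tag dictionary that maps
        dataset names in the external file to tags in the central database::

            module = Hdf5ReadingModule(name_in='read_hdf5',
                                       input_filename='previous_reduction.hdf5',
                                       tag_dictionary={'im_arr': 'im_arr'})

            pipeline.add_module(module)
            pipeline.run_module('read_hdf5')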
Returns ------- NoneType None """ hdf5_file = h5py.File(file_in, mode='r') for tag_in in self._m_tag_dictionary: tag_in = str(tag_in) # unicode keys cause errors tag_out = self._m_tag_dictionary[tag_in] if tag_in not in hdf5_file: warnings.warn(f'The dataset with tag name \'{tag_in}\' is not found in the HDF5 ' f'file.') continue # add data port_out = self._m_output_ports[tag_out] port_out.set_all(np.asarray(hdf5_file[tag_in][...])) # add static attributes for attr_name, attr_value in hdf5_file[tag_in].attrs.items(): port_out.add_attribute(name=attr_name, value=attr_value) # add non-static attributes if 'header_' + tag_in in hdf5_file: for attr_name in hdf5_file['header_' + tag_in]: attr_val = hdf5_file['header_' + tag_in + '/' + attr_name][...] port_out.add_attribute(name=attr_name, value=attr_val, static=False) @typechecked def run(self) -> None: """ Run method of the module. Looks for all HDF5 files in the input directory and reads the datasets that are provided in the tag dictionary. Returns ------- NoneType None """ # create list of files to be read files = [] tmp_dir = os.path.join(self.m_input_location, '') # check if a single input file is given if self.m_filename is not None: # create file path + filename assert(os.path.isfile((tmp_dir + str(self.m_filename)))), \ f'Error: Input file does not exist. Input requested: {self.m_filename}' files.append((tmp_dir + str(self.m_filename))) else: # look for all HDF5 files in the directory for tmp_file in os.listdir(self.m_input_location): if tmp_file.endswith('.hdf5') or tmp_file.endswith('.h5'): files.append(tmp_dir + str(tmp_file)) start_time = time.time() for i, tmp_file in enumerate(files): progress(i, len(files), 'Reading HDF5 file...', start_time) self.read_single_hdf5(tmp_file) PynPoint-0.11.0/pynpoint/readwrite/hdf5writing.py000066400000000000000000000072221450275315200220320ustar00rootroot00000000000000""" Module for writing a list of tags from the database to a separate HDF5 file. """ import os from typing import Optional import h5py from typeguard import typechecked from pynpoint.core.processing import WritingModule class Hdf5WritingModule(WritingModule): """ Module which exports a part of the PynPoint internal database to a separate HDF5 file. The datasets of the database can be chosen using the *tag_dictionary*. The module will also export the static and non-static attributes. """ __author__ = 'Markus Bonse, Tomas Stolker' @typechecked def __init__(self, name_in: str, file_name: str, output_dir: Optional[str] = None, tag_dictionary: Optional[dict] = None, keep_attributes: bool = True, overwrite: bool = False) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. file_name : str Name of the file which will be created by the module. output_dir : str, None Location where the HDF5 file will be stored. The Pypeline default output location is used when no location is given. tag_dictionary : dict, None Directory containing all tags / keys of the datasets which will be exported from the PynPoint internal database. The datasets will be exported as {*input_tag*:*output_tag*, }. keep_attributes : bool If True all static and non-static attributes will be exported. overwrite : bool Overwrite an existing HDF5 file. 
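        An illustrative sketch (tags, file name, and the ``pipeline`` object are
        placeholders for an existing Pypeline instance) that exports two datasets
        with their attributes::

            module = Hdf5WritingModule(name_in='write_hdf5',
                                       file_name='export.hdf5',
                                       tag_dictionary={'im_arr': 'im_arr',
                                                       'im_stack': 'im_stack'},
                                       keep_attributes=True,
                                       overwrite=True)

            pipeline.add_module(module)
            pipeline.run_module('write_hdf5')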
Returns ------- NoneType None """ super().__init__(name_in, output_dir=output_dir) if tag_dictionary is None: tag_dictionary = {} self.m_file_name = file_name self.m_tag_dictionary = tag_dictionary self.m_keep_attributes = keep_attributes self.m_overwrite = overwrite @typechecked def run(self) -> None: """ Run method of the module. Exports all datasets defined in the *tag_dictionary* to an external HDF5 file. Returns ------- NoneType None """ print('Writing HDF5 file...', end='') if self.m_overwrite: out_file = h5py.File(os.path.join(self.m_output_location, self.m_file_name), mode='w') else: out_file = h5py.File(os.path.join(self.m_output_location, self.m_file_name), mode='a') for in_tag, out_tag in self.m_tag_dictionary.items(): tmp_port = self.add_input_port(in_tag) tmp_data = tmp_port.get_all() if tmp_data is None: continue data_set = out_file.create_dataset(out_tag, data=tmp_data) if self.m_keep_attributes: # static attributes tmp_attr = tmp_port.get_all_static_attributes() # it is not possible to copy attributes all together for key, value in tmp_attr.items(): data_set.attrs[key] = value # non-static attributes non_static_attr_keys = tmp_port.get_all_non_static_attributes() if non_static_attr_keys is not None: for key in non_static_attr_keys: tmp_data_attr = tmp_port.get_attribute(key) attr_tag = 'header_' + out_tag + '/' + key out_file.create_dataset(attr_tag, data=tmp_data_attr) tmp_port.close_port() out_file.close() print(' [DONE]') PynPoint-0.11.0/pynpoint/readwrite/nearreading.py000066400000000000000000000405351450275315200220630ustar00rootroot00000000000000""" Module for reading FITS files obtained with VLT/VISIR for the NEAR experiment. """ import os import math import time import shlex import subprocess import threading import warnings from typing import Optional, Union, Tuple import numpy as np from astropy.io import fits from typeguard import typechecked from pynpoint.core.processing import ReadingModule from pynpoint.util.attributes import set_static_attr, set_nonstatic_attr, set_extra_attr from pynpoint.util.module import progress, memory_frames from pynpoint.util.image import crop_image class NearReadingModule(ReadingModule): """ Pipeline module for reading VLT/VISIR data of the NEAR experiment. The FITS files and required header information are read from the input directory and stored in two datasets, corresponding to chop A and chop B. The primary HDU of the FITS files should contain the main header information, while the subsequent HDUs contain each a single image (alternated for chop A and chop B) and some additional header information for that image. The last HDU is ignored as it contains the average of all images. """ __author__ = 'Jasper Jonker, Tomas Stolker, Anna Boehle' @typechecked def __init__(self, name_in: str, input_dir: Optional[str] = None, chopa_out_tag: str = 'chopa', chopb_out_tag: str = 'chopb', subtract: bool = False, crop: Optional[Union[Tuple[int, int, float], Tuple[None, None, float]]] = None, combine: Optional[str] = None): """ Parameters ---------- name_in : str Unique name of the instance. input_dir : str, None Input directory where the FITS files are located. The default input folder of the Pypeline is used if set to None. chopa_out_tag : str Database entry where the chop A images will be stored. Should be different from ``chop_b_out_tag``. chopb_out_tag : str Database entry where the chop B images will be stored. Should be different from ``chop_a_out_tag``. 
subtract : bool If True, the other chop position is subtracted before saving out the chop A and chop B images. crop: tuple(int, int, float), None The pixel position (x, y) around which the chop A and chop B images are cropped and the new image size (arcsec), together provided as (pos_x, pos_y, size). The same size will be used for both image dimensions. It is recommended to crop the images around the approximate coronagraph position. No cropping is applied if set to None. combine: str, None Method ('mean' or 'median') for combining (separately) the chop A and chop B frames from each cube into a single frame. All frames are stored if set to None. Returns ------- NoneType None """ super().__init__(name_in, input_dir=input_dir) self.m_chopa_out_port = self.add_output_port(chopa_out_tag) self.m_chopb_out_port = self.add_output_port(chopb_out_tag) self.m_subtract = subtract self.m_crop = crop self.m_combine = combine @staticmethod @typechecked def _uncompress_file(filename: str) -> None: """ Internal function to uncompress a .Z file. Parameters ---------- filename : str Compressed .Z file. Returns ------- NoneType None """ try: # try running a subprocess with the 'uncompress' command command = 'uncompress ' + filename subprocess.check_call(shlex.split(command)) except(FileNotFoundError, OSError): # or else run a subprocess with the 'gunzip' command command = 'gunzip -d ' + filename subprocess.check_call(shlex.split(command)) @typechecked def uncompress(self) -> None: """ Method to check if the input directory contains compressed files ending with .fits.Z. If this is the case, the files will be uncompressed using multithreading. The number of threads can be set with the ``CPU`` parameter in the configuration file. Returns ------- NoneType None """ cpu = self._m_config_port.get_attribute('CPU') # list all files ending with .fits.Z in the input location files = [] for item in os.listdir(self.m_input_location): if item.endswith('.fits.Z'): files.append(os.path.join(self.m_input_location, item)) if files: # subdivide the file indices by number of CPU indices = memory_frames(cpu, len(files)) start_time = time.time() for i, _ in enumerate(indices[:-1]): progress(i, len(indices[:-1]), 'Uncompressing NEAR data...', start_time) # select subset of compressed files subset = files[indices[i]:indices[i+1]] # create a list of threads to uncompress CPU number of files # each file is processed by a different thread threads = [] for filename in subset: thread = threading.Thread(target=self._uncompress_file, args=(filename, )) threads.append(thread) # start the threads for item in threads: item.start() # join the threads for item in threads: item.join() @staticmethod @typechecked def check_header(header: fits.header.Header) -> None: """ Method to check the header information and prompt a warning if a value is not as expected. Parameters ---------- header : astropy.io.fits.header.Header Header information from the FITS file that is read. Returns ------- NoneType None """ if str(header['ESO DET CHOP ST']) == 'F': warnings.warn('Dataset was obtained without chopping.') skipped = int(header['ESO DET CHOP CYCSKIP']) if skipped != 0: warnings.warn(f'Chop cycles ({skipped}) have been skipped.') if str(header['ESO DET CHOP CYCSUM']) == 'T': warnings.warn('FITS file contains averaged images.') @typechecked def read_header(self, filename: str) -> Tuple[fits.header.Header, Tuple[int, int, int]]: """ Function that opens a FITS file and separates the chop A and chop B images. The primary HDU contains only a general header. 
The subsequent HDUs contain a single image with a small extra header. The last HDU is the average of all images, which will be ignored. Parameters ---------- filename : str Absolute path and filename of the FITS file. Returns ------- astropy.io.fits.header.Header Primary header, which is valid for all images. tuple(int, int, int) Shape of a stack of images for chop A or B. """ # open the FITS file hdulist = fits.open(filename) # number of images = total number of HDUs - primary HDU - last HDU (average image) nimages = len(hdulist) - 2 # check if the file contains an even number of images, as expected with two chop positions if nimages % 2 != 0: warnings.warn(f'FITS file contains odd number of images: {filename}') # decreasing nimages to an even number such that nimages // 2 gives the correct size nimages -= 1 # primary header header = hdulist[0].header # number of chop cycles ncycles = header['ESO DET CHOP NCYCLES'] # number of chop cycles should be equal to half the number of available images if ncycles != nimages // 2: warnings.warn(f'The number of chop cycles ({ncycles}) is not equal to half the ' f'number of available HDU images ({nimages // 2}).') # header of the first image header_image = hdulist[1].header # create a list of key = value from the primary header fits_header = [] for key in header: if key: fits_header.append(str(key)+' = '+str(header[key])) # write the primary header information to the fits_header group header_out_port = self.add_output_port('fits_header/' + filename) header_out_port.set_all(np.array(fits_header)) # shape of the image stacks for chop A/B (hence nimages/2) im_shape = (nimages // 2, header_image['NAXIS2'], header_image['NAXIS1']) # set the NAXIS image shape in the primary header # required by util.attributes.set_nonstatic_attr header.set('NAXIS', 3) header.set('NAXIS1', im_shape[2]) header.set('NAXIS2', im_shape[1]) header.set('NAXIS3', im_shape[0]) # check primary header self.check_header(header) hdulist.close() return header, im_shape @staticmethod @typechecked def read_images(filename: str, im_shape: Tuple[int, int, int]) -> Tuple[np.ndarray, np.ndarray]: """ Function that opens a FITS file and separates the chop A and chop B images. The primary HDU contains only a general header. The subsequent HDUs contain a single image with a small extra header. The last HDU is the average of all images, which will be ignored. Parameters ---------- filename : str Absolute path and filename of the FITS file. im_shape : tuple(int, int, int) Shape of a stack of images for chop A or B. Returns ------- numpy.array Array containing the images of chop A. numpy.array Array containing the images of chop B. """ # open the FITS file hdulist = fits.open(filename) # initialize the image arrays for chop A and B chopa = np.zeros(im_shape, dtype=np.float32) chopb = np.zeros(im_shape, dtype=np.float32) count_chopa, count_chopb = 0, 0 prev_cycle = None for i in range(2*im_shape[0]): # get the chop position (HCYCLE1 = chop A, HCYCLE2 = chop B) # primary HDU is skipped with +1 if 'ESO DET FRAM TYPE' in hdulist[i+1].header: cycle = hdulist[i+1].header['ESO DET FRAM TYPE'] else: hdulist.close() raise ValueError(f'Frame type not found in the FITS header. 
Image number: {i}.') # write the HDU image to the chop A or B array # count the number of chop A and B images if cycle == 'HCYCLE1' and cycle != prev_cycle: chopa[count_chopa, ] = hdulist[i+1].data.byteswap().newbyteorder() count_chopa += 1 prev_cycle = cycle elif cycle == 'HCYCLE2' and cycle != prev_cycle: chopb[count_chopb, ] = hdulist[i+1].data.byteswap().newbyteorder() count_chopb += 1 prev_cycle = cycle elif cycle == prev_cycle: warnings.warn(f'Previous and current chop position ({cycle}) are the same. ' 'Skipping the current image.') else: hdulist.close() raise ValueError(f'Frame type ({cycle}) not a valid value. Expecting HCYCLE1 or ' 'HCYCLE2 as value for ESO DET FRAM TYPE.') # check if the number of chop A and B images is equal, this error should never occur if count_chopa != count_chopb: warnings.warn('The number of images is not equal for chop A and chop B.') hdulist.close() return chopa, chopb @typechecked def run(self) -> None: """ Run the module. The FITS files are collected from the input directory and uncompressed if needed. The images are then sorted by the two chop positions (chop A and chop B). The required FITS header keywords (which should be set in the configuration file) are also imported and stored as attributes to the two output datasets in the HDF5 database. Returns ------- NoneType None """ # clear the output ports self.m_chopa_out_port.del_all_data() self.m_chopa_out_port.del_all_attributes() self.m_chopb_out_port.del_all_data() self.m_chopb_out_port.del_all_attributes() # uncompress the FITS files if needed self.uncompress() # find and sort the FITS files files = [] for filename in os.listdir(self.m_input_location): if filename.endswith('.fits'): files.append(os.path.join(self.m_input_location, filename)) files.sort() # check if there are FITS files present in the input location assert files, f'No FITS files found in {self.m_input_location}.' # if cropping chop A, get pixscale and convert crop_size to pixels and swap x/y if self.m_crop is not None: pixscale = self._m_config_port.get_attribute('PIXSCALE') self.m_crop = (self.m_crop[1], self.m_crop[0], int(math.ceil(self.m_crop[2]/pixscale))) start_time = time.time() for i, filename in enumerate(files): progress(i, len(files), 'Preprocessing NEAR data...', start_time) # get the primary header data and the image shape header, im_shape = self.read_header(filename) # get the images of chop A and chop B chopa, chopb = self.read_images(filename, im_shape) if self.m_subtract: chopa = chopa - chopb chopb = -1. 
* np.copy(chopa) if self.m_crop is not None: chopa = crop_image(chopa, center=self.m_crop[0:2], size=self.m_crop[2], copy=False) chopb = crop_image(chopb, center=self.m_crop[0:2], size=self.m_crop[2], copy=False) if self.m_combine is not None: if self.m_combine == 'mean': chopa = np.mean(chopa, axis=0) chopb = np.mean(chopb, axis=0) elif self.m_combine == 'median': chopa = np.median(chopa, axis=0) chopb = np.median(chopb, axis=0) header[self._m_config_port.get_attribute('NFRAMES')] = 1 # append the images of chop A and B self.m_chopa_out_port.append(chopa, data_dim=3) self.m_chopb_out_port.append(chopb, data_dim=3) # starting value for the INDEX attribute first_index = 0 for port in (self.m_chopa_out_port, self.m_chopb_out_port): # set the static attributes set_static_attr(fits_file=filename, header=header, config_port=self._m_config_port, image_out_port=port, check=True) # set the non-static attributes set_nonstatic_attr(header=header, config_port=self._m_config_port, image_out_port=port, check=True) # set the remaining attributes set_extra_attr(fits_file=filename, nimages=im_shape[0]//2, config_port=self._m_config_port, image_out_port=port, first_index=first_index) # increase the first value of the INDEX attribute first_index += im_shape[0]//2 # flush the output port port.flush() # add history information self.m_chopa_out_port.add_history('NearReadingModule', 'Chop A') self.m_chopb_out_port.add_history('NearReadingModule', 'Chop B') # close all connections to the database self.m_chopa_out_port.close_port() PynPoint-0.11.0/pynpoint/readwrite/textwriting.py000066400000000000000000000050741450275315200221730ustar00rootroot00000000000000""" Modules for writing data as text file. """ import os from typing import Optional import numpy as np from typeguard import typechecked from pynpoint.core.processing import WritingModule class TextWritingModule(WritingModule): """ Module for writing a 1D or 2D data set from the central HDF5 database as text file. TextWritingModule is a :class:`pynpoint.core.processing.WritingModule` and supports the use of the Pypeline default output directory as well as a specified location. """ __author__ = 'Tomas Stolker' @typechecked def __init__(self, name_in: str, data_tag: str, file_name: str, output_dir: Optional[str] = None, header: Optional[str] = None) -> None: """ Parameters ---------- name_in : str Unique name of the module instance. data_tag : str Tag of the database entry from which data is exported. file_name : str Name of the output file. output_dir : str, None Output directory where the text file will be stored. If no path is specified then the Pypeline default output location is used. header : str, None Header that is written at the top of the text file. Returns ------- NoneType None """ super().__init__(name_in, output_dir=output_dir) self.m_data_port = self.add_input_port(data_tag) self.m_file_name = file_name self.m_header = header @typechecked def run(self) -> None: """ Run method of the module. Saves the specified data from the database to a text file. 
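The data are written with ``numpy.savetxt``. A 3D dataset that contains a single image is squeezed to a 2D array before writing, and an error is raised for data with more than two dimensions.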
Returns ------- NoneType None """ if self.m_header is None: self.m_header = '' print('Writing text file...', end='') out_name = os.path.join(self.m_output_location, self.m_file_name) data = self.m_data_port.get_all() if data.ndim == 3 and data.shape[0] == 1: data = np.squeeze(data, axis=0) if data.ndim > 2: raise ValueError('Only 1D or 2D arrays can be written to a text file.') if data.dtype == 'int32' or data.dtype == 'int64': np.savetxt(out_name, data, header=self.m_header, comments='# ', fmt='%i') elif data.dtype == 'float32' or data.dtype == 'float64': np.savetxt(out_name, data, header=self.m_header, comments='# ') print(' [DONE]') self.m_data_port.close_port() PynPoint-0.11.0/pynpoint/util/000077500000000000000000000000001450275315200162125ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/util/__init__.py000066400000000000000000000000001450275315200203110ustar00rootroot00000000000000PynPoint-0.11.0/pynpoint/util/analysis.py000066400000000000000000000435751450275315200204250ustar00rootroot00000000000000""" Functions for point source analysis. """ import math from typing import Optional, Tuple import numpy as np from typeguard import typechecked from scipy.stats import t from scipy.ndimage import gaussian_filter from skimage.feature import hessian_matrix from photutils.aperture import aperture_photometry, CircularAperture from pynpoint.util.image import shift_image, center_subpixel, pixel_distance, select_annulus, \ cartesian_to_polar, create_mask from pynpoint.util.psf import pca_psf_subtraction from pynpoint.util.residuals import combine_residuals def compute_aperture_flux_elements(image: np.ndarray, x_pos: float, y_pos: float, size: float, ignore: bool): """ Computes the average fluxes inside apertures with the same separation from the center. This function can be used to to estimate the residual flux of a planet at position (x_pos, y_pos) and the respective noise elements with same separation (see function false_alarm) It can also be used to compute the noise apertures is if no planet is present (needed for contrast curves). Parameters ---------- image : numpy.ndarray The input image as a 2D numpy array. For example, this could be a residual frame returned by a :class:`.PcaPsfSubtractionModule`. x_pos : float The planet position (in pixels) along the horizontal axis. The pixel coordinates of the bottom-left corner of the image are (-0.5, -0.5). If no planet is present x_pos and y_pos determine the separation from the center. y_pos : float The planet position (pix) along the vertical axis. The pixel coordinates of the bottom-left corner of the image are (-0.5, -0.5). If no planet is present x_pos and y_pos determine the separation from the center. size : float The radius of the reference apertures (in pixels). Usually, this value is chosen close to one half of the typical FWHM of the PSF (0.514 lambda over D for a perfect Airy pattern; in practice, however, the FWHM is often larger than this). ignore : bool Whether or not to ignore the immediate neighboring apertures for the noise estimate. This is desirable in case there are "self-subtraction wings" left and right of the planet which would bias the estimation of the noise level at the separation of the planet if not ignored. Returns ------- ap_phot : A list of aperture photometry values. If a planet was present ap_phot[0] contains the flux of the planet and ap_phot[1:] contains the noise. If not planet was present ap_phot[...] gives the aperture photometry of the noise elements. 
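Examples
--------
A minimal, illustrative call on a synthetic noise image (the values are
arbitrary and only chosen to show the expected argument types)::

    import numpy as np

    from pynpoint.util.analysis import compute_aperture_flux_elements

    image = np.random.normal(loc=0., scale=1., size=(101, 101))

    # Fluxes of the apertures at a separation of 15 pixels from the
    # image center, using an aperture radius of 3 pixels
    ap_phot = compute_aperture_flux_elements(image=image,
                                             x_pos=65.,
                                             y_pos=50.,
                                             size=3.,
                                             ignore=False)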
""" # Compute the center of the current frame (with subpixel precision) and use it to compute the # radius of the given position in polar coordinates (with the origin at the center of the frame) center = center_subpixel(image) radius = math.sqrt((center[0] - y_pos)**2 + (center[1] - x_pos)**2) # Compute the number of apertures which we can place at the separation of the given position num_ap = int(math.pi * radius / size) # Compute the angles at which to place the reference apertures ap_theta = np.linspace(0, 2 * math.pi, num_ap, endpoint=False) # If ignore is True, delete the apertures immediately right and left of the aperture placed on # the planet signal. These apertures often contain "self-subtraction wings", which means they # cannot be considered to originate from the same distribution. In accordance with section 3.2 # of Mawet et al. (2014), such apertures are ignored to prevent bias. if ignore: num_ap -= 2 ap_theta = np.delete(ap_theta, [1, np.size(ap_theta) - 1]) # If the number of apertures is 2 or less, we cannot compute the false positive fraction if num_ap < 3: raise ValueError( f'Number of apertures (num_ap={num_ap}) is too small to calculate the ' 'false positive fraction.') # Initialize a numpy array in which we will store the integrated flux of all reference apertures ap_phot = np.zeros(num_ap) # Loop over all reference apertures and measure the integrated flux for i, theta in enumerate(ap_theta): # Compute the position of the current aperture in polar coordinates and convert to Cartesian x_tmp = center[1] + (x_pos - center[1]) * math.cos(theta) - \ (y_pos - center[0]) * math.sin(theta) y_tmp = center[0] + (x_pos - center[1]) * math.sin(theta) + \ (y_pos - center[0]) * math.cos(theta) # Place a circular aperture at a position and sum up the flux inside the aperture aperture = CircularAperture((x_tmp, y_tmp), size) phot_table = aperture_photometry(image, aperture, method='exact') ap_phot[i] = phot_table['aperture_sum'] return ap_phot @typechecked def false_alarm(image: np.ndarray, x_pos: float, y_pos: float, size: float, ignore: bool) -> Tuple[float, float, float, float]: """ Compute the signal-to-noise ratio (SNR), which is formally defined as the test statistic of a two-sample t-test, and related quantities (such as the FPF) at a given position in an image. For more detailed information about the definition of the signal-to-noise ratio and the motivation behind it, please see the following paper: Mawet, D. et al. (2014): "Fundamental limitations of high contrast imaging set by small sample statistics". *The Astrophysical Journal*, 792(2), 97. DOI: `10.1088/0004-637X/792/2/97 `_. Parameters ---------- image : numpy.ndarray The input image as a 2D numpy array. For example, this could be a residual frame returned by a :class:`.PcaPsfSubtractionModule`. x_pos : float The planet position (in pixels) along the horizontal axis. The pixel coordinates of the bottom-left corner of the image are (-0.5, -0.5). y_pos : float The planet position (pix) along the vertical axis. The pixel coordinates of the bottom-left corner of the image are (-0.5, -0.5). size : float The radius of the reference apertures (in pixels). Usually, this value is chosen close to one half of the typical FWHM of the PSF (0.514 lambda over D for a perfect Airy pattern; in practice, however, the FWHM is often larger than this). ignore : bool Whether or not to ignore the immediate neighboring apertures for the noise estimate. 
This is desirable in case there are "self-subtraction wings" left and right of the planet which would bias the estimation of the noise level at the separation of the planet if not ignored. Returns ------- signal_sum : The integrated (summed up) flux inside the signal aperture. Please note that this is **not** identical to the numerator of the fraction defining the SNR (which is given by the `signal_sum` minus the mean of the noise apertures). noise : The denominator of the SNR, i.e., the standard deviation of the integrated flux of the noise apertures, times a correction factor that accounts for small sample statistics. snr : The signal-to-noise ratio (SNR) as defined by Mawet et al. (2014) in eq. (8). fpf : The false positive fraction (FPF) as defined by Mawet et al. (2014) in eq. (10). """ ap_phot = compute_aperture_flux_elements(image=image, x_pos=x_pos, y_pos=y_pos, size=size, ignore=ignore) # Define shortcuts to the signal and the noise aperture sums signal_aperture = ap_phot[0] noise_apertures = ap_phot[1:] # Compute the "signal", that is, the numerator of the signal-to-noise ratio: According to # eq. (8) in Mawet et al. (2014), this is given by the difference between the integrated flux # in the signal aperture and the mean of the integrated flux in the noise apertures signal = signal_aperture - np.mean(noise_apertures) # Compute the "noise", that is, the denominator of the signal-to-noise-ratio: According to # eq. (8) in Mawet et al. (2014), this is given by the standard deviation of the integrated flux # in the noise apertures times a correction factor to account for the small sample statistics. # NOTE: `ddof=1` is a necessary argument for np.std() in order to compute the *unbiased* # estimate (i.e., including Bessel's corrections) of the standard deviation. noise = np.std(noise_apertures, ddof=1) *\ math.sqrt(1 + 1 / (noise_apertures.shape[0])) # Compute the signal-to-noise ratio by dividing the "signal" through the "noise" snr = signal / noise # Compute the false positive fraction (FPF). According to eq. (10) in Mawet et al. (2014), the # FPF is given by 1 - F_nu(SNR), where F_nu is the cumulative distribution function (CDF) of a # t-distribution with `nu = n-1` degrees of freedom (see Section 3 of Mawet et al. (2014) for # more details on the Student's t distribution). # For numerical reasons, we use the survival function (SF), which is defined precisely as 1-CDF, # but may give more accurate results according to the scipy documentation. fpf = t.sf(snr, df=(noise_apertures.shape[0] - 1)) return signal_aperture, noise, snr, fpf @typechecked def student_t(t_input: Tuple[str, float], radius: float, size: float, ignore: bool) -> float: """ Function to calculate the false positive fraction for a given sigma level (Mawet et al. 2014). Parameters ---------- t_input : tuple(str, float) Tuple with the input type ('sigma' or 'fpf') and the input value. radius : float Aperture radius (in pixels). size : float Separation of the aperture center from the center of the frame (in pixels). ignore : bool Whether or not to ignore the immediate neighboring apertures of the point source to exclude potential self-subtraction lobes. Returns ------- float False positive fraction (FPF). """ num_ap = int(math.pi * radius / size) if ignore: num_ap -= 2 # Note that the number of degrees of freedom is given by nu = n-1 with n the number of samples. # The number of samples is equal to the number of apertures minus 1 (i.e. the planet aperture). # See Section 3 of Mawet et al. 
(2014) for more details on the Student's t distribution. if t_input[0] == 'sigma': t_result = t.sf(t_input[1], num_ap-2, loc=0., scale=1.) elif t_input[0] == 'fpf': t_result = t.ppf(1. - t_input[1], num_ap-2, loc=0., scale=1.) else: raise ValueError('First element of t_input needs to be "sigma" or "fpf"!') return t_result @typechecked def fake_planet(images: np.ndarray, psf: np.ndarray, parang: np.ndarray, position: Tuple[float, float], magnitude: float, psf_scaling: float, interpolation: str = 'spline') -> np.ndarray: """ Function to inject artificial planets in a dataset. Parameters ---------- images : numpy.ndarray Input images (3D). psf : numpy.ndarray PSF template (3D). parang : numpy.ndarray Parallactic angles (deg). position : tuple(float, float) Separation (pix) and position angle (deg) measured in counterclockwise with respect to the upward direction. magnitude : float Magnitude difference used to scale input PSF. psf_scaling : float Extra factor used to scale input PSF. interpolation : str Interpolation type ('spline', 'bilinear', or 'fft'). Returns ------- numpy.ndarray Images with artificial planet injected. """ sep = position[0] ang = np.radians(position[1] + 90. - parang) flux_ratio = 10. ** (-magnitude / 2.5) psf = psf*psf_scaling*flux_ratio x_shift = sep*np.cos(ang) y_shift = sep*np.sin(ang) im_shift = np.zeros(images.shape) for i in range(images.shape[0]): if psf.shape[0] == 1: im_shift[i, ] = shift_image(psf[0, ], (float(y_shift[i]), float(x_shift[i])), interpolation, mode='reflect') else: im_shift[i, ] = shift_image(psf[i, ], (float(y_shift[i]), float(x_shift[i])), interpolation, mode='reflect') return images + im_shift @typechecked def merit_function(residuals: np.ndarray, merit: str, aperture: Tuple[int, int, float], sigma: float, var_noise: Optional[float]) -> float: """ Function to calculate the figure of merit at a given position in the image residuals. Parameters ---------- residuals : numpy.ndarray Residuals of the PSF subtraction (2D). merit : str Figure of merit for the chi-square function ('hessian', 'poisson', or 'gaussian'). aperture : tuple(int, int, float) Position (y, x) of the aperture center (pix) and aperture radius (pix). sigma : float Standard deviation (pix) of the Gaussian kernel which is used to smooth the residuals before the chi-square is calculated. var_noise : float, None Variance of the noise which is required when `merit` is set to 'gaussian' or 'hessian'. Returns ------- float Chi-square value. """ rr_grid, _, _ = pixel_distance(residuals.shape, position=(aperture[0], aperture[1])) indices = np.where(rr_grid <= aperture[2]) if merit == 'hessian': hessian_rr, hessian_rc, hessian_cc = hessian_matrix(image=residuals, sigma=sigma, mode='constant', cval=0., order='rc', use_gaussian_derivatives=False) hes_det = (hessian_rr*hessian_cc) - (hessian_rc*hessian_rc) chi_square = np.sum(hes_det[indices]**2)/var_noise elif merit == 'poisson': if sigma > 0.: residuals = gaussian_filter(input=residuals, sigma=sigma) chi_square = np.sum(np.abs(residuals[indices])) elif merit == 'gaussian': chi_square = np.sum(residuals[indices]**2)/var_noise else: raise ValueError('Figure of merit not recognized. Please use \'hessian\', \'poisson\' ' 'or \'gaussian\'. 
Previous use of \'sum\' should now be set as ' '\'poisson\'.') return chi_square @typechecked def pixel_variance(var_type: str, images: np.ndarray, parang: np.ndarray, cent_size: Optional[float], edge_size: Optional[float], pca_number: int, residuals: str, aperture: Tuple[int, int, float], sigma: float) -> float: """ Function to calculate the variance of the noise. After the PSF subtraction, images are rotated in opposite direction of the regular derotation, therefore dispersing any companion or disk signal. The noise is measured within an annulus. Parameters ---------- var_type : str Variance type ('gaussian' or 'hessian'). images : numpy.ndarray Input images (3D). parang : numpy.ndarray Parallactic angles. cent_size : float, None Radius of the central mask (pix). No mask is used when set to None. edge_size : float, None Outer radius (pix) beyond which pixels are masked. No outer mask is used when set to None. pca_number : int Number of principal components (PCs) used for the PSF subtraction. residuals : str Method for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). aperture : tuple(int, int, float) Aperture position (y, x) and radius (pix). sigma : float, None Standard deviation (pix) of the Gaussian kernel which is used to smooth the images. Returns ------- float Variance of the pixel values. Either the variance of the pixel values ('gaussian') or the variance of the determinant of the Hessian ('hessian'). """ mask = create_mask(images.shape[-2:], (cent_size, edge_size)) _, im_res_derot = pca_psf_subtraction(images*mask, parang, pca_number) res_noise = combine_residuals(residuals, im_res_derot) sep_ang = cartesian_to_polar(center_subpixel(res_noise), aperture[0], aperture[1]) if var_type == 'gaussian': selected = select_annulus(res_noise[0, ], sep_ang[0]-aperture[2], sep_ang[0]+aperture[2]) elif var_type == 'hessian': hessian_rr, hessian_rc, hessian_cc = hessian_matrix(image=res_noise[0, ], sigma=sigma, mode='constant', cval=0., order='rc', use_gaussian_derivatives=False) hes_det = (hessian_rr*hessian_cc) - (hessian_rc*hessian_rc) selected = select_annulus(hes_det, sep_ang[0]-aperture[2], sep_ang[0]+aperture[2]) return float(np.var(selected)) PynPoint-0.11.0/pynpoint/util/apply_func.py000066400000000000000000000655351450275315200207420ustar00rootroot00000000000000""" Functions that are executed with :func:`~pynpoint.core.processing.ProcessingModule.apply_function_to_images` and :func:`~pynpoint.core.processing.ProcessingModule.apply_function_in_time`. The functions are placed here such that they are pickable by the multiprocessing functionalities. The first two parameters are always the sliced data and the index in the dataset. TODO Docstrings are missing for most of the functions. 
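For reference, a function that can be used with
:func:`~pynpoint.core.processing.ProcessingModule.apply_function_to_images`
follows the pattern below (an illustrative example, not one of the functions
in this module). The first two parameters are always the sliced data and the
index of the image, while any remaining parameters are supplied by the
calling module::

    import numpy as np

    def example_scaling(image_in: np.ndarray,
                        im_index: int,
                        scaling: float) -> np.ndarray:
        # 'image_in' is a single, sliced image and 'im_index' is its
        # index in the dataset; 'scaling' is an extra argument that
        # is passed on by the processing module
        return scaling * image_in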
""" import copy import math import warnings from typing import List, Optional, Union, Tuple import cv2 import numpy as np import pywt from numba import jit from photutils.aperture import Aperture, aperture_photometry from scipy.ndimage import gaussian_filter from scipy.optimize import curve_fit from skimage.registration import phase_cross_correlation from skimage.transform import rescale from statsmodels.robust import mad from typeguard import typechecked from pynpoint.core.dataio import InputPort, OutputPort from pynpoint.util.image import center_pixel, crop_image, scale_image, shift_image from pynpoint.util.star import locate_star from pynpoint.util.wavelets import WaveletAnalysisCapsule @typechecked def image_scaling(image_in: np.ndarray, im_index: int, scaling_y: float, scaling_x: float, scaling_flux: float) -> np.ndarray: return scaling_flux * scale_image(image_in, scaling_y, scaling_x) @typechecked def subtract_line(image_in: np.ndarray, im_index: int, mask: np.ndarray, combine: str, im_shape: Tuple[int, int]) -> np.ndarray: image_tmp = np.copy(image_in) image_tmp[mask == 0.] = np.nan if combine == 'mean': row_mean = np.nanmean(image_tmp, axis=1) col_mean = np.nanmean(image_tmp, axis=0) x_grid, y_grid = np.meshgrid(col_mean, row_mean) subtract = (x_grid+y_grid)/2. elif combine == 'median': col_median = np.nanmedian(image_tmp, axis=0) col_2d = np.tile(col_median, (im_shape[1], 1)) image_tmp -= col_2d image_tmp[mask == 0.] = np.nan row_median = np.nanmedian(image_tmp, axis=1) row_2d = np.tile(row_median, (im_shape[0], 1)) row_2d = np.rot90(row_2d) # 90 deg rotation in clockwise direction subtract = col_2d + row_2d return image_in - subtract @typechecked def align_image(image_in: np.ndarray, im_index: int, interpolation: str, accuracy: float, resize: Optional[float], num_references: int, subframe: Optional[float], ref_images_reshape: np.ndarray, ref_images_shape: Tuple[int, int, int]) -> np.ndarray: offset = np.array([0., 0.]) # Reshape the reference images back to their original 3D shape # The original shape can not be used directly because of util.module.update_arguments ref_images = ref_images_reshape.reshape(ref_images_shape) for i in range(num_references): if subframe is None: tmp_offset, _, _ = phase_cross_correlation(ref_images[i, :, :], image_in, normalization=None, upsample_factor=accuracy) else: sub_in = crop_image(image_in, None, subframe) sub_ref = crop_image(ref_images[i, :, :], None, subframe) tmp_offset, _, _ = phase_cross_correlation(sub_ref, sub_in, normalization=None, upsample_factor=accuracy) offset += tmp_offset offset /= float(num_references) if resize is not None: offset *= resize sum_before = np.sum(image_in) tmp_image = rescale(image_in, (resize, resize), order=5, mode='reflect', channel_axis=None, anti_aliasing=True) sum_after = np.sum(tmp_image) # Conserve flux because the rescale function normalizes all values to [0:1]. 
tmp_image = tmp_image*(sum_before/sum_after) else: tmp_image = image_in return shift_image(tmp_image, offset, interpolation) @typechecked def fit_2d_function(image: np.ndarray, im_index: int, mask_radii: Tuple[float, float], sign: str, model: str, filter_size: Optional[float], guess: Union[Tuple[float, float, float, float, float, float, float], Tuple[float, float, float, float, float, float, float, float]], mask_out_port: Optional[OutputPort], xx_grid: np.ndarray, yy_grid: np.ndarray, rr_ap: np.ndarray, pixscale: float) -> np.ndarray: @typechecked def gaussian_2d(grid: Union[Tuple[np.ndarray, np.ndarray], np.ndarray], x_center: float, y_center: float, fwhm_x: float, fwhm_y: float, amp: float, theta: float, offset: float) -> np.ndarray: """ Function to create a 2D elliptical Gaussian model. Parameters ---------- grid : tuple(np.ndarray, np.ndarray), np.ndarray A tuple of two 2D arrays with the mesh grid points in x and y direction, or an equivalent 3D numpy array with 2 elements along the first axis. x_center : float Offset of the model center along the x axis (pix). y_center : float Offset of the model center along the y axis (pix). fwhm_x : float Full width at half maximum along the x axis (pix). fwhm_y : float Full width at half maximum along the y axis (pix). amp : float Peak flux. theta : float Rotation angle in counterclockwise direction (rad). offset : float Flux offset. Returns ------- np.ndimage Raveled 2D elliptical Gaussian model. """ (xx_grid, yy_grid) = grid x_diff = xx_grid - x_center y_diff = yy_grid - y_center sigma_x = fwhm_x/math.sqrt(8.*math.log(2.)) sigma_y = fwhm_y/math.sqrt(8.*math.log(2.)) a_gauss = 0.5 * ((np.cos(theta)/sigma_x)**2 + (np.sin(theta)/sigma_y)**2) b_gauss = 0.5 * ((np.sin(2.*theta)/sigma_x**2) - (np.sin(2.*theta)/sigma_y**2)) c_gauss = 0.5 * ((np.sin(theta)/sigma_x)**2 + (np.cos(theta)/sigma_y)**2) gaussian = offset + amp*np.exp(-(a_gauss*x_diff**2 + b_gauss*x_diff*y_diff + c_gauss*y_diff**2)) return gaussian[(rr_ap > mask_radii[0]) & (rr_ap < mask_radii[1])] @typechecked def moffat_2d(grid: Union[Tuple[np.ndarray, np.ndarray], np.ndarray], x_center: float, y_center: float, fwhm_x: float, fwhm_y: float, amp: float, theta: float, offset: float, beta: float) -> np.ndarray: """ Function to create a 2D elliptical Moffat model. The parametrization used here is equivalent to the one in AsPyLib: http://www.aspylib.com/doc/aspylib_fitting.html#elliptical-moffat-psf Parameters ---------- grid : tuple(np.ndarray, np.ndarray), np.ndarray A tuple of two 2D arrays with the mesh grid points in x and y direction, or an equivalent 3D numpy array with 2 elements along the first axis. x_center : float Offset of the model center along the x axis (pix). y_center : float Offset of the model center along the y axis (pix). fwhm_x : float Full width at half maximum along the x axis (pix). fwhm_y : float Full width at half maximum along the y axis (pix). amp : float Peak flux. theta : float Rotation angle in counterclockwise direction (rad). offset : float Flux offset. beta : float Power index. Returns ------- np.ndimage Raveled 2D elliptical Moffat model. """ (xx_grid, yy_grid) = grid x_diff = xx_grid - x_center y_diff = yy_grid - y_center if 2.**(1./beta)-1. < 0.: alpha_x = np.nan alpha_y = np.nan else: alpha_x = 0.5*fwhm_x/np.sqrt(2.**(1./beta)-1.) alpha_y = 0.5*fwhm_y/np.sqrt(2.**(1./beta)-1.) if alpha_x == 0. or alpha_y == 0.: a_moffat = np.nan b_moffat = np.nan c_moffat = np.nan else: a_moffat = (np.cos(theta)/alpha_x)**2. + (np.sin(theta)/alpha_y)**2. 
b_moffat = (np.sin(theta)/alpha_x)**2. + (np.cos(theta)/alpha_y)**2. c_moffat = 2.*np.sin(theta)*np.cos(theta)*(1./alpha_x**2. - 1./alpha_y**2.) a_term = a_moffat*x_diff**2 b_term = b_moffat*y_diff**2 c_term = c_moffat*x_diff*y_diff moffat = offset + amp / (1.+a_term+b_term+c_term)**beta return moffat[(rr_ap > mask_radii[0]) & (rr_ap < mask_radii[1])] if filter_size: image = gaussian_filter(image, filter_size) if mask_out_port is not None: mask = np.copy(image) mask[(rr_ap < mask_radii[0]) | (rr_ap > mask_radii[1])] = 0. mask_out_port.append(mask, data_dim=3) if sign == 'negative': image = -1.*image + np.abs(np.min(-1.*image)) image = image[(rr_ap > mask_radii[0]) & (rr_ap < mask_radii[1])] if model == 'gaussian': model_func = gaussian_2d elif model == 'moffat': model_func = moffat_2d try: popt, pcov = curve_fit(model_func, (xx_grid, yy_grid), image, p0=guess, sigma=None, method='lm') perr = np.sqrt(np.diag(pcov)) except RuntimeError: if model == 'gaussian': popt = np.zeros(7) perr = np.zeros(7) elif model == 'moffat': popt = np.zeros(8) perr = np.zeros(8) warnings.warn(f'Fit could not converge on image number {im_index}.') if model == 'gaussian': best_fit = np.asarray((popt[0], perr[0], popt[1], perr[1], popt[2]*pixscale, perr[2]*pixscale, popt[3]*pixscale, perr[3]*pixscale, popt[4], perr[4], math.degrees(popt[5]) % 360., math.degrees(perr[5]), popt[6], perr[6])) elif model == 'moffat': best_fit = np.asarray((popt[0], perr[0], popt[1], perr[1], popt[2]*pixscale, perr[2]*pixscale, popt[3]*pixscale, perr[3]*pixscale, popt[4], perr[4], math.degrees(popt[5]) % 360., math.degrees(perr[5]), popt[6], perr[6], popt[7], perr[7])) return best_fit @typechecked def crop_around_star(image: np.ndarray, im_index: int, position: Optional[Union[Tuple[int, int, float], Tuple[None, None, float]]], im_size: int, fwhm: int, pixscale: float, index_out_port: Optional[OutputPort], image_out_port: OutputPort) -> np.ndarray: if position is None: center = None width = None else: if position[0] is None and position[1] is None: center = None else: center = (position[1], position[0]) # (y, x) width = int(math.ceil(position[2]/pixscale)) starpos = locate_star(image, center, width, fwhm) try: im_crop = crop_image(image, tuple(starpos), im_size) except ValueError: warnings.warn(f'Chosen image size is too large to crop the image around the ' f'brightest pixel (image index = {im_index}, pixel [x, y] ' f'= [{starpos[0]}, {starpos[1]}]). Using the center of the ' f'image instead.') if index_out_port is not None: index_out_port.append([im_index], data_dim=1) starpos = center_pixel(image) im_crop = crop_image(image, tuple(starpos), im_size) return im_crop @typechecked def crop_rotating_star(image: np.ndarray, im_index: int, position: Union[Tuple[float, float], np.ndarray], im_size: int, filter_size: Optional[int], search_size: int) -> np.ndarray: starpos = locate_star(image=image, center=tuple(position), width=search_size, fwhm=filter_size) return crop_image(image=image, center=tuple(starpos), size=im_size) @typechecked def photometry(image: np.ndarray, im_index: int, aperture: Union[Aperture, List[Aperture]]) -> np.ndarray: # https://photutils.readthedocs.io/en/stable/overview.html # In Photutils, pixel coordinates are zero-indexed, meaning that (x, y) = (0, 0) # corresponds to the center of the lowest, leftmost array element. This means that # the value of data[0, 0] is taken as the value over the range -0.5 < x <= 0.5, # -0.5 < y <= 0.5. Note that this is the same coordinate system as used by PynPoint. 
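    # As an illustrative example (the values are arbitrary), an aperture that
    # can be passed to this function is created as:
    #     from photutils.aperture import CircularAperture
    #     aperture = CircularAperture((32., 32.), r=5.)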
return np.array(aperture_photometry(image, aperture, method='exact')['aperture_sum']) @typechecked def image_stat(image_in: np.ndarray, im_index: int, indices: Optional[np.ndarray]) -> np.ndarray: if indices is None: image_select = np.copy(image_in) else: image_reshape = np.reshape(image_in, (image_in.shape[0]*image_in.shape[1])) image_select = image_reshape[indices] nmin = np.nanmin(image_select) nmax = np.nanmax(image_select) nsum = np.nansum(image_select) mean = np.nanmean(image_select) median = np.nanmedian(image_select) std = np.nanstd(image_select) return np.asarray([nmin, nmax, nsum, mean, median, std]) @typechecked def subtract_psf(image: np.ndarray, im_index: int, parang_thres: Optional[float], nref: Optional[int], reference: Optional[np.ndarray], ang_diff: np.ndarray, image_in_port: InputPort) -> np.ndarray: if parang_thres: index_thres = np.where(ang_diff > parang_thres)[0] if index_thres.size == 0: reference = image_in_port.get_all() warnings.warn('No images meet the rotation threshold. Creating a reference ' 'PSF from the median of all images instead.') else: if nref: index_diff = np.abs(im_index - index_thres) index_near = np.argsort(index_diff)[:nref] index_sort = np.sort(index_thres[index_near]) reference = image_in_port[index_sort, :, :] else: reference = image_in_port[index_thres, :, :] reference = np.median(reference, axis=0) return image-reference @typechecked def dwt_denoise_line_in_time(signal_in: np.ndarray, im_index: int, threshold_function: bool, padding: str, wavelet_conf) -> np.ndarray: """ Definition of the temporal denoising for DWT. Parameters ---------- signal_in : np.ndarray 1D input signal. Returns ------- np.ndarray Multilevel 1D inverse discrete wavelet transform. """ if threshold_function: threshold_mode = 'soft' else: threshold_mode = 'hard' coef = pywt.wavedec(signal_in, wavelet=wavelet_conf.m_wavelet, level=None, mode=padding) sigma = mad(coef[-1]) threshold = sigma * np.sqrt(2 * np.log(len(signal_in))) denoised = coef[:] denoised[1:] = (pywt.threshold(i, value=threshold, mode=threshold_mode) for i in denoised[1:]) return pywt.waverec(denoised, wavelet=wavelet_conf.m_wavelet, mode=padding) @typechecked def cwt_denoise_line_in_time(signal_in: np.ndarray, im_index: int, threshold_function: bool, padding: str, median_filter: bool, wavelet_conf) -> np.ndarray: """ Definition of temporal denoising for CWT. Parameters ---------- signal_in : np.ndarray 1D input signal. Returns ------- np.ndarray 1D output signal. 
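Examples
--------
A minimal sketch of a direct call. In the pipeline, the wavelet
configuration is provided by the time-denoising module; here a simple
namespace object with the three attributes that are accessed by this
function stands in for it, and all values are arbitrary::

    import numpy as np

    from types import SimpleNamespace

    from pynpoint.util.apply_func import cwt_denoise_line_in_time

    wavelet_conf = SimpleNamespace(m_wavelet='dog',
                                   m_wavelet_order=2,
                                   m_resolution=0.5)

    signal = np.random.normal(loc=0., scale=1., size=100)

    denoised = cwt_denoise_line_in_time(signal_in=signal,
                                        im_index=0,
                                        threshold_function=True,
                                        padding='zero',
                                        median_filter=False,
                                        wavelet_conf=wavelet_conf)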
""" cwt_capsule = WaveletAnalysisCapsule(signal_in=signal_in, padding=padding, wavelet_in=wavelet_conf.m_wavelet, order=wavelet_conf.m_wavelet_order, frequency_resolution=wavelet_conf.m_resolution) cwt_capsule.compute_cwt() cwt_capsule.denoise_spectrum(soft=threshold_function) if median_filter: cwt_capsule.median_filter() cwt_capsule.update_signal() return cwt_capsule.get_signal() @typechecked def normalization(image_in: np.ndarray, im_index: int) -> np.ndarray: return image_in - np.median(image_in) @typechecked def time_filter(timeline: np.ndarray, im_index: int, sigma: Tuple[float, float]) -> np.ndarray: median = np.median(timeline) std = np.std(timeline) index_lower = np.argwhere(timeline < median-sigma[0]*std) index_upper = np.argwhere(timeline > median+sigma[1]*std) if index_lower.size > 0: mask = np.ones(timeline.shape, dtype=bool) mask[index_lower] = False timeline[index_lower] = np.mean(timeline[mask]) if index_upper.size > 0: mask = np.ones(timeline.shape, dtype=bool) mask[index_upper] = False timeline[index_upper] = np.mean(timeline[mask]) return timeline # This function cannot by @typechecked because of a compatibility issue with numba @jit(cache=True, nopython=True) def calc_fast_convolution(F_roof_tmp: np.complex128, W: np.ndarray, tmp_s: tuple, N_size: float, tmp_G: np.ndarray, N: Tuple[int, ...]) -> np.ndarray: new = np.zeros(N, dtype=np.complex64) if ((tmp_s[0] == 0) and (tmp_s[1] == 0)) or \ ((tmp_s[0] == N[0] / 2) and (tmp_s[1] == 0)) or \ ((tmp_s[0] == 0) and (tmp_s[1] == N[1] / 2)) or \ ((tmp_s[0] == N[0] / 2) and (tmp_s[1] == N[1] / 2)): for m in range(0, N[0], 1): for j in range(0, N[1], 1): new[m, j] = F_roof_tmp * W[m - tmp_s[0], j - tmp_s[1]] else: for m in range(0, N[0], 1): for j in range(0, N[1], 1): new[m, j] = (F_roof_tmp * W[m - tmp_s[0], j - tmp_s[1]] + np.conjugate(F_roof_tmp) * W[(m + tmp_s[0]) % N[0], (j + tmp_s[1]) % N[1]]) if ((tmp_s[0] == N[0] / 2) and (tmp_s[1] == 0)) or \ ((tmp_s[0] == 0) and (tmp_s[1] == N[1] / 2)) or \ ((tmp_s[0] == N[0] / 2) and (tmp_s[1] == N[1] / 2)): # causes problems, unknown why res = new / float(N_size) else: res = new / float(N_size) tmp_G = tmp_G - res return tmp_G @typechecked def bad_pixel_interpolation(image_in: np.ndarray, bad_pixel_map: np.ndarray, iterations: int) -> np.ndarray: """ Internal function to interpolate bad pixels. Parameters ---------- image_in : np.ndarray Input image. bad_pixel_map : np.ndarray Bad pixel map. iterations : int Number of iterations. Returns ------- np.ndarray Image in which the bad pixels have been interpolated. """ image_in = image_in * bad_pixel_map # for names see ref paper g = copy.deepcopy(image_in) G = np.fft.fft2(g) w = copy.deepcopy(bad_pixel_map) W = np.fft.fft2(w) N = g.shape N_size = float(N[0] * N[1]) F_roof = np.zeros(N, dtype=complex) tmp_G = copy.deepcopy(G) iteration = 0 while iteration < iterations: # 1.) select line using max search and compute conjugate tmp_s = np.unravel_index(np.argmax(abs(tmp_G.real[:, 0: N[1] // 2])), (N[0], N[1] // 2)) tmp_s_conjugate = (np.mod(N[0] - tmp_s[0], N[0]), np.mod(N[1] - tmp_s[1], N[1])) # 2.) compute the new F_roof # special cases s = 0 or s = N/2 no conjugate line exists if ((tmp_s[0] == 0) and (tmp_s[1] == 0)) or \ ((tmp_s[0] == N[0] / 2) and (tmp_s[1] == 0)) or \ ((tmp_s[0] == 0) and (tmp_s[1] == N[1] / 2)) or \ ((tmp_s[0] == N[0] / 2) and (tmp_s[1] == N[1] / 2)): F_roof_tmp = N_size * tmp_G[tmp_s] / W[(0, 0)] # 3.) 
update F_roof F_roof[tmp_s] += F_roof_tmp # conjugate line exists else: a = (np.power(np.abs(W[(0, 0)]), 2)) b = np.power(np.abs(W[(2 * tmp_s[0]) % N[0], (2 * tmp_s[1]) % N[1]]), 2) if a == b: W[(2 * tmp_s[0]) % N[0], (2 * tmp_s[1]) % N[1]] += 0.00000000001 a = (np.power(np.abs(W[(0, 0)]), 2)) b = np.power(np.abs(W[(2 * tmp_s[0]) % N[0], (2 * tmp_s[1]) % N[1]]), 2.0) + 0.01 c = a - b F_roof_tmp = N_size * (tmp_G[tmp_s] * W[(0, 0)] - np.conj(tmp_G[tmp_s]) * W[(2 * tmp_s[0]) % N[0], (2 * tmp_s[1]) % N[1]]) / c # 3.) update F_roof F_roof[tmp_s] += F_roof_tmp F_roof[tmp_s_conjugate] += np.conjugate(F_roof_tmp) # 4.) calc the new error spectrum using fast numba function tmp_G = calc_fast_convolution(F_roof_tmp, W, tmp_s, N_size, tmp_G, N) iteration += 1 return image_in * bad_pixel_map + np.fft.ifft2(F_roof).real * (1 - bad_pixel_map) @typechecked def image_interpolation(image_in: np.ndarray, im_index: int, iterations: int, bad_pixel_map: np.ndarray) -> np.ndarray: return bad_pixel_interpolation(image_in, bad_pixel_map, iterations) @typechecked def replace_pixels(image: np.ndarray, im_index: int, index: np.ndarray, size: int, replace: str) -> np.ndarray: im_mask = np.copy(image) for _, item in enumerate(index): im_mask[item[0], item[1]] = np.nan for _, item in enumerate(index): im_tmp = im_mask[item[0]-size:item[0]+size+1, item[1]-size:item[1]+size+1] if np.size(np.where(im_tmp != np.nan)[0]) == 0: im_mask[item[0], item[1]] = image[item[0], item[1]] else: if replace == 'mean': im_mask[item[0], item[1]] = np.nanmean(im_tmp) elif replace == 'median': im_mask[item[0], item[1]] = np.nanmedian(im_tmp) elif replace == 'nan': im_mask[item[0], item[1]] = np.nan return im_mask # This function cannot by @typechecked because of a compatibility issue with numba @jit(cache=True, nopython=True) def sigma_filter(dev_image: np.ndarray, var_image: np.ndarray, mean_image: np.ndarray, source_image: np.ndarray, out_image: np.ndarray, bad_pixel_map: np.ndarray) -> None: for i in range(source_image.shape[0]): for j in range(source_image.shape[1]): if dev_image[i][j] < var_image[i][j]: out_image[i][j] = source_image[i][j] else: out_image[i][j] = mean_image[i][j] bad_pixel_map[i][j] = 0 return out_image, bad_pixel_map @typechecked def bad_pixel_sigma_filter(image_in: np.ndarray, im_index: int, box: int, sigma: float, iterate: int, map_out_port: Optional[OutputPort]) -> np.ndarray: # Algorithm adapted from http://idlastro.gsfc.nasa.gov/ftp/pro/image/sigma_filter.pro # Initialize bad pixel map bad_pixel_map = np.ones(image_in.shape) while iterate > 0: # Source image source_image = copy.deepcopy(image_in) source_blur = cv2.blur(copy.deepcopy(source_image), (box, box)) # Mean image box2 = box * box mean_image = (source_blur * box2 - source_image) / (box2 - 1) # Squared deviation between mean and source image dev_image = (mean_image - source_image) ** 2 dev_blur = cv2.blur(copy.deepcopy(dev_image), (box, box)) # Compute variance by smoothing the image with the deviations from the mean fact = float(sigma ** 2) / (box2 - 2) var_image = fact * (dev_blur * box2 - dev_image) # Update image_in for the next iteration by setting out_image equal to image_in out_image = image_in # Apply the sigma filter out_image, bad_pixel_map = sigma_filter(dev_image, var_image, mean_image, source_image, out_image, bad_pixel_map) # Subtract 1 from the number of iterations iterate -= 1 if map_out_port is not None: # Write bad pixel map to the database when CPU = 1 map_out_port.append(bad_pixel_map, data_dim=3) return out_image @typechecked def 
apply_shift(image_in: np.ndarray, im_index: int, shift: Union[Tuple[float, float], np.ndarray], interpolation: str) -> np.ndarray: return shift_image(image_in, shift, interpolation) PynPoint-0.11.0/pynpoint/util/attributes.py000066400000000000000000000123351450275315200207560ustar00rootroot00000000000000""" Functions for adding attributes to a dataset in the central database. """ import warnings import numpy as np from astropy.io import fits from typeguard import typechecked from pynpoint.core.attributes import get_attributes from pynpoint.core.dataio import ConfigPort, OutputPort @typechecked def set_static_attr(fits_file: str, header: fits.header.Header, config_port: ConfigPort, image_out_port: OutputPort, check: bool = True) -> None: """ Function which adds the static attributes to the central database. Parameters ---------- fits_file : str Name of the FITS file. header : astropy.io.fits.header.Header Header information from the FITS file that is read. config_port : pynpoint.core.dataio.ConfigPort Configuration port. image_out_port : pynpoint.core.dataio.OutputPort Output port of the images to which the static attributes are stored. check : bool Print a warning if certain attributes from the configuration file are not present in the FITS header. If set to `False`, attributes are still written to the dataset but there will be no warning if a keyword is not found in the FITS header. Returns ------- NoneType None """ attributes = get_attributes() static = [] for key, value in attributes.items(): if value['config'] == 'header' and value['attribute'] == 'static': static.append(key) for attr in static: fitskey = config_port.get_attribute(attr) if isinstance(fitskey, np.bytes_): fitskey = str(fitskey.decode('utf-8')) if fitskey != 'None': if fitskey in header: status = image_out_port.check_static_attribute(attr, header[fitskey]) if status == 1: image_out_port.add_attribute(attr, header[fitskey], static=True) elif status == 0: pass elif status == -1: warnings.warn(f'Static attribute {fitskey} has changed. Possibly the ' f'current file {fits_file} does not belong to the data set ' f'\'{image_out_port.tag}\'. Attribute value is updated.') elif check: warnings.warn(f'Static attribute {attr} (={fitskey}) not found in the FITS ' 'header.') @typechecked def set_nonstatic_attr(header: fits.header.Header, config_port: ConfigPort, image_out_port: OutputPort, check: bool = True) -> None: """ Function which adds the non-static attributes to the central database. Parameters ---------- header : astropy.io.fits.header.Header Header information from the FITS file that is read. config_port : pynpoint.core.dataio.ConfigPort Configuration port. image_out_port : pynpoint.core.dataio.OutputPort Output port of the images to which the non-static attributes are stored. Returns ------- NoneType None """ attributes = get_attributes() nonstatic = [] for key, value in attributes.items(): if value['attribute'] == 'non-static': nonstatic.append(key) for attr in nonstatic: if attributes[attr]['config'] == 'header': fitskey = config_port.get_attribute(attr) # if type(fitskey) == np.bytes_: # fitskey = str(fitskey.decode('utf-8')) if fitskey != 'None': if fitskey in header: image_out_port.append_attribute_data(attr, header[fitskey]) elif header['NAXIS'] == 2 and attr == 'NFRAMES': image_out_port.append_attribute_data(attr, 1) elif check: warnings.warn('Non-static attribute %s (=%s) not found in the ' 'FITS header.' 
% (attr, fitskey)) image_out_port.append_attribute_data(attr, -1) @typechecked def set_extra_attr(fits_file: str, nimages: int, config_port: ConfigPort, image_out_port: OutputPort, first_index: int) -> int: """ Function which adds extra attributes to the central database. Parameters ---------- fits_file : str Absolute path and filename of the FITS file. nimages : int Number of images. config_port : pynpoint.core.dataio.ConfigPort Configuration port. image_out_port : pynpoint.core.dataio.OutputPort Output port of the images to which the attributes are stored. first_index : int First image index of the current subset. Returns ------- int First image index for the next subset. """ pixscale = config_port.get_attribute('PIXSCALE') image_index = np.arange(first_index, first_index+nimages, 1) for item in image_index: image_out_port.append_attribute_data('INDEX', item) image_out_port.append_attribute_data('FILES', fits_file) image_out_port.add_attribute('PIXSCALE', pixscale, static=True) return first_index + nimages PynPoint-0.11.0/pynpoint/util/continuous.py000066400000000000000000000173531450275315200210030ustar00rootroot00000000000000# This code is written by Davide Albanese, # (C) 2011 mlpy Developers. # See: Practical Guide to Wavelet Analysis - C. Torrence and G. P. Compo. # Changes made by the PynPoint developers: # - added type hints and type checks # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . from typing import Union import numpy as np from scipy.special import gamma from typeguard import typechecked @typechecked def normalization(s: Union[np.ndarray, np.generic], dt: int) -> Union[np.ndarray, np.generic]: """" Parameters ---------- s : numpy.ndarray Scales. dt : int Time step. Returns ------- numpy.ndarray Normalized data. """ return np.sqrt((2 * np.pi * s) / dt) @typechecked def morletft(s: np.ndarray, w: np.ndarray, w0: int, dt: int) -> np.ndarray: """" Fourier transformed morlet function. Parameters ---------- s : numpy.ndarray Scales. w : numpy.ndarray Angular frequencies. w0 : int Omega0 frequency. dt : int Time step. Returns ------- numpy.ndarray Normalized Fourier transformed morlet function """ p = 0.75112554446494251 # pi**(-1.0/4.0) pos = w > 0 wavelet = np.zeros((s.shape[0], w.shape[0])) for i in range(s.shape[0]): n = normalization(s[i], dt) wavelet[i][pos] = n * p * np.exp(-(s[i] * w[pos] - w0) ** 2 / 2.0) return wavelet @typechecked def dogft(s: np.ndarray, w: np.ndarray, order: int, dt: int) -> np.ndarray: """ Fourier transformed DOG function. Parameters ---------- s : numpy.ndarray Scales. w : numpy.ndarray Angular frequencies. order : int Wavelet order. dt : int Time step. Returns ------- numpy.ndarray Normalized Fourier transformed DOG function. 
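Examples
--------
Evaluating the transformed wavelet for a short time series (the values
are only meant as an illustration)::

    from pynpoint.util.continuous import angularfreq, autoscales, dogft

    # Scales and angular frequencies for 64 samples with a time step of 1
    scales = autoscales(N=64, dt=1, dj=0.25, wf='dog', p=2)
    omega = angularfreq(N=64, dt=1)

    # The returned array has a shape of (n_scales, n_frequencies)
    wavelet = dogft(s=scales, w=omega, order=2, dt=1)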
""" p = - (0.0 + 1.0j) ** order / np.sqrt(gamma(order + 0.5)) wavelet = np.zeros((s.shape[0], w.shape[0]), dtype=np.complex128) for i in range(s.shape[0]): n = normalization(s[i], dt) h = s[i] * w wavelet[i] = n * p * h ** order * np.exp(-h ** 2 / 2.0) return wavelet @typechecked def angularfreq(N: int, dt: int) -> np.ndarray: """ Compute angular frequencies. Parameters ---------- N : int Number of data samples. dt : int Time step. Returns ------- numpy.ndarray Angular frequencies (1D). """ # See (5) at page 64. N2 = int(N / 2.0) w = np.empty(N) for i in range(w.shape[0]): if i <= N2: w[i] = (2 * np.pi * i) / (N * dt) else: w[i] = (2 * np.pi * (i - N)) / (N * dt) return w @typechecked def autoscales(N: int, dt: int, dj: float, wf: str, p: int) -> np.ndarray: """ Compute scales as fractional power of two. Parameters ---------- N : int Number of data samples. dt : int Time step. dj : float Scale resolution (smaller values of give finer resolution). wf : str Wavelet function ("morlet", "paul", or "dog"). p : int omega0 ("morlet") or order ("paul", "dog"). Returns ------- numpy.ndarray Scales (1D). """ if wf == 'dog': s0 = (dt * np.sqrt(p + 0.5)) / np.pi elif wf == 'morlet': s0 = (dt * (p + np.sqrt(2 + p ** 2))) / (2 * np.pi) else: raise ValueError('Wavelet function not available.') # See (9) and (10) at page 67. J = int(np.floor(dj ** -1 * np.log2((N * dt) / s0))) s = np.empty(J + 1) for i in range(s.shape[0]): s[i] = s0 * 2 ** (i * dj) return s # def fourier_from_scales(scales, wf, p): # """Compute the equivalent fourier period # from scales. # # :Parameters: # scales : list or 1d numpy array # scales # wf : string ('morlet', 'paul', 'dog') # wavelet function # p : float # wavelet function parameter ('omega0' for morlet, 'm' for paul # and dog) # # :Returns: # fourier wavelengths # """ # # scales_arr = np.asarray(scales) # # if wf == 'dog': # return (2 * np.pi * scales_arr) / np.sqrt(p + 0.5) # elif wf == 'morlet': # return (4 * np.pi * scales_arr) / (p + np.sqrt(2 + p ** 2)) # else: # raise ValueError('wavelet function not available') # def scales_from_fourier(f, wf, p): # """Compute scales from fourier period. # # :Parameters: # f : list or 1d numpy array # fourier wavelengths # wf : string ('morlet', 'paul', 'dog') # wavelet function # p : float # wavelet function parameter ('omega0' for morlet, 'm' for paul # and dog) # # :Returns: # scales # """ # # f_arr = np.asarray(f) # # if wf == 'dog': # return (f_arr * np.sqrt(p + 0.5)) / (2 * np.pi) # elif wf == 'morlet': # return (f_arr * (p + np.sqrt(2 + p ** 2))) / (4 * np.pi) # else: # raise ValueError('wavelet function not available') @typechecked def cwt(x: np.ndarray, dt: int, scales: np.ndarray, wf: str = "dog", p: int = 2) -> np.ndarray: """ Continuous Wavelet Transform. Parameters ---------- x : numpy.ndarray Data (1D). dt : int Time step. scales : numpy.ndarray Scales (1D). wf : str Wavelet function ("morlet", "paul", or "dog"). p : int omega0 ("morlet") or order ("paul", "dog"). Returns ------- numpy.ndarray Transformed data (2D). 
""" x_arr = np.asarray(x) - np.mean(x) scales_arr = np.asarray(scales) if x_arr.ndim != 1: raise ValueError('x must be an 1d numpy array of list') if scales_arr.ndim != 1: raise ValueError('scales must be an 1d numpy array of list') w = angularfreq(N=x_arr.shape[0], dt=dt) if wf == 'dog': wft = dogft(s=scales_arr, w=w, order=p, dt=dt) elif wf == 'morlet': wft = morletft(s=scales_arr, w=w, w0=p, dt=dt) else: raise ValueError('wavelet function is not available') X_ARR = np.empty((wft.shape[0], wft.shape[1]), dtype=np.complex128) x_arr_ft = np.fft.fft(x_arr) for i in range(X_ARR.shape[0]): X_ARR[i] = np.fft.ifft(x_arr_ft * wft[i]) return X_ARR @typechecked def icwt(X: np.ndarray, scales: np.ndarray) -> np.ndarray: """ Inverse Continuous Wavelet Transform. The reconstruction factor is not applied. Parameters ---------- X : numpy.ndarray Transformed data (2D). scales : numpy.ndarray Scales (1D). Returns ------- numpy.ndarray 1D data. """ X_arr = np.asarray(X) scales_arr = np.asarray(scales) if X_arr.shape[0] != scales_arr.shape[0]: raise ValueError('X, scales: shape mismatch') # See (11), (13) at page 68 X_ARR = np.empty_like(X_arr) for i in range(scales_arr.shape[0]): X_ARR[i] = X_arr[i] / np.sqrt(scales_arr[i]) return np.sum(np.real(X_ARR), axis=0) PynPoint-0.11.0/pynpoint/util/image.py000066400000000000000000000365761450275315200176670ustar00rootroot00000000000000""" Functions for image processing. """ import math from typing import Optional, Tuple, Union import numpy as np from scipy.ndimage import fourier_shift, shift, rotate from skimage.transform import rescale from typeguard import typechecked @typechecked def center_pixel(image: np.ndarray) -> Tuple[int, int]: """ Function to get the pixel position of the image center. Note that this position can not be unambiguously defined for an even-sized image. Python indexing starts at 0 so the coordinates of the pixel in the bottom-left corner are (0, 0). Parameters ---------- image : np.ndarray Input image (2D or 3D). Returns ------- tuple(int, int) Pixel position (y, x) of the image center. """ if image.shape[-2] % 2 == 0 and image.shape[-1] % 2 == 0: center = (image.shape[-2] // 2 - 1, image.shape[-1] // 2 - 1) elif image.shape[-2] % 2 == 0 and image.shape[-1] % 2 == 1: center = (image.shape[-2] // 2 - 1, (image.shape[-1]-1) // 2) elif image.shape[-2] % 2 == 1 and image.shape[-1] % 2 == 0: center = ((image.shape[-2] - 1) // 2, image.shape[-1] // 2 - 1) elif image.shape[-2] % 2 == 1 and image.shape[-1] % 2 == 1: center = ((image.shape[-2] - 1) // 2, (image.shape[-1] - 1) // 2) else: raise RuntimeError('Unexpected image shape. This error should not occur.') return center @typechecked def center_subpixel(image: np.ndarray) -> Tuple[float, float]: """ Function to get the precise position of the image center. The center of the pixel in the bottom left corner of the image is defined as (0, 0), so the bottom left corner of the image is located at (-0.5, -0.5). Parameters ---------- image : np.ndarray Input image (2D or 3D). Returns ------- tuple(float, float) Subpixel position (y, x) of the image center. """ center_x = float(image.shape[-1]) / 2 - 0.5 center_y = float(image.shape[-2]) / 2 - 0.5 return center_y, center_x @typechecked def crop_image(image: np.ndarray, center: Optional[tuple], size: int, copy: bool = True) -> np.ndarray: """ Function to crop square images around a specified position. Parameters ---------- image : np.ndarray Input image (2D or 3D). center : tuple(int, int), None The new image center (y, x). 
The center of the image is used if set to None. size : int Image size (pix) for both dimensions. Increased by 1 pixel if size is an even number. copy : bool Whether or not to return a copy (instead of a view) of the cropped image (default: True). Returns ------- np.ndarray Cropped odd-sized image (2D or 3D). """ if center is None or (center[0] is None and center[1] is None): center = center_pixel(image) # if image.shape[-1] % 2 == 0: # warnings.warn('The image is even-size so there is not a uniquely defined pixel in ' # 'the center of the image. The image center is determined (with pixel ' # 'precision) with the pynpoint.util.image.center_pixel function.') if size % 2 == 0: size += 1 x_start = center[1] - (size - 1) // 2 x_end = center[1] + (size - 1) // 2 + 1 y_start = center[0] - (size - 1) // 2 y_end = center[0] + (size - 1) // 2 + 1 if x_start < 0 or y_start < 0 or x_end > image.shape[-1] or y_end > image.shape[-2]: raise ValueError('Target image resolution does not fit inside the input image resolution.') return np.array(image[..., y_start:y_end, x_start:x_end], copy=copy) @typechecked def rotate_images(images: np.ndarray, angles: np.ndarray) -> np.ndarray: """ Function to rotate all images in clockwise direction. Parameters ---------- images : np.ndarray Stack of images (3D). angles : np.ndarray Rotation angles (deg). Returns ------- np.ndarray Rotated images. """ im_rot = np.zeros(images.shape) for i, item in enumerate(angles): im_rot[i, ] = rotate(input=images[i, ], angle=item, reshape=False) return im_rot @typechecked def create_mask(im_shape: Tuple[int, int], size: Union[Tuple[float, float], Tuple[float, None], Tuple[None, float], Tuple[None, None]]) -> np.ndarray: """ Function to create a mask for the central and outer image regions. Parameters ---------- im_shape : tuple(int, int) Image size in both dimensions. size : tuple(float, float) Size (pix) of the inner and outer mask. Returns ------- np.ndarray Image mask. """ mask = np.ones(im_shape) npix = im_shape[0] if size[0] is not None or size[1] is not None: if npix % 2 == 0: x_grid = y_grid = np.linspace(-npix / 2 + 0.5, npix / 2 - 0.5, npix) else: x_grid = y_grid = np.linspace(-(npix - 1) / 2, (npix - 1) / 2, npix) xx_grid, yy_grid = np.meshgrid(x_grid, y_grid) rr_grid = np.sqrt(xx_grid**2 + yy_grid**2) if size[0] is not None: mask[rr_grid < size[0]] = 0. if size[1] is not None: if size[1] > npix / 2: size = (size[0], npix / 2) mask[rr_grid > size[1]] = 0. return mask @typechecked def shift_image(image: np.ndarray, shift_yx: Union[Tuple[float, float], np.ndarray], interpolation: str, mode: str = 'constant') -> np.ndarray: """ Function to shift an image. Parameters ---------- image : np.ndarray Input image (2D or 3D). If 3D the image is not shifted along the 0th axis. shift_yx : tuple(float, float), np.ndarray Shift (y, x) to be applied (pix). An additional shift of zero pixels will be added for the first dimension in case the input image is 3D. interpolation : str Interpolation type ('spline', 'bilinear', or 'fft'). mode : str Interpolation mode. Returns ------- np.ndarray Shifted image. 
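    Examples
    --------
    A minimal sketch, with an illustrative array, that applies a shift of
    (y, x) = (2, -1) pixels using spline interpolation:

    >>> import numpy as np
    >>> from pynpoint.util.image import shift_image
    >>> image = np.zeros((11, 11))
    >>> image[5, 5] = 1.
    >>> im_shift = shift_image(image, (2., -1.), interpolation='spline')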
""" if image.ndim == 2: shift_val = (shift_yx[0], shift_yx[1]) elif image.ndim == 3: shift_val = (0, shift_yx[0], shift_yx[1]) else: raise ValueError('Invalid number of dimensions for image: must be 2 or 3') if interpolation == 'spline': im_center = shift(image, shift_val, order=5, mode=mode) elif interpolation == 'bilinear': im_center = shift(image, shift_val, order=1, mode=mode) elif interpolation == 'fft': fft_shift = fourier_shift(np.fft.fftn(image), shift_val) im_center = np.fft.ifftn(fft_shift).real else: raise ValueError('interpolation must be one of the following: spline, bilinear, fft') return im_center @typechecked def scale_image(image: np.ndarray, scaling_y: Union[float, np.float32], scaling_x: Union[float, np.float32]) -> np.ndarray: """ Function to spatially scale an image. Parameters ---------- image : np.ndarray Input image (2D). scaling_y : float Scaling factor y. scaling_x : float Scaling factor x. Returns ------- np.ndarray Shifted image (2D). """ sum_before = np.sum(image) im_scale = rescale(image, (scaling_y, scaling_x), order=5, mode='reflect', channel_axis=None, anti_aliasing=True) sum_after = np.sum(im_scale) return im_scale * (sum_before / sum_after) @typechecked def cartesian_to_polar(center: Tuple[float, float], y_pos: float, x_pos: float) -> Tuple[float, float]: """ Function to convert pixel coordinates to polar coordinates. Parameters ---------- center : tuple(float, float) Image center (y, x) from :func:`~pynpoint.util.image.center_subpixel`. y_pos : float Pixel coordinate along the vertical axis. The bottom left corner of the image is (-0.5, -0.5). x_pos : float Pixel coordinate along the horizontal axis. The bottom left corner of the image is (-0.5, -0.5). Returns ------- tuple(float, float) Separation (pix) and position angle (deg). The angle is measured counterclockwise with respect to the positive y-axis. """ sep = math.sqrt((center[1] - x_pos)**2 + (center[0] - y_pos)**2) ang = math.atan2(y_pos-center[1], x_pos-center[0]) ang = (math.degrees(ang) - 90) % 360 return sep, ang @typechecked def polar_to_cartesian(image: np.ndarray, sep: float, ang: float) -> Tuple[float, float]: """ Function to convert polar coordinates to pixel coordinates. Parameters ---------- image : np.ndarray Input image (2D or 3D). sep : float Separation (pixels). ang : float Position angle (deg), measured counterclockwise with respect to the positive y-axis. Returns ------- tuple(float, float) Cartesian coordinates (y, x). The bottom left corner of the image is (-0.5, -0.5). """ center = center_subpixel(image) # (y, x) x_pos = center[1] + sep * math.cos(math.radians(ang + 90)) y_pos = center[0] + sep * math.sin(math.radians(ang + 90)) return y_pos, x_pos @typechecked def pixel_distance(im_shape: Tuple[int, int], position: Optional[Tuple[int, int]] = None) -> Tuple[ np.ndarray, np.ndarray, np.ndarray]: """ Function to calculate the distance of each pixel with respect to a given pixel position. Supports both odd and even sized images. Parameters ---------- im_shape : tuple(int, int) Image shape (y, x). position : tuple(int, int) Pixel center (y, x) from which the distance is calculated. The image center is used if set to None. Python indexing starts at zero so the center of the bottom left pixel is (0, 0). Returns ------- np.ndarray 2D array with the distances of each pixel from the provided pixel position. np.ndarray 2D array with the x coordinates. np.ndarray 2D array with the y coordinates. 
""" if im_shape[0] % 2 == 0: y_grid = np.linspace(-im_shape[0] / 2 + 0.5, im_shape[0] / 2 - 0.5, im_shape[0]) else: y_grid = np.linspace(-(im_shape[0] - 1) / 2, (im_shape[0] - 1) / 2, im_shape[0]) if im_shape[1] % 2 == 0: x_grid = np.linspace(-im_shape[1] / 2 + 0.5, im_shape[1] / 2 - 0.5, im_shape[1]) else: x_grid = np.linspace(-(im_shape[1] - 1) / 2, (im_shape[1] - 1) / 2, im_shape[1]) if position is not None: y_shift = y_grid[position[0]] x_shift = x_grid[position[1]] y_grid -= y_shift x_grid -= x_shift xx_grid, yy_grid = np.meshgrid(x_grid, y_grid) return np.sqrt(xx_grid**2 + yy_grid**2), xx_grid, yy_grid @typechecked def subpixel_distance(im_shape: Tuple[int, int], position: Tuple[float, float], shift_center: bool = True) -> np.ndarray: """ Function to calculate the distance of each pixel with respect to a given subpixel position. Supports both odd and even sized images. Parameters ---------- im_shape : tuple(int, int) Image shape (y, x). position : tuple(float, float) Pixel center (y, x) from which the distance is calculated. Python indexing starts at zero so the bottom left image corner is (-0.5, -0.5). shift_center : bool Apply the coordinate correction for the image center. Returns ------- np.ndarray 2D array with the distances of each pixel from the provided pixel position. """ # Get 2D x and y coordinates with respect to the image center _, xx_grid, yy_grid = pixel_distance(im_shape, position=None) if im_shape[0] % 2 == 0: # Distance from the image center to the center of the outermost pixel # Even sized images y_size = im_shape[0] / 2 + 0.5 x_size = im_shape[1] / 2 + 0.5 else: # Distance from the image center to the center of the outermost pixel # Odd sized images y_size = (im_shape[0] - 1) / 2 x_size = (im_shape[1] - 1) / 2 if shift_center: # Shift the image center to the center of the bottom left pixel yy_grid += y_size xx_grid += x_size # Apply a subpixel shift of the coordinate system to the requested position yy_grid -= position[0] xx_grid -= position[1] return np.sqrt(xx_grid**2 + yy_grid**2) @typechecked def select_annulus(image_in: np.ndarray, radius_in: float, radius_out: float, mask_position: Optional[Tuple[float, float]] = None, mask_radius: Optional[float] = None) -> np.ndarray: """ image_in : np.ndarray Input image. radius_in : float Inner radius of the annulus (pix). radius_out : float Outer radius of the annulus (pix). mask_position : tuple(float, float), None Center (pix) position (y, x) in of the circular region that is excluded. Not used if set to None. mask_radius : float, None Radius (pix) of the circular region that is excluded. Not used if set to None. """ im_shape = image_in.shape if im_shape[0] % 2 == 0: y_grid = np.linspace(-im_shape[0] / 2 + 0.5, im_shape[0] / 2 - 0.5, im_shape[0]) else: y_grid = np.linspace(-(im_shape[0] - 1) / 2, (im_shape[0] - 1) / 2, im_shape[0]) if im_shape[1] % 2 == 0: x_grid = np.linspace(-im_shape[1] / 2 + 0.5, im_shape[1] / 2 - 0.5, im_shape[1]) else: x_grid = np.linspace(-(im_shape[1] - 1) / 2, (im_shape[1] - 1) / 2, im_shape[1]) xx_grid, yy_grid = np.meshgrid(x_grid, y_grid) rr_grid = np.sqrt(xx_grid**2 + yy_grid**2) mask = np.ones(im_shape) indices = np.where((rr_grid < radius_in) | (rr_grid > radius_out)) mask[indices[0], indices[1]] = 0. if mask_position is not None and mask_radius is not None: distance = subpixel_distance(im_shape=im_shape, position=mask_position) indices = np.where(distance < mask_radius) mask[indices[0], indices[1]] = 0. indices = np.where(mask == 1.) 
return image_in[indices[0], indices[1]] @typechecked def rotate_coordinates(center: Tuple[float, float], position: Union[Tuple[float, float], np.ndarray], angle: float) -> Tuple[float, float]: """ Function to rotate coordinates around the image center. Parameters ---------- center : tuple(float, float) Image center (y, x) with subpixel accuracy. position : tuple(float, float) Position (y, x) in the image, or a 2D numpy array of positions. angle : float Angle (deg) to rotate in counterclockwise direction. Returns ------- tuple(float, float) New position (y, x). """ pos_y = (position[1] - center[1]) * math.sin(np.radians(angle)) + \ (position[0] - center[0]) * math.cos(np.radians(angle)) pos_x = (position[1] - center[1]) * math.cos(np.radians(angle)) - \ (position[0] - center[0]) * math.sin(np.radians(angle)) return center[0]+pos_y, center[1]+pos_x PynPoint-0.11.0/pynpoint/util/limits.py000066400000000000000000000152631450275315200200740ustar00rootroot00000000000000""" Functions for calculating detection limits. """ import math from typing import Tuple import numpy as np from scipy.stats import t from photutils.aperture import aperture_photometry, CircularAperture from typeguard import typechecked from pynpoint.util.analysis import student_t, fake_planet,\ compute_aperture_flux_elements from pynpoint.util.image import polar_to_cartesian, center_subpixel from pynpoint.util.psf import pca_psf_subtraction from pynpoint.util.residuals import combine_residuals @typechecked def contrast_limit(path_images: str, path_psf: str, noise: np.ndarray, mask: np.ndarray, parang: np.ndarray, psf_scaling: float, extra_rot: float, pca_number: int, threshold: Tuple[str, float], aperture: float, residuals: str, snr_inject: float, position: Tuple[float, float]) -> Tuple[float, float, float, float]: """ Function for calculating the contrast limit at a specified position for a given sigma level or false positive fraction, both corrected for small sample statistics. Parameters ---------- path_images : str System location of the stack of images (3D). path_psf : str System location of the PSF template for the fake planet (3D). Either a single image or a stack of images equal in size to science data. noise : numpy.ndarray Residuals of the PSF subtraction (3D) without injection of fake planets. Used to measure the noise level with a correction for small sample statistics. mask : numpy.ndarray Mask (2D). parang : numpy.ndarray Derotation angles (deg). psf_scaling : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). Should have a positive value. extra_rot : float Additional rotation angle of the images in clockwise direction (deg). pca_number : int Number of principal components used for the PSF subtraction. threshold : tuple(str, float) Detection threshold for the contrast curve, either in terms of 'sigma' or the false positive fraction (FPF). The value is a tuple, for example provided as ('sigma', 5.) or ('fpf', 1e-6). Note that when sigma is fixed, the false positive fraction will change with separation. Also, sigma only corresponds to the standard deviation of a normal distribution at large separations (i.e., large number of samples). aperture : float Aperture radius (pix) for the calculation of the false positive fraction. residuals : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). snr_inject : float Signal-to-noise ratio of the injected planet signal that is used to measure the amount of self-subtraction. 
position : tuple(float, float) The separation (pix) and position angle (deg) of the fake planet. Returns ------- float Separation (pix). float Position angle (deg). float Contrast (mag). float False positive fraction. """ images = np.load(path_images) psf = np.load(path_psf) # Cartesian coordinates of the fake planet yx_fake = polar_to_cartesian(images, position[0], position[1]-extra_rot) # Determine the noise level noise_apertures = compute_aperture_flux_elements(image=noise[0, ], x_pos=yx_fake[1], y_pos=yx_fake[0], size=aperture, ignore=False) t_noise = np.std(noise_apertures, ddof=1) * \ math.sqrt(1 + 1 / (noise_apertures.shape[0])) # get sigma from fpf or fpf from sigma # Note that the number of degrees of freedom is given by nu = n-1 with n the number of samples. # See Section 3 of Mawet et al. (2014) for more details on the Student's t distribution. if threshold[0] == 'sigma': sigma = threshold[1] # Calculate the FPF for a given sigma level fpf = t.sf(sigma, noise_apertures.shape[0] - 1, loc=0., scale=1.) elif threshold[0] == 'fpf': fpf = threshold[1] # Calculate the sigma level for a given FPF sigma = t.isf(fpf, noise_apertures.shape[0] - 1, loc=0., scale=1.) else: raise ValueError('Threshold type not recognized.') # Aperture properties im_center = center_subpixel(images) # Measure the flux of the star ap_phot = CircularAperture((im_center[1], im_center[0]), aperture) phot_table = aperture_photometry(psf_scaling*psf[0, ], ap_phot, method='exact') star = phot_table['aperture_sum'][0] # Magnitude of the injected planet flux_in = snr_inject*t_noise mag = -2.5*math.log10(flux_in/star) # Inject the fake planet fake = fake_planet(images=images, psf=psf, parang=parang, position=(position[0], position[1]), magnitude=mag, psf_scaling=psf_scaling) # Run the PSF subtraction _, im_res = pca_psf_subtraction(images=fake*mask, angles=-1.*parang+extra_rot, pca_number=pca_number) # Stack the residuals im_res = combine_residuals(method=residuals, res_rot=im_res) flux_out_frame = im_res[0, ] - noise[0, ] # Measure the flux of the fake planet after PCA # the first element is the planet flux_out = compute_aperture_flux_elements(image=flux_out_frame, x_pos=yx_fake[1], y_pos=yx_fake[0], size=aperture, ignore=False)[0] # Calculate the amount of self-subtraction attenuation = flux_out/flux_in # the throughput can not be negative. However, this can happen due to numerical inaccuracies if attenuation < 0: attenuation = 0 # Calculate the detection limit contrast = (sigma*t_noise + np.mean(noise_apertures))/(attenuation*star) # The flux_out can be negative, for example if the aperture includes self-subtraction regions if contrast > 0.: contrast = -2.5*math.log10(contrast) else: contrast = np.nan # Separation [pix], position angle [deg], contrast [mag], FPF return position[0], position[1], contrast, fpf PynPoint-0.11.0/pynpoint/util/mcmc.py000066400000000000000000000124131450275315200175040ustar00rootroot00000000000000""" Functions for MCMC sampling. 
""" import math from typing import Optional, Tuple import numpy as np from typeguard import typechecked from pynpoint.util.analysis import fake_planet, merit_function from pynpoint.util.psf import pca_psf_subtraction from pynpoint.util.residuals import combine_residuals @typechecked def lnprob(param: np.ndarray, bounds: Tuple[Tuple[float, float], Tuple[float, float], Tuple[float, float]], images: np.ndarray, psf: np.ndarray, mask: np.ndarray, parang: np.ndarray, psf_scaling: float, pixscale: float, pca_number: int, extra_rot: float, aperture: Tuple[int, int, float], indices: np.ndarray, merit: str, residuals: str, var_noise: Optional[float]) -> float: """ Function for the log posterior function. Should be placed at the highest level of the Python module to be pickable for the multiprocessing. Parameters ---------- param : numpy.ndarray The separation (arcsec), angle (deg), and contrast (mag). The angle is measured in counterclockwise direction with respect to the positive y-axis. bounds : tuple(tuple(float, float), tuple(float, float), tuple(float, float)) The boundaries of the separation (arcsec), angle (deg), and contrast (mag). Each set of boundaries is specified as a tuple. images : numpy.ndarray Stack with images. psf : numpy.ndarray PSF template, either a single image (2D) or a cube (3D) with the dimensions equal to *images*. mask : numpy.ndarray Array with the circular mask (zeros) of the central and outer regions. parang : numpy.ndarray Array with the angles for derotation. psf_scaling : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). Should be negative in order to inject negative fake planets. pixscale : float Additional scaling factor of the planet flux (e.g., to correct for a neutral density filter). Should be negative in order to inject negative fake planets. pca_number : int Number of principal components used for the PSF subtraction. extra_rot : float Additional rotation angle of the images (deg). aperture : tuple(int, int, float) Position (y, x) of the aperture center (pix) and aperture radius (pix). indices : numpy.ndarray Non-masked image indices. merit : str Figure of merit that is used for the likelihood function ('gaussian' or 'poisson'). Pixels are assumed to be independent measurements which are expected to be equal to zero in case the best-fit negative PSF template is injected. With 'gaussian', the variance is estimated from the pixel values within an annulus at the separation of the aperture (but excluding the pixels within the aperture). With 'poisson', a Poisson distribution is assumed for the variance of each pixel value. residuals : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). var_noise : float, None Variance of the noise which is required when `merit` is set to 'gaussian' or 'hessian'. Returns ------- float Log posterior probability. """ @typechecked def _lnprior() -> float: """ Internal function for the log prior function. Returns ------- float Log prior. """ if bounds[0][0] <= param[0] <= bounds[0][1] and \ bounds[1][0] <= param[1] <= bounds[1][1] and \ bounds[2][0] <= param[2] <= bounds[2][1]: ln_prior = 0. else: ln_prior = -np.inf return ln_prior @typechecked def _lnlike() -> float: """ Internal function for the log likelihood function. Returns ------- float Log likelihood. 
""" sep, ang, mag = param fake = fake_planet(images=images, psf=psf, parang=parang-extra_rot, position=(sep/pixscale, ang), magnitude=mag, psf_scaling=psf_scaling) im_res_rot, im_res_derot = pca_psf_subtraction(images=fake*mask, angles=-1.*parang+extra_rot, pca_number=pca_number, indices=indices) res_stack = combine_residuals(method=residuals, res_rot=im_res_derot, residuals=im_res_rot, angles=parang) chi_square = merit_function(residuals=res_stack[0, ], merit=merit, aperture=aperture, sigma=0., var_noise=var_noise) return -0.5*chi_square ln_prior = _lnprior() if math.isinf(ln_prior): ln_prob = -np.inf else: ln_prob = ln_prior + _lnlike() return ln_prob PynPoint-0.11.0/pynpoint/util/module.py000066400000000000000000000242361450275315200200600ustar00rootroot00000000000000""" Functions for Pypeline modules. """ import sys import time import math import cmath import warnings from typing import Dict, Optional, Tuple, TYPE_CHECKING, Union import numpy as np from typeguard import typechecked # The following is needed to avoid circular dependencies: # The PypelineModule uses methods from module.py, but methods in this file also use PypelineModule # for their type hints. If we were to simply import the PypelineModule class here, this circular # dependency would lead to an ImportError at runtime. By using the TYPE_CHECKING flag, we can avoid # this: because TYPE_CHECKING is always False at runtime, there are no ImportErrors, while the # PypelineModule is still available for static type checkers (i.e., not for the typeguard library). # In Python 3.7, this problem can be circumvented more elegantly by using: # >>> from __future__ import annotations # This changes the behavior of type hints such that they are no longer evaluated at definition time, # see also PEP 563. In Python 4.0, this is supposed to become the new default behavior. if TYPE_CHECKING: from pynpoint.core.processing import PypelineModule @typechecked def progress(current: int, total: int, message: str, start_time: Optional[float] = None) -> None: """ Function to show and update the progress as standard output. Parameters ---------- current : int Current index. total : int Total index number. message : str Message that is printed. start_time : float, None, optional Start time in seconds. Not used if set to None. Returns ------- NoneType None """ @typechecked def time_string(delta_time: float) -> str: """ Converts to input time in seconds to a string which displays as hh:mm:ss. Parameters ---------- delta_time : float Input time in seconds. Returns ------- str: String with the formatted time. """ hours = int(delta_time / 3600.) minutes = int((delta_time % 3600.) / 60.) seconds = int(delta_time % 60.) return f'{hours:>02}:{minutes:>02}:{seconds:>02}' fraction = float(current) / float(total) percentage = 100.*fraction if start_time is None: sys.stdout.write(f'\r{message} {percentage:4.1f}% \r') else: if fraction > 0. and current+1 != total: time_taken = time.time() - start_time time_left = time_taken / fraction * (1. - fraction) sys.stdout.write(f'{message} {percentage:4.1f}% - ETA: {time_string(time_left)}\r') if current+1 == total: sys.stdout.write((29 + len(message)) * ' ' + '\r') sys.stdout.write(message+' [DONE]\n') sys.stdout.flush() @typechecked def memory_frames(memory: Union[int, np.int64], nimages: int) -> np.ndarray: """ Function to subdivide the input images is in quantities of MEMORY. Parameters ---------- memory : int Number of images that is simultaneously loaded into the memory. nimages : int Number of images in the stack. 
Returns ------- numpy.ndarray Array with the indices where a stack of images is subdivided. """ if memory == 0 or memory >= nimages: frames = np.asarray([0, nimages]) else: frames = np.linspace(start=0, stop=nimages - nimages % memory, num=int(float(nimages)/float(memory))+1, endpoint=True, dtype=np.int64) if nimages % memory > 0: frames = np.append(frames, nimages) return frames @typechecked def angle_average(angles: np.ndarray) -> float: """ Function to calculate the average value of a list of angles. Parameters ---------- angles : numpy.ndarray Parallactic angles (deg). Returns ------- float Average angle (deg). """ cmath_rect = sum(cmath.rect(1, math.radians(ang)) for ang in angles) cmath_phase = cmath.phase(cmath_rect/len(angles)) return math.degrees(cmath_phase) @typechecked def angle_difference(angle_1: float, angle_2: float) -> float: """ Function to calculate the difference between two angles. Parameters ---------- angle_1 : float First angle (deg). angle_2 : float Second angle (deg). Returns ------- float Angle difference (deg). """ angle_diff = (angle_1-angle_2) % 360. if angle_diff >= 180.: angle_diff -= 360. return angle_diff @typechecked def stack_angles(memory: Union[int, np.int64], parang: np.ndarray, max_rotation: float) -> np.ndarray: """ Function to subdivide the input images is in quantities of MEMORY with a restriction on the maximum field rotation across a subset of images. Parameters ---------- memory : int Number of images that is simultaneously loaded into the memory. parang : numpy.ndarray Parallactic angles (deg). max_rotation : float Maximum field rotation (deg). Returns ------- numpy.ndarray Array with the indices where a stack of images is subdivided. """ warnings.warn('Testing of util.module.stack_angles has been limited, please use carefully.') nimages = parang.size if memory == 0 or memory >= nimages: frames = [0, nimages] else: frames = [0, ] parang_start = parang[0] im_count = 0 for i in range(1, parang.size): abs_start_diff = abs(angle_difference(parang_start, parang[i-1])) abs_current_diff = abs(angle_difference(parang[i], parang[i-1])) if abs_start_diff > max_rotation or abs_current_diff > max_rotation: frames.append(i) parang_start = parang[i] im_count = 0 else: im_count += 1 if im_count == memory: frames.append(i) if i < parang.size-1: parang_start = parang[i+1] im_count = 0 if frames[-1] != nimages: frames.append(nimages) return np.asarray(frames) @typechecked def update_arguments(index: int, nimages: int, args_in: Optional[tuple]) -> Optional[tuple]: """ Function to update the arguments of an input function. Specifically, arguments which contain an array with the first dimension equal in size to the total number of images will be substituted by the array element of the image index. Parameters ---------- index : int Image index in the stack. nimages : int Total number of images in the stack. args_in : tuple, None Function arguments that have to be updated. Returns ------- tuple, None Updated function arguments. """ if args_in is None: args_out = None else: args_out = [] for item in args_in: if isinstance(item, np.ndarray) and item.shape[0] == nimages: args_out.append(item[index]) else: args_out.append(item) args_out = tuple(args_out) return args_out # This function *cannot* be decorated with @typechecked, because the typeguard library checks type # hints at *runtime*, when PypelineModule is not available without causing circular dependencies. def module_info(pipeline_module: 'PypelineModule') -> None: """ Function to print the module name. 
Parameters ---------- pipeline_module : PypelineModule Pipeline module. Returns ------- NoneType None """ module_name = type(pipeline_module).__name__ str_length = len(module_name) print('\n' + str_length * '-') print(module_name) print(str_length * '-' + '\n') print(f'Module name: {pipeline_module._m_name}') # This function *cannot* be decorated with @typechecked, because the typeguard library checks type # hints at *runtime*, when PypelineModule is not available without causing circular dependencies. def input_info(pipeline_module: 'PypelineModule') -> None: """ Function to print information about the input data. Parameters ---------- pipeline_module : PypelineModule Pipeline module. Returns ------- NoneType None """ input_ports = list(pipeline_module._m_input_ports.keys()) if len(input_ports) == 1: input_shape = pipeline_module._m_input_ports[input_ports[0]].get_shape() print(f'Input port: {input_ports[0]} {input_shape}') else: print('Input ports:', end='') for i, item in enumerate(input_ports): input_shape = pipeline_module._m_input_ports[input_ports[i]].get_shape() if i < len(input_ports) - 1: print(f' {item} {input_shape},', end='') else: print(f' {item} {input_shape}') # This function *cannot* be decorated with @typechecked, because the typeguard library checks type # hints at *runtime*, when PypelineModule is not available without causing circular dependencies. def output_info(pipeline_module: 'PypelineModule', output_shape: Dict[str, Tuple[int, ...]]) -> None: """ Function to print information about the output data. Parameters ---------- pipeline_module : PypelineModule Pipeline module. output_shape : dict Dictionary with the output dataset names and shapes. Returns ------- NoneType None """ output_ports = list(pipeline_module._m_output_ports.keys()) if len(output_ports) == 1: if output_ports[0][:11] != 'fits_header': print(f'Output port: {output_ports[0]} {output_shape[output_ports[0]]}') else: print('Output ports:', end='') for i, item in enumerate(output_ports): if i < len(output_ports) - 1: print(f' {item} {output_shape[output_ports[i]]},', end='') else: print(f' {item} {output_shape[output_ports[i]]}') PynPoint-0.11.0/pynpoint/util/multiline.py000066400000000000000000000161711450275315200205740ustar00rootroot00000000000000""" Utilities for multiprocessing of lines in time with the poison pill pattern. """ import multiprocessing from typing import Callable, List, Optional import numpy as np from typeguard import typechecked from pynpoint.core.dataio import InputPort, OutputPort from pynpoint.util.multiproc import TaskInput, TaskResult, TaskCreator, TaskProcessor, \ MultiprocessingCapsule, apply_function class LineReader(TaskCreator): """ Reader of task inputs for :class:`~pynpoint.util.multiline.LineProcessingCapsule`. Continuously read all rows of a dataset and puts them into a task queue. """ @typechecked def __init__(self, data_port_in: InputPort, tasks_queue_in: multiprocessing.JoinableQueue, data_mutex_in: multiprocessing.Lock, num_proc: int, data_length: int) -> None: """ Parameters ---------- data_port_in : pynpoint.core.dataio.InputPort Input port. tasks_queue_in : multiprocessing.queues.JoinableQueue Tasks queue. data_mutex_in : multiprocessing.synchronize.Lock A mutex shared with the writer to ensure that no read and write operations happen at the same time. num_proc : int Number of processors. data_length : int Length of the processed data. 
Returns ------- NoneType None """ super(LineReader, self).__init__(data_port_in, tasks_queue_in, data_mutex_in, num_proc) self.m_data_length = data_length @typechecked def run(self) -> None: """ Returns ------- NoneType None """ n_rows = self.m_data_in_port.get_shape()[1] row_length = int(np.ceil(self.m_data_in_port.get_shape()[1]/float(self.m_num_proc))) i = 0 while i < n_rows: j = min((i + row_length), n_rows) # lock mutex and read data with self.m_data_mutex: self.m_data_in_port._check_status_and_activate() tmp_data = self.m_data_in_port[:, i:j, :] # read rows from i to j self.m_data_in_port.close_port() param = (self.m_data_length, ((None, None, None), (i, j, None), (None, None, None))) self.m_task_queue.put(TaskInput(tmp_data, param)) i = j self.create_poison_pills() class LineTaskProcessor(TaskProcessor): """ Processor of task inputs for :class:`~pynpoint.util.multiline.LineProcessingCapsule`. A processor applies a function on a row of lines in time. """ @typechecked def __init__(self, tasks_queue_in: multiprocessing.JoinableQueue, result_queue_in: multiprocessing.JoinableQueue, function: Callable, function_args: Optional[tuple]) -> None: """ Parameters ---------- tasks_queue_in : multiprocessing.queues.JoinableQueue Tasks queue. result_queue_in : multiprocessing.queues.JoinableQueue Results queue. function : function Input function. function_args : tuple, None Optional function arguments. Returns ------- NoneType None """ super(LineTaskProcessor, self).__init__(tasks_queue_in, result_queue_in) self.m_function = function self.m_function_args = function_args @typechecked def run_job(self, tmp_task: TaskInput) -> TaskResult: """ Parameters ---------- tmp_task : pynpoint.util.multiproc.TaskInput Task input with the subsets of lines and the job parameters. Returns ------- pynpoint.util.multiproc.TaskResult Task result. """ result_arr = np.zeros((tmp_task.m_job_parameter[0], tmp_task.m_input_data.shape[1], tmp_task.m_input_data.shape[2])) count = 0 for i in range(tmp_task.m_input_data.shape[1]): for j in range(tmp_task.m_input_data.shape[2]): result_arr[:, i, j] = apply_function(tmp_task.m_input_data[:, i, j], count, self.m_function, self.m_function_args) count += 1 return TaskResult(result_arr, tmp_task.m_job_parameter[1]) class LineProcessingCapsule(MultiprocessingCapsule): """ Capsule for parallel processing of lines in time with the poison pill pattern. A function is applied in parallel to each line in time, for example as in :class:`~pynpoint.processing.timedenoising.WaveletTimeDenoisingModule`. """ @typechecked def __init__(self, image_in_port: InputPort, image_out_port: OutputPort, num_proc: int, function: Callable, function_args: Optional[tuple], data_length: int) -> None: """ Parameters ---------- image_in_port : pynpoint.core.dataio.InputPort Input port. image_out_port : pynpoint.core.dataio.OutputPort Output port. num_proc : int Number of processors. function : function Input function that is applied to the lines. function_args : tuple, None, optional Function arguments. data_length : int Length of the processed data. 
Returns ------- NoneType None """ self.m_function = function self.m_function_args = function_args self.m_data_length = data_length super(LineProcessingCapsule, self).__init__(image_in_port, image_out_port, num_proc) @typechecked def create_processors(self) -> List[LineTaskProcessor]: """ Returns ------- list(pynpoint.util.multiproc.LineTaskProcessor, ) List with instances of :class:`~pynpoint.util.multiproc.LineTaskProcessor` """ processors = [] for _ in range(self.m_num_proc): processors.append(LineTaskProcessor(tasks_queue_in=self.m_tasks_queue, result_queue_in=self.m_result_queue, function=self.m_function, function_args=self.m_function_args)) return processors @typechecked def init_creator(self, image_in_port: InputPort) -> LineReader: """ Parameters ---------- image_in_port : pynpoint.core.dataio.InputPort Input port from where the subsets of lines are read. Returns ------- pynpoint.util.multiline.LineReader Line reader object. """ return LineReader(image_in_port, self.m_tasks_queue, self.m_data_mutex, self.m_num_proc, self.m_data_length) PynPoint-0.11.0/pynpoint/util/multipca.py000066400000000000000000000406551450275315200204140ustar00rootroot00000000000000""" Capsule for multiprocessing of the PSF subtraction with PCA. Residuals are created in parallel for a range of principal components for which the PCA basis is required as input. """ import sys import multiprocessing from typing import List, Optional, Tuple, Union import numpy as np from typeguard import typechecked from sklearn.decomposition import PCA from pynpoint.core.dataio import OutputPort from pynpoint.util.multiproc import TaskProcessor, TaskCreator, TaskWriter, TaskResult, \ TaskInput, MultiprocessingCapsule, to_slice from pynpoint.util.postproc import postprocessor from pynpoint.util.residuals import combine_residuals class PcaTaskCreator(TaskCreator): """ The TaskCreator of the PCA multiprocessing. Creates one task for each principal component number. Does not require an input port since the data is directly given to the task processors. """ @typechecked def __init__(self, tasks_queue_in: multiprocessing.JoinableQueue, num_proc: int, pca_numbers: Union[np.ndarray, tuple]) -> None: """ Parameters ---------- tasks_queue_in : multiprocessing.queues.JoinableQueue Input task queue. num_proc : int Number of processors. pca_numbers : np.ndarray, tuple Principal components for which the residuals are computed. Returns ------- NoneType None """ super(PcaTaskCreator, self).__init__(None, tasks_queue_in, None, num_proc) self.m_pca_numbers = pca_numbers @typechecked def run(self) -> None: """ Run method of PcaTaskCreator. Returns ------- NoneType None """ if isinstance(self.m_pca_numbers, tuple): for i, pca_first in enumerate(self.m_pca_numbers[0]): for j, pca_secon in enumerate(self.m_pca_numbers[1]): parameters = (((i, i+1, None), (j, j+1, None), (None, None, None)), ) self.m_task_queue.put(TaskInput(tuple((pca_first, pca_secon)), parameters)) self.create_poison_pills() else: for i, pca_number in enumerate(self.m_pca_numbers): parameters = (((i, i+1, None), (None, None, None), (None, None, None)), ) self.m_task_queue.put(TaskInput(pca_number, parameters)) self.create_poison_pills() class PcaTaskProcessor(TaskProcessor): """ The TaskProcessor of the PCA multiprocessing is the core of the parallelization. An instance of this class will calculate one forward and backward PCA transformation given the pre-trained scikit-learn PCA model. 
It does not get data from the TaskCreator but uses its own copy of the input data, which are the same and independent for each task. The following residuals can be created: * Mean residuals -- requirements[0] = True * Median residuals -- requirements[1] = True * Noise-weighted residuals -- requirements[2] = True * Clipped mean of the residuals -- requirements[3] = True """ @typechecked def __init__(self, tasks_queue_in: multiprocessing.JoinableQueue, result_queue_in: multiprocessing.JoinableQueue, star_reshape: np.ndarray, angles: np.ndarray, scales: Optional[np.ndarray], pca_model: Optional[PCA], im_shape: tuple, indices: Optional[np.ndarray], requirements: Tuple[bool, bool, bool, bool], processing_type: str) -> None: """ Parameters ---------- tasks_queue_in : multiprocessing.queues.JoinableQueue Input task queue. result_queue_in : multiprocessing.queues.JoinableQueue Input result queue. star_reshape : np.ndarray Reshaped (2D) stack of images. angles : np.ndarray Derotation angles (deg). scales : np.ndarray scaling factors pca_model : sklearn.decomposition.pca.PCA PCA object with the basis. im_shape : tuple(int, int, int) Original shape of the stack of images. indices : np.ndarray Non-masked image indices. requirements : tuple(bool, bool, bool, bool) Required output residuals. processing_type : str selected processing type. Returns ------- NoneType None """ super(PcaTaskProcessor, self).__init__(tasks_queue_in, result_queue_in) self.m_star_reshape = star_reshape self.m_pca_model = pca_model self.m_angles = angles self.m_scales = scales self.m_im_shape = im_shape self.m_indices = indices self.m_requirements = requirements self.m_processing_type = processing_type @typechecked def run_job(self, tmp_task: TaskInput) -> TaskResult: """ Run method of PcaTaskProcessor. Parameters ---------- tmp_task : pynpoint.util.multiproc.TaskInput Input task. Returns ------- pynpoint.util.multiproc.TaskResult Output residuals. """ # correct data type of pca_number if necessary if isinstance(tmp_task.m_input_data, tuple): pca_number = tmp_task.m_input_data else: pca_number = int(tmp_task.m_input_data) residuals, res_rot = postprocessor(images=self.m_star_reshape, angles=self.m_angles, scales=self.m_scales, pca_number=pca_number, pca_sklearn=self.m_pca_model, im_shape=self.m_im_shape, indices=self.m_indices, processing_type=self.m_processing_type) # differentiate between IFS data or Mono-Wavelength data if res_rot.ndim == 3: res_output = np.zeros((4, res_rot.shape[-2], res_rot.shape[-1])) else: res_output = np.zeros((4, len(self.m_star_reshape), res_rot.shape[-2], res_rot.shape[-1])) if self.m_requirements[0]: res_output[0, ] = combine_residuals(method='mean', res_rot=res_rot) if self.m_requirements[1]: res_output[1, ] = combine_residuals(method='median', res_rot=res_rot) if self.m_requirements[2]: res_output[2, ] = combine_residuals(method='weighted', res_rot=res_rot, residuals=residuals, angles=self.m_angles) if self.m_requirements[3]: res_output[3, ] = combine_residuals(method='clipped', res_rot=res_rot) sys.stdout.write('.') sys.stdout.flush() return TaskResult(res_output, tmp_task.m_job_parameter[0]) class PcaTaskWriter(TaskWriter): """ The TaskWriter of the PCA parallelization. Four different ports are used to save the results of the task processors (mean, median, weighted, and clipped). 
""" @typechecked def __init__(self, result_queue_in: multiprocessing.JoinableQueue, mean_out_port: Optional[OutputPort], median_out_port: Optional[OutputPort], weighted_out_port: Optional[OutputPort], clip_out_port: Optional[OutputPort], data_mutex_in: multiprocessing.Lock, requirements: Tuple[bool, bool, bool, bool]) -> None: """ Constructor of PcaTaskWriter. Parameters ---------- result_queue_in : multiprocessing.queues.JoinableQueue Input result queue. mean_out_port : pynpoint.core.dataio.OutputPort Output port with the mean residuals. Not used if set to None. median_out_port : pynpoint.core.dataio.OutputPort Output port with the median residuals. Not used if set to None. weighted_out_port : pynpoint.core.dataio.OutputPort Output port with the noise-weighted residuals. Not used if set to None. clip_out_port : pynpoint.core.dataio.OutputPort Output port with the clipped mean residuals. Not used if set to None. data_mutex_in : multiprocessing.synchronize.Lock A mutual exclusion variable which ensure that no read and write simultaneously occur. requirements : tuple(bool, bool, bool, bool) Required output residuals. Returns ------- NoneType None """ super(PcaTaskWriter, self).__init__(result_queue_in, None, data_mutex_in) self.m_mean_out_port = mean_out_port self.m_median_out_port = median_out_port self.m_weighted_out_port = weighted_out_port self.m_clip_out_port = clip_out_port self.m_requirements = requirements @typechecked def run(self) -> None: """ Run method of PcaTaskWriter. Writes the residuals to the output ports. Returns ------- NoneType None """ while True: next_result = self.m_result_queue.get() poison_pill_case = self.check_poison_pill(next_result) if poison_pill_case == 1: break if poison_pill_case == 2: continue with self.m_data_mutex: res_slice = to_slice(next_result.m_position) if next_result.m_position[1][0] is None: res_slice = (next_result.m_position[0][0]) else: res_slice = (next_result.m_position[0][0], next_result.m_position[1][0]) if self.m_requirements[0]: self.m_mean_out_port._check_status_and_activate() self.m_mean_out_port[res_slice] = next_result.m_data_array[0] self.m_mean_out_port.close_port() if self.m_requirements[1]: self.m_median_out_port._check_status_and_activate() self.m_median_out_port[res_slice] = next_result.m_data_array[1] self.m_median_out_port.close_port() if self.m_requirements[2]: self.m_weighted_out_port._check_status_and_activate() self.m_weighted_out_port[res_slice] = next_result.m_data_array[2] self.m_weighted_out_port.close_port() if self.m_requirements[3]: self.m_clip_out_port._check_status_and_activate() self.m_clip_out_port[res_slice] = next_result.m_data_array[3] self.m_clip_out_port.close_port() self.m_result_queue.task_done() class PcaMultiprocessingCapsule(MultiprocessingCapsule): """ Capsule for PCA multiprocessing with the poison pill pattern. """ @typechecked def __init__(self, mean_out_port: Optional[OutputPort], median_out_port: Optional[OutputPort], weighted_out_port: Optional[OutputPort], clip_out_port: Optional[OutputPort], num_proc: int, pca_numbers: Union[tuple, np.ndarray], pca_model: Optional[PCA], star_reshape: np.ndarray, angles: np.ndarray, scales: Optional[np.ndarray], im_shape: tuple, indices: Optional[np.ndarray], processing_type: str) -> None: """ Constructor of PcaMultiprocessingCapsule. Parameters ---------- mean_out_port : pynpoint.core.dataio.OutputPort Output port for the mean residuals. median_out_port : pynpoint.core.dataio.OutputPort Output port for the median residuals. 
weighted_out_port : pynpoint.core.dataio.OutputPort Output port for the noise-weighted residuals. clip_out_port : pynpoint.core.dataio.OutputPort Output port for the mean clipped residuals. num_proc : int Number of processors. pca_numbers : np.ndarray Number of principal components. pca_model : sklearn.decomposition.pca.PCA PCA object with the basis. star_reshape : np.ndarray Reshaped (2D) input images. angles : np.ndarray Derotation angles (deg). scales : np.ndarray scaling factors. im_shape : tuple(int, int, int) Original shape of the input images. indices : np.ndarray Non-masked pixel indices. processing_type : str selection of processing type Returns ------- NoneType None """ self.m_mean_out_port = mean_out_port self.m_median_out_port = median_out_port self.m_weighted_out_port = weighted_out_port self.m_clip_out_port = clip_out_port self.m_pca_numbers = pca_numbers self.m_pca_model = pca_model self.m_star_reshape = star_reshape self.m_angles = angles self.m_scales = scales self.m_im_shape = im_shape self.m_indices = indices self.m_processing_type = processing_type self.m_requirements = [False, False, False, False] if self.m_mean_out_port is not None: self.m_requirements[0] = True if self.m_median_out_port is not None: self.m_requirements[1] = True if self.m_weighted_out_port is not None: self.m_requirements[2] = True if self.m_clip_out_port is not None: self.m_requirements[3] = True self.m_requirements = tuple(self.m_requirements) super(PcaMultiprocessingCapsule, self).__init__(None, None, num_proc) @typechecked def create_writer(self, image_out_port: None) -> PcaTaskWriter: """ Method to create an instance of PcaTaskWriter. Parameters ---------- image_out_port : None Output port, not used. Returns ------- pynpoint.util.multipca.PcaTaskWriter PCA task writer. """ return PcaTaskWriter(self.m_result_queue, self.m_mean_out_port, self.m_median_out_port, self.m_weighted_out_port, self.m_clip_out_port, self.m_data_mutex, self.m_requirements) @typechecked def init_creator(self, image_in_port: None) -> PcaTaskCreator: """ Method to create an instance of PcaTaskCreator. Parameters ---------- image_in_port : None Input port, not used. Returns ------- pynpoint.util.multipca.PcaTaskCreator PCA task creator. """ return PcaTaskCreator(self.m_tasks_queue, self.m_num_proc, self.m_pca_numbers) @typechecked def create_processors(self) -> List[PcaTaskProcessor]: """ Method to create a list of instances of PcaTaskProcessor. Returns ------- list(pynpoint.util.multipca.PcaTaskProcessor, ) PCA task processors. """ processors = [] for _ in range(self.m_num_proc): processors.append(PcaTaskProcessor(self.m_tasks_queue, self.m_result_queue, self.m_star_reshape, self.m_angles, self.m_scales, self.m_pca_model, self.m_im_shape, self.m_indices, self.m_requirements, self.m_processing_type)) return processors PynPoint-0.11.0/pynpoint/util/multiproc.py000066400000000000000000000405641450275315200206130ustar00rootroot00000000000000""" Abstract interfaces for multiprocessing applications with the poison pill pattern. """ import multiprocessing from sys import platform from typing import Callable, Optional, Union from abc import ABCMeta, abstractmethod import numpy as np from typeguard import typechecked from pynpoint.core.dataio import InputPort, OutputPort # On macOS, the spawn start method is the default since Python 3.8. # The fork start method should be considered unsafe as it can lead # to crashes of the subprocess according to the documentation. 
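# Note that the start method can be set only once per interpreter session, so it is
# assumed here that no other start method has been set before PynPoint is imported.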
if platform in ['darwin', 'linux']: multiprocessing.set_start_method('fork') class TaskInput: """ Class for tasks that are processed by the :class:`~pynpoint.util.multiproc.TaskProcessor`. """ @typechecked def __init__(self, input_data: Union[np.ndarray, np.integer, tuple], job_parameter: tuple) -> None: """ Parameters ---------- input_data : int, float, np.ndarray Input data for by the :class:`~pynpoint.util.multiproc.TaskProcessor`. job_parameter : tuple Additional data or parameters. Returns ------- NoneType None """ self.m_input_data = input_data self.m_job_parameter = job_parameter class TaskResult: """ Class for results that can be stored by the :class:`~pynpoint.util.multiproc.TaskWriter`. """ @typechecked def __init__(self, data_array: np.ndarray, position: tuple) -> None: """ Parameters ---------- data_array : np.ndarray Array with the results for a given position. position : tuple(tuple(int, int, int), tuple(int, int, int), tuple(int, int, int)) The position where the results will be stored. Returns ------- NoneType None """ self.m_data_array = data_array self.m_position = position class TaskCreator(multiprocessing.Process, metaclass=ABCMeta): """ Abstract interface for :class:`~pynpoint.util.multiproc.TaskCreator` classes. A :class:`~pynpoint.util.multiproc.TaskCreator` creates instances of :class:`~pynpoint.util.multiproc.TaskInput`, which will be processed by the :class:`~pynpoint.util.multiproc.TaskProcessor`, and appends them to the central task queue. In general there is only one :class:`~pynpoint.util.multiproc.TaskCreator` running for a poison pill multiprocessing application. A :class:`~pynpoint.util.multiproc.TaskCreator` communicates with to the :class:`~pynpoint.util.multiproc.TaskWriter` in order to avoid simultaneously access to the central database. """ @typechecked def __init__(self, data_port_in: Optional[InputPort], tasks_queue_in: multiprocessing.JoinableQueue, data_mutex_in: Optional[multiprocessing.Lock], num_proc: int) -> None: """ Parameters ---------- data_port_in : pynpoint.core.dataio.InputPort, None An input port which links to the data that has to be processed. tasks_queue_in : multiprocessing.queues.JoinableQueue The central task queue. data_mutex_in : multiprocessing.synchronize.Lock, None A mutex shared with the writer to ensure that no read and write operations happen at the same time. num_proc : int Maximum number of instances of :class:`~pynpoint.util.multiproc.TaskProcessor` that run simultaneously. Returns ------- NoneType None """ multiprocessing.Process.__init__(self) self.m_data_in_port = data_port_in self.m_task_queue = tasks_queue_in self.m_data_mutex = data_mutex_in self.m_num_proc = num_proc @typechecked def create_poison_pills(self) -> None: """ Creates poison pills for the :class:`~pynpoint.util.multiproc.TaskProcessor` and :class:`~pynpoint.util.multiproc.TaskWriter`. A process will shut down if it receives a poison pill as a new task. This method should be executed at the end of the :func:`~pynpoint.util.multiproc.TaskCreator.run` method. Returns ------- NoneType None """ for _ in range(self.m_num_proc-1): # poison pills self.m_task_queue.put(1) # final poison pill self.m_task_queue.put(None) @abstractmethod @typechecked def run(self) -> None: """ Creates objects of the :class:`~pynpoint.util.multiproc.TaskInput` until all tasks are placed in the task queue. 
Returns ------- NoneType None """ class TaskProcessor(multiprocessing.Process, metaclass=ABCMeta): """ Abstract interface for :class:`~pynpoint.util.multiproc.TaskProcessor` classes. The number of instances of :class:`~pynpoint.util.multiproc.TaskProcessor` that run simultaneously in a poison pill multiprocessing application can be set with ``CPU`` parameter in the central configuration file. A :class:`~pynpoint.util.multiproc.TaskProcessor` takes tasks from a task queue, processes the task, and stores the results back into a result queue. The process will shut down if the next task is a poison pill. The order in which process finish is not fixed. """ @typechecked def __init__(self, tasks_queue_in: multiprocessing.JoinableQueue, result_queue_in: multiprocessing.JoinableQueue) -> None: """ Parameters ---------- tasks_queue_in : multiprocessing.queues.JoinableQueue The input task queue with instances of :class:`~pynpoint.util.multiproc.TaskInput`. result_queue_in : multiprocessing.queues.JoinableQueue The result task queue with instances of :class:`~pynpoint.util.multiproc.TaskResult`. Returns ------- NoneType None """ multiprocessing.Process.__init__(self) self.m_task_queue = tasks_queue_in self.m_result_queue = result_queue_in @typechecked def check_poison_pill(self, next_task: Union[TaskInput, int, None]) -> bool: """ Function to check if the next task is a poison pill. Parameters ---------- next_task : int, None, pynpoint.util.multiproc.TaskInput The next task. Returns ------- bool True if the next task is a poison pill, False otherwise. """ if next_task == 1: # poison pill poison_pill = True self.m_task_queue.task_done() elif next_task is None: # final poison pill poison_pill = True # shut down writer process self.m_result_queue.put(None) self.m_task_queue.task_done() else: # no poison pill poison_pill = False return poison_pill @typechecked def run(self) -> None: """ Run method to start the :class:`~pynpoint.util.multiproc.TaskProcessor`. The run method will continue to process tasks from the input task queue until it receives a poison pill. Returns ------- NoneType None """ while True: next_task = self.m_task_queue.get() if self.check_poison_pill(next_task): break result = self.run_job(next_task) self.m_task_queue.task_done() self.m_result_queue.put(result) @abstractmethod @typechecked def run_job(self, tmp_task: TaskInput) -> None: """ Abstract interface for the :func:`~pynpoint.util.multiproc.TaskProcessor.run_job` method which is called from the :func:`~pynpoint.util.multiproc.TaskProcessor.run` method for each task individually. Parameters ---------- tmp_task : pynpoint.util.multiproc.TaskInput Input task. Returns ------- NoneType None """ class TaskWriter(multiprocessing.Process): """ The :class:`~pynpoint.util.multiproc.TaskWriter` receives results from the result queue, which have been computed by a :class:`~pynpoint.util.multiproc.TaskProcessor`, and stores the results in the central database. The position parameter of the :class:`~pynpoint.util.multiproc.TaskResult` is used to slice the result to the correct position in the complete output dataset. """ @typechecked def __init__(self, result_queue_in: multiprocessing.JoinableQueue, data_out_port_in: Optional[OutputPort], data_mutex_in: multiprocessing.Lock) -> None: """ Parameters ---------- result_queue_in : multiprocessing.queues.JoinableQueue The result queue. data_out_port_in : pynpoint.core.dataio.OutputPort, None The output port where the results will be stored. 
data_mutex_in : multiprocessing.synchronize.Lock A mutex that is shared with the :class:`~pynpoint.util.multiproc.TaskWriter` which ensures that read and write operations to the database do not occur simultaneously. Returns ------- NoneType None """ multiprocessing.Process.__init__(self) self.m_result_queue = result_queue_in self.m_data_mutex = data_mutex_in self.m_data_out_port = data_out_port_in @typechecked def check_poison_pill(self, next_result: Union[TaskResult, None]) -> int: """ Function to check if the next result is a poison pill. Parameters ---------- next_result : None, pynpoint.util.multiproc.TaskResult The next result. Returns ------- int 0 -> no poison pill, 1 -> poison pill, 2 -> poison pill but still results in the queue (rare error case). """ if next_result is None: # check if there are results after the poison pill if self.m_result_queue.empty(): poison_pill = 1 # shut down the writer self.m_result_queue.task_done() else: poison_pill = 2 # put back the poison pill for the moment self.m_result_queue.put(None) self.m_result_queue.task_done() else: poison_pill = 0 return poison_pill @typechecked def run(self) -> None: """ Run method of the :class:`~pynpoint.util.multiproc.TaskWriter`. It is called once when it has to start storing the results until it receives a poison pill. Returns ------- NoneType None """ while True: next_result = self.m_result_queue.get() poison_pill_case = self.check_poison_pill(next_result) if poison_pill_case == 1: break if poison_pill_case == 2: continue with self.m_data_mutex: self.m_data_out_port._check_status_and_activate() self.m_data_out_port[to_slice(next_result.m_position)] = next_result.m_data_array self.m_data_out_port.close_port() self.m_result_queue.task_done() class MultiprocessingCapsule(metaclass=ABCMeta): """ Abstract interface for multiprocessing capsules based on the poison pill pattern. """ @typechecked def __init__(self, image_in_port: Optional[InputPort], image_out_port: Optional[OutputPort], num_proc: int) -> None: """ Parameters ---------- image_in_port : pynpoint.core.dataio.InputPort, None Port to the input data. image_out_port : pynpoint.core.dataio.OutputPort, None Port to the place where the output data will be stored. num_proc : int Number of task processors. Returns ------- NoneType None """ # buffer twice the data as processes are available self.m_tasks_queue = multiprocessing.JoinableQueue(maxsize=num_proc) self.m_result_queue = multiprocessing.JoinableQueue(maxsize=num_proc) self.m_num_proc = num_proc # database mutex self.m_data_mutex = multiprocessing.Lock() # create reader self.m_creator = self.init_creator(image_in_port) # create processors self.m_task_processors = self.create_processors() # create writer self.m_writer = self.create_writer(image_out_port) @abstractmethod @typechecked def create_processors(self) -> None: """ Function that is called from the constructor to create a list of instances of :class:`~pynpoint.util.multiproc.TaskProcessor`. Returns ------- NoneType None """ @abstractmethod @typechecked def init_creator(self, image_in_port: Optional[InputPort]) -> None: """ Function that is called from the constructor to create a :class:`~pynpoint.util.multiproc.TaskCreator`. Parameters ---------- image_in_port : pynpoint.core.dataio.InputPort, None Input port for the task creator. 
Returns ------- NoneType None """ @typechecked def create_writer(self, image_out_port: Optional[OutputPort]) -> TaskWriter: """ Function that is called from the constructor to create the :class:`~pynpoint.util.multiproc.TaskWriter`. Parameters ---------- image_out_port : pynpoint.core.dataio.OutputPort, None Output port for the creator. Returns ------- pynpoint.util.multiproc.TaskWriter Task writer. """ return TaskWriter(self.m_result_queue, image_out_port, self.m_data_mutex) @typechecked def run(self) -> None: """ Run method that starts the :class:`~pynpoint.util.multiproc.TaskCreator`, the instances of :class:`~pynpoint.util.multiproc.TaskProcessor`, and the :class:`~pynpoint.util.multiproc.TaskWriter`. They will be shut down when all tasks have finished. Returns ------- NoneType None """ # start all processes self.m_creator.start() for processor in self.m_task_processors: processor.start() self.m_writer.start() # wait for all tasks to have finished self.m_tasks_queue.join() self.m_result_queue.join() # clean up the processes for processor in self.m_task_processors: processor.join() self.m_writer.join() self.m_creator.join() @typechecked def apply_function(tmp_data: np.ndarray, data_index: int, func: Callable, func_args: Optional[tuple]) -> np.ndarray: """ Apply a function with optional arguments to the input data. Parameters ---------- tmp_data : np.ndarray Input data. data_index : int Index of the data subset. When processing a stack of images, the argument of ``data_index`` is the image index in the full stack. func : function Function. func_args : tuple, None Function arguments. Returns ------- np.ndarray The results of the function. """ if func_args is None: result = np.array(func(tmp_data, data_index)) else: result = np.array(func(tmp_data, data_index, *func_args)) return result @typechecked def to_slice(tuple_slice: tuple) -> tuple: """ Function to convert tuples into slices for a multiprocessing queue. Parameters ---------- tuple_slice : tuple Tuple to be converted into a slice. Returns ------- tuple(slice, slice, slice) Tuple with three slices. """ slices = [] for item in tuple_slice: # slice(start, stop step) slices.append(slice(item[0], item[1], item[2])) return tuple(slices) PynPoint-0.11.0/pynpoint/util/multistack.py000066400000000000000000000205131450275315200207450ustar00rootroot00000000000000""" Utilities for multiprocessing of stacks of images. """ import sys import multiprocessing from typing import Callable, List, Optional import numpy as np from typeguard import typechecked from pynpoint.core.dataio import InputPort, OutputPort from pynpoint.util.module import update_arguments from pynpoint.util.multiproc import TaskInput, TaskResult, TaskCreator, TaskProcessor, \ MultiprocessingCapsule, apply_function class StackReader(TaskCreator): """ Reader of task inputs for :class:`~pynpoint.util.multistack.StackProcessingCapsule`. Reads continuously stacks of images of a dataset and puts them into a task queue. """ @typechecked def __init__(self, data_port_in: InputPort, tasks_queue_in: multiprocessing.JoinableQueue, data_mutex_in: multiprocessing.Lock, num_proc: int, stack_size: int, result_shape: tuple) -> None: """ Parameters ---------- data_port_in : pynpoint.core.dataio.InputPort Input port. tasks_queue_in : multiprocessing.queues.JoinableQueue Tasks queue. data_mutex_in : multiprocessing.synchronize.Lock A mutex shared with the writer to ensure that no read and write operations happen at the same time. num_proc : int Number of processors. 
stack_size: int Number of images per stack. result_shape : tuple(int, ) Shape of the array with the output results (usually a stack of images). Returns ------- NoneType None """ super(StackReader, self).__init__(data_port_in, tasks_queue_in, data_mutex_in, num_proc) self.m_stack_size = stack_size self.m_result_shape = result_shape @typechecked def run(self) -> None: """ Returns ------- NoneType None """ with self.m_data_mutex: self.m_data_in_port._check_status_and_activate() nimages = self.m_data_in_port.get_shape()[0] self.m_data_in_port.close_port() i = 0 while i < nimages: j = min((i + self.m_stack_size), nimages) # lock mutex and read data with self.m_data_mutex: self.m_data_in_port._check_status_and_activate() tmp_data = self.m_data_in_port[i:j, ] # read images from i to j self.m_data_in_port.close_port() # first dimension (start, stop, step) stack_slice = [(i, j, None)] # additional dimensions for _ in self.m_result_shape: stack_slice.append((None, None, None)) param = (self.m_result_shape, tuple(stack_slice)) self.m_task_queue.put(TaskInput(tmp_data, param)) i = j self.create_poison_pills() class StackTaskProcessor(TaskProcessor): """ Processor of task inputs for :class:`~pynpoint.util.multistack.StackProcessingCapsule`. A processor applies a function on a stack of images. """ @typechecked def __init__(self, tasks_queue_in: multiprocessing.JoinableQueue, result_queue_in: multiprocessing.JoinableQueue, function: Callable, function_args: Optional[tuple], nimages: int) -> None: """ Parameters ---------- tasks_queue_in : multiprocessing.queues.JoinableQueue Tasks queue. result_queue_in : multiprocessing.queues.JoinableQueue Results queue. function : function Input function that is applied to the images. function_args : tuple, None Function arguments. nimages : int Total number of images. Returns ------- NoneType None """ super(StackTaskProcessor, self).__init__(tasks_queue_in, result_queue_in) self.m_function = function self.m_function_args = function_args self.m_nimages = nimages @typechecked def run_job(self, tmp_task: TaskInput) -> TaskResult: """ Parameters ---------- tmp_task : pynpoint.util.multiproc.TaskInput Task input with the subsets of images and the job parameters. Returns ------- pynpoint.util.multiproc.TaskResult Task result. """ result_nimages = tmp_task.m_input_data.shape[0] result_shape = tmp_task.m_job_parameter[0] # first dimension full_shape = [result_nimages] # additional dimensions for item in result_shape: full_shape.append(item) result_arr = np.zeros(full_shape) for i in range(result_nimages): # job parameter contains (result_shape, tuple(stack_slice)) index = tmp_task.m_job_parameter[1][0][0] + i args = update_arguments(index, self.m_nimages, self.m_function_args) result_arr[i, ] = apply_function(tmp_task.m_input_data[i, ], i, self.m_function, args) sys.stdout.write('.') sys.stdout.flush() return TaskResult(result_arr, tmp_task.m_job_parameter[1]) class StackProcessingCapsule(MultiprocessingCapsule): """ Capsule for parallel processing of stacks of images with the poison pill pattern. A function is applied in parallel to each stack of images. """ @typechecked def __init__(self, image_in_port: InputPort, image_out_port: OutputPort, num_proc: int, function: Callable, function_args: Optional[tuple], stack_size: int, result_shape: tuple, nimages: int) -> None: """ Parameters ---------- image_in_port : pynpoint.core.dataio.InputPort Input port. image_out_port : pynpoint.core.dataio.OutputPort Output port. num_proc : int Number of processors. 
function : function Input function. function_args : tuple, None Function arguments. stack_size: int Number of images per stack. result_shape : tuple(int, ) Shape of the array with output results (usually a stack of images). nimages : int Total number of images. Returns ------- NoneType None """ self.m_function = function self.m_function_args = function_args self.m_stack_size = stack_size self.m_result_shape = result_shape self.m_nimages = nimages super(StackProcessingCapsule, self).__init__(image_in_port, image_out_port, num_proc) @typechecked def create_processors(self) -> List[StackTaskProcessor]: """ Returns ------- list(pynpoint.util.multiproc.StackTaskProcessor, ) List with instances of :class:`~pynpoint.util.multiproc.StackTaskProcessor`. """ processors = [] for _ in range(self.m_num_proc): processors.append(StackTaskProcessor(tasks_queue_in=self.m_tasks_queue, result_queue_in=self.m_result_queue, function=self.m_function, function_args=self.m_function_args, nimages=self.m_nimages)) return processors @typechecked def init_creator(self, image_in_port: InputPort) -> StackReader: """ Parameters ---------- image_in_port : pynpoint.core.dataio.InputPort Input port from where the subsets of images are read. Returns ------- pynpoint.util.multistack.StackReader Reader of stacks of images. """ return StackReader(data_port_in=image_in_port, tasks_queue_in=self.m_tasks_queue, data_mutex_in=self.m_data_mutex, num_proc=self.m_num_proc, stack_size=self.m_stack_size, result_shape=self.m_result_shape) PynPoint-0.11.0/pynpoint/util/postproc.py000066400000000000000000000203551450275315200204420ustar00rootroot00000000000000""" Functions for post-processing. """ from typing import Union, Optional, Tuple import numpy as np from typeguard import typechecked from sklearn.decomposition import PCA from pynpoint.util.psf import pca_psf_subtraction from pynpoint.util.sdi import sdi_scaling @typechecked def postprocessor(images: np.ndarray, angles: np.ndarray, scales: Optional[np.ndarray], pca_number: Union[int, Tuple[Union[int, np.int32, np.int64], Union[int, np.int32, np.int64]]], pca_sklearn: Optional[PCA] = None, im_shape: Union[None, tuple] = None, indices: Optional[np.ndarray] = None, mask: Optional[np.ndarray] = None, processing_type: str = 'ADI'): """ Function to apply different kind of post processings. It is equivalent to :func:`~pynpoint.util.psf.pca_psf_subtraction` if ``processing_type='ADI'` and ``mask=None``. Parameters ---------- images : np.array Input images which should be reduced. angles : np.ndarray Derotation angles (deg). scales : np.array Scaling factors pca_number : tuple(int, int) Number of principal components used for the PSF subtraction. pca_sklearn : sklearn.decomposition.pca.PCA, None PCA object with the basis if not set to None. im_shape : tuple(int, int, int), None Original shape of the stack with images. Required if ``pca_sklearn`` is not set to None. indices : np.ndarray, None Non-masked image indices. All pixels are used if set to None. mask : np.ndarray Mask (2D). processing_type : str Post-processing type: - ADI: Angular differential imaging. - SDI: Spectral differential imaging. - SDI+ADI: Spectral and angular differential imaging. - ADI+SDI: Angular and spectral differential imaging. Returns ------- np.ndarray Residuals of the PSF subtraction. np.ndarray Derotated residuals of the PSF subtraction. """ if not isinstance(pca_number, tuple): pca_number = (pca_number, -1) if mask is None: mask = 1. 
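    # The branches below fill the residual arrays depending on ``processing_type``:
    # 'ADI' applies the PCA-based PSF subtraction directly (per wavelength for
    # 4D input), 'SDI' rescales each exposure with the scaling factors and
    # subtracts along the wavelength axis, 'SDI+ADI' and 'ADI+SDI' chain the two
    # steps with pca_number[0] for the first and pca_number[1] for the second
    # step, and 'CODI' flattens wavelengths and exposures into a single stack
    # that is processed in one pass.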
res_raw = np.zeros(images.shape) res_rot = np.zeros(images.shape) if processing_type == 'ADI': if images.ndim == 2: res_raw, res_rot = pca_psf_subtraction(images=images*mask, angles=angles, scales=None, pca_number=pca_number[0], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) elif images.ndim == 4: for i in range(images.shape[0]): res_raw[i, ], res_rot[i, ] = pca_psf_subtraction(images=images[i, ]*mask, angles=angles, scales=None, pca_number=pca_number[0], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) elif processing_type == 'SDI': for i in range(images.shape[1]): im_scaled = sdi_scaling(images[:, i, :, :], scales) res_raw[:, i], res_rot[:, i] = pca_psf_subtraction(images=im_scaled*mask, angles=np.full(scales.size, angles[i]), scales=scales, pca_number=pca_number[0], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) elif processing_type == 'SDI+ADI': # SDI res_raw_int = np.zeros(res_raw.shape) for i in range(images.shape[1]): im_scaled = sdi_scaling(images[:, i], scales) res_raw_int[:, i], _ = pca_psf_subtraction(images=im_scaled*mask, angles=None, scales=scales, pca_number=pca_number[0], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) # ADI for i in range(images.shape[0]): res_raw[i], res_rot[i] = pca_psf_subtraction(images=res_raw_int[i]*mask, angles=angles, scales=None, pca_number=pca_number[1], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) elif processing_type == 'ADI+SDI': # ADI res_raw_int = np.zeros(res_raw.shape) for i in range(images.shape[0]): res_raw_int[i], _ = pca_psf_subtraction(images=images[i, ]*mask, angles=None, scales=None, pca_number=pca_number[0], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) # SDI for i in range(images.shape[1]): im_scaled = sdi_scaling(res_raw_int[:, i], scales) res_raw[:, i], res_rot[:, i] = pca_psf_subtraction(images=im_scaled*mask, angles=np.full(scales.size, angles[i]), scales=scales, pca_number=pca_number[1], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) elif processing_type == 'CODI': # flatten images from 4D to 3D ims = images.shape im_scaled_flat = np.zeros((ims[0]*ims[1], ims[2], ims[3])) scales_flat = np.zeros((ims[0]*ims[1])) angles_flat = np.zeros((ims[0]*ims[1])) for i in range(ims[1]): im_scaled_flat[i*ims[0]:(i+1)*ims[0]] = sdi_scaling(images[:, i], scales) scales_flat[i*ims[0]:(i+1)*ims[0]] = scales angles_flat[i*ims[0]:(i+1)*ims[0]] = angles[i] # codi res_raw_flat, res_rot_flat = pca_psf_subtraction(images=im_scaled_flat*mask, angles=angles_flat, scales=scales_flat, pca_number=pca_number[0], pca_sklearn=pca_sklearn, im_shape=im_shape, indices=indices) # inflate images from 3D to 4D for i in range(ims[1]): res_raw[:, i] = res_raw_flat[i*ims[0]:(i+1)*ims[0]] res_rot[:, i] = res_rot_flat[i*ims[0]:(i+1)*ims[0]] return res_raw, res_rot PynPoint-0.11.0/pynpoint/util/psf.py000066400000000000000000000134511450275315200173600ustar00rootroot00000000000000""" Functions for PSF subtraction. 
""" from typing import Optional, Union, Tuple import numpy as np from scipy.ndimage import rotate from sklearn.decomposition import PCA from typeguard import typechecked from pynpoint.util.image import scale_image, shift_image @typechecked def pca_psf_subtraction(images: np.ndarray, angles: Optional[np.ndarray], pca_number: Union[int, np.int32, np.int64], scales: Optional[np.ndarray] = None, pca_sklearn: Optional[PCA] = None, im_shape: Optional[tuple] = None, indices: Optional[np.ndarray] = None) -> Tuple[np.ndarray, np.ndarray]: """ Function for PSF subtraction with PCA. Parameters ---------- images : np.ndarray Stack of images. Also used as reference images if ```pca_sklearn``` is set to None. The data should have the original 3D shape if ``pca_sklearn`` is set to None or it should be in a 2D reshaped format if ``pca_sklearn`` is not set to None. angles : np.ndarray Parallactic angles (deg). pca_number : int Number of principal components. scales : np.ndarray, None Scaling factors for SDI. Not used if set to None. pca_sklearn : sklearn.decomposition.pca.PCA, None PCA object with the principal components. im_shape : tuple(int, int, int), None The original 3D shape of the stack with images. Only required if ``pca_sklearn`` is not set to None. indices : np.ndarray, None Array with the indices of the pixels that are used for the PSF subtraction. All pixels are used if set to None. Returns ------- np.ndarray Residuals of the PSF subtraction. np.ndarray Derotated residuals of the PSF subtraction. """ if pca_sklearn is None: # Create a PCA object if not provided as argument pca_sklearn = PCA(n_components=pca_number, svd_solver='arpack') # The 3D shape of the array with images im_shape = images.shape if indices is None: # Select the first image and get the unmasked image indices im_star = images[0, ].reshape(-1) indices = np.where(im_star != 0.)[0] # Reshape the images and select the unmasked pixels im_reshape = images.reshape(im_shape[0], im_shape[1]*im_shape[2]) im_reshape = im_reshape[:, indices] # Subtract the mean image # This is also done by sklearn.decomposition.PCA.fit() im_reshape -= np.mean(im_reshape, axis=0) # Fit the principal components pca_sklearn.fit(im_reshape) else: # If the PCA object is already there then so are the reshaped data im_reshape = np.copy(images) # Project the data on the principal components # Note that this is the same as sklearn.decomposition.PCA.transform() # It is harcoded because the number of components has been adjusted pca_rep = np.matmul(pca_sklearn.components_[:pca_number], im_reshape.T) # The zeros are added with vstack to account for the components that have not been used for the # transformation to the lower-dimensional space, while they were initiated with the PCA object. # Since inverse_transform uses the number of initial components, the zeros are added for # components > pca_number. These components do not impact the inverse transformation. 
zeros = np.zeros((pca_sklearn.n_components - pca_number, im_reshape.shape[0])) pca_rep = np.vstack((pca_rep, zeros)).T # Transform the data back to the original space psf_model = pca_sklearn.inverse_transform(pca_rep) # Create an array with the original shape residuals = np.zeros((im_shape[0], im_shape[1]*im_shape[2])) # Select all pixel indices if set to None if indices is None: indices = np.arange(0, im_reshape.shape[1], 1) # Subtract the PSF model residuals[:, indices] = im_reshape - psf_model # Reshape the residuals to the original shape residuals = residuals.reshape(im_shape) # ----------- back scale images scal_cor = np.zeros(residuals.shape) if scales is not None: # check if the number of parang is equal to the number of images if residuals.shape[0] != scales.shape[0]: raise ValueError(f'The number of images ({residuals.shape[0]}) is not equal to the ' f'number of wavelengths ({scales.shape[0]}).') for i, _ in enumerate(scales): # rescaling the images swaps = scale_image(residuals[i, ], 1/scales[i], 1/scales[i]) npix_del = scal_cor.shape[-1] - swaps.shape[-1] if npix_del == 0: scal_cor[i, ] = swaps else: if npix_del % 2 == 0: npix_del_a = int(npix_del/2) npix_del_b = int(npix_del/2) else: npix_del_a = int((npix_del-1)/2) npix_del_b = int((npix_del+1)/2) scal_cor[i, npix_del_a:-npix_del_b, npix_del_a:-npix_del_b] = swaps if npix_del % 2 == 1: scal_cor[i, ] = shift_image(scal_cor[i, ], (0.5, 0.5), interpolation='spline') else: scal_cor = residuals res_rot = np.zeros(residuals.shape) if angles is not None: # Check if the number of parang is equal to the number of images if residuals.shape[0] != angles.shape[0]: raise ValueError(f'The number of images ({residuals.shape[0]}) is not equal to the ' f'number of parallactic angles ({angles.shape[0]}).') for j, item in enumerate(angles): res_rot[j, ] = rotate(scal_cor[j, ], item, reshape=False) else: res_rot = scal_cor return scal_cor, res_rot PynPoint-0.11.0/pynpoint/util/remove.py000066400000000000000000000117451450275315200200710ustar00rootroot00000000000000""" Functions to write selected data and attributes to the database. """ import time from typing import Optional, Union import numpy as np from typeguard import typechecked from pynpoint.core.dataio import InputPort, OutputPort from pynpoint.util.module import progress, memory_frames @typechecked def write_selected_data(memory: Union[int, np.int64], indices: np.ndarray, image_in_port: InputPort, selected_out_port: Optional[OutputPort], removed_out_port: Optional[OutputPort]) -> None: """ Function to write the selected and removed data. Parameters ---------- memory : int Number of images that is simultaneously loaded into the memory. indices : numpy.ndarray Image indices that will be removed. image_in_port : pynpoint.core.dataio.InputPort Port to the input images. selected_out_port : pynpoint.core.dataio.OutputPort, None Port to store the selected images. No data is written if set to None. removed_out_port : pynpoint.core.dataio.OutputPort, None Port to store the removed images. No data is written if set to None. 
Returns ------- NoneType None """ nimages = image_in_port.get_shape()[0] frames = memory_frames(memory, nimages) if memory == 0 or memory >= nimages: memory = nimages start_time = time.time() for i, _ in enumerate(frames[:-1]): progress(i, len(frames[:-1]), 'Writing selected data...', start_time) images = image_in_port[frames[i]:frames[i+1], ] subset_del = np.where(np.logical_and(indices >= frames[i], indices < frames[i+1]))[0] index_del = indices[subset_del] % memory index_sel = np.ones(images.shape[0], bool) index_sel[index_del] = False if selected_out_port is not None and index_sel.size > 0: selected_out_port.append(images[index_sel]) if removed_out_port is not None and index_del.size > 0: removed_out_port.append(images[index_del]) @typechecked def write_selected_attributes(indices: np.ndarray, image_in_port: InputPort, selected_out_port: Optional[OutputPort], removed_out_port: Optional[OutputPort], module_type: str, history: str) -> None: """ Function to write the attributes of the selected and removed data. Parameters ---------- indices : numpy.ndarray Image indices that will be removed. image_in_port : pynpoint.core.dataio.InputPort Port to the input data. selected_out_port : pynpoint.core.dataio.OutputPort, None Port to store the attributes of the selected images. Not written if set to None. removed_out_port : pynpoint.core.dataio.OutputPort, None Port to store the attributes of the removed images. Not written if set to None. module_type : str history : str Returns ------- NoneType None """ if selected_out_port is not None: # First copy the existing attributes to the selected_out_port selected_out_port.copy_attributes(image_in_port) selected_out_port.add_history(module_type, history) if removed_out_port is not None: # First copy the existing attributes to the removed_out_port removed_out_port.copy_attributes(image_in_port) removed_out_port.add_history(module_type, history) non_static = image_in_port.get_all_non_static_attributes() index_sel = np.ones(image_in_port.get_shape()[0], bool) index_sel[indices] = False for i, attr_item in enumerate(non_static): values = image_in_port.get_attribute(attr_item) if values.shape[0] == image_in_port.get_shape()[0]: if selected_out_port is not None and index_sel.size > 0: selected_out_port.add_attribute(attr_item, values[index_sel], static=False) if removed_out_port is not None and indices.size > 0: removed_out_port.add_attribute(attr_item, values[indices], static=False) if 'NFRAMES' in non_static: nframes = image_in_port.get_attribute('NFRAMES') nframes_sel = np.zeros(nframes.shape, dtype=int) nframes_del = np.zeros(nframes.shape, dtype=int) for i, frames in enumerate(nframes): if indices.size == 0: nframes_sel[i] = frames nframes_del[i] = 0 else: sum_n = np.sum(nframes[:i]) index_del = np.where(np.logical_and(indices >= sum_n, indices < sum_n+frames))[0] nframes_sel[i] = frames - index_del.size nframes_del[i] = index_del.size if selected_out_port is not None: selected_out_port.add_attribute('NFRAMES', nframes_sel, static=False) if removed_out_port is not None: removed_out_port.add_attribute('NFRAMES', nframes_del, static=False) PynPoint-0.11.0/pynpoint/util/residuals.py000066400000000000000000000114431450275315200205620ustar00rootroot00000000000000""" Functions for combining the residuals of the PSF subtraction. 
""" from typing import Optional import numpy as np from typeguard import typechecked from scipy.ndimage import rotate @typechecked def combine_residuals(method: str, res_rot: np.ndarray, residuals: Optional[np.ndarray] = None, angles: Optional[np.ndarray] = None) -> np.ndarray: """ Wavelength wrapper for the combine_residual function. Produces an array with either 1 or number of wavelengths sized array. Parameters ---------- method : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). res_rot : np.ndarray Derotated residuals of the PSF subtraction (3D). residuals : np.ndarray, None Non-derotated residuals of the PSF subtraction (3D). Only required for the noise-weighted residuals. angles : np.ndarray, None Derotation angles (deg). Only required for the noise-weighted residuals. Returns ------- np.ndarray Collapsed residuals (3D). """ if res_rot.ndim == 3: output = _residuals(method=method, res_rot=np.asarray(res_rot), residuals=residuals, angles=angles) if res_rot.ndim == 4: output = np.zeros((res_rot.shape[0], res_rot.shape[2], res_rot.shape[3])) for i in range(res_rot.shape[0]): if residuals is None: output[i, ] = _residuals(method=method, res_rot=res_rot[i, ], residuals=residuals, angles=angles)[0] else: output[i, ] = _residuals(method=method, res_rot=res_rot[i, ], residuals=residuals[i, ], angles=angles)[0] return output @typechecked def _residuals(method: str, res_rot: np.ndarray, residuals: Optional[np.ndarray] = None, angles: Optional[np.ndarray] = None) -> np.ndarray: """ Function for combining the derotated residuals of the PSF subtraction. Parameters ---------- method : str Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped'). res_rot : np.ndarray Derotated residuals of the PSF subtraction (3D). residuals : np.ndarray, None Non-derotated residuals of the PSF subtraction (3D). Only required for the noise-weighted residuals. angles : np.ndarray, None Derotation angles (deg). Only required for the noise-weighted residuals. Returns ------- np.ndarray Combined residuals (3D). """ if method == 'mean': stack = np.mean(res_rot, axis=0) elif method == 'median': stack = np.median(res_rot, axis=0) elif method == 'weighted': tmp_res_var = np.var(residuals, axis=0) res_repeat = np.repeat(tmp_res_var[np.newaxis, :, :], repeats=residuals.shape[0], axis=0) res_var = np.zeros(res_repeat.shape) for j, angle in enumerate(angles): # scipy.ndimage.rotate rotates in clockwise direction for positive angles res_var[j, ] = rotate(input=res_repeat[j, ], angle=angle, reshape=False) weight1 = np.divide(res_rot, res_var, out=np.zeros_like(res_var), where=(np.abs(res_var) > 1e-100) & (res_var != np.nan)) weight2 = np.divide(1., res_var, out=np.zeros_like(res_var), where=(np.abs(res_var) > 1e-100) & (res_var != np.nan)) sum1 = np.sum(weight1, axis=0) sum2 = np.sum(weight2, axis=0) stack = np.divide(sum1, sum2, out=np.zeros_like(sum2), where=(np.abs(sum2) > 1e-100) & (sum2 != np.nan)) elif method == 'clipped': stack = np.zeros(res_rot.shape[-2:]) for i in range(stack.shape[0]): for j in range(stack.shape[1]): pix_line = res_rot[:, i, j] if np.var(pix_line) > 0.: no_mean = pix_line - np.mean(pix_line) part1 = no_mean.compress((no_mean < 3.*np.sqrt(np.var(no_mean))).flat) part2 = part1.compress((part1 > -3.*np.sqrt(np.var(no_mean))).flat) stack[i, j] = np.mean(pix_line) + np.mean(part2) return stack[np.newaxis, ...] 
PynPoint-0.11.0/pynpoint/util/sdi.py000066400000000000000000000036601450275315200173500ustar00rootroot00000000000000""" Functions for spectral differential imaging. """ import numpy as np from typeguard import typechecked from pynpoint.util.image import scale_image, shift_image @typechecked def sdi_scaling(image_in: np.ndarray, scaling: np.ndarray) -> np.ndarray: """ Function to rescale the images by their wavelength ratios. Parameters ---------- image_in : np.ndarray Data to rescale scaling : np.ndarray Scaling factors. Returns ------- np.ndarray Rescaled images with the same shape as ``image_in``. """ if image_in.shape[0] != scaling.shape[0]: raise ValueError('The number of wavelengths is not equal to the number of available ' 'scaling factors.') image_out = np.zeros(image_in.shape) for i in range(image_in.shape[0]): swaps = scale_image(image_in[i, ], scaling[i], scaling[i]) npix_del = swaps.shape[-1] - image_out.shape[-1] if npix_del == 0: image_out[i, ] = swaps else: if npix_del % 2 == 0: npix_del_a = int(npix_del/2) npix_del_b = int(npix_del/2) else: npix_del_a = int((npix_del-1)/2) npix_del_b = int((npix_del+1)/2) image_out[i, ] = swaps[npix_del_a:-npix_del_b, npix_del_a:-npix_del_b] if npix_del % 2 == 1: image_out[i, ] = shift_image(image_out[i, ], (-0.5, -0.5), interpolation='spline') return image_out @typechecked def scaling_factors(wavelengths: np.ndarray) -> np.ndarray: """ Function to calculate the scaling factors for SDI. Parameters ---------- wavelengths : np.ndarray Array with the wavelength of each frame. Returns ------- np.ndarray Scaling factors. """ return max(wavelengths) / wavelengths PynPoint-0.11.0/pynpoint/util/star.py000066400000000000000000000073121450275315200175400ustar00rootroot00000000000000""" Functions for stellar extraction. """ import math import time from typing import Optional, Tuple, Union import cv2 import numpy as np from typeguard import typechecked from pynpoint.core.dataio import InputPort from pynpoint.util.image import crop_image, center_pixel from pynpoint.util.module import progress @typechecked def locate_star(image: np.ndarray, center: Optional[tuple], width: Optional[int], fwhm: Optional[int]) -> np.ndarray: """ Function to locate the star by finding the brightest pixel. Parameters ---------- image : numpy.ndarray Input image (2D). center : tuple(int, int), None Pixel center (y, x) of the subframe. The full image is used if set to None. width : int, None The width (pix) of the subframe. The full image is used if set to None. fwhm : int, None Full width at half maximum (pix) of the Gaussian kernel. Not used if set to None. Returns ------- numpy.ndarray Position (y, x) of the brightest pixel. """ if width is not None: if center is None: center = center_pixel(image) image = crop_image(image, center, width) if fwhm is None: smooth = np.copy(image) else: sigma = fwhm / math.sqrt(8. * math.log(2.)) kernel = (fwhm * 2 + 1, fwhm * 2 + 1) smooth = cv2.GaussianBlur(image, kernel, sigma) # argmax[0] is the y position and argmax[1] is the y position argmax = np.asarray(np.unravel_index(smooth.argmax(), smooth.shape)) if center is not None and width is not None: argmax[0] += center[0] - (image.shape[0] - 1) // 2 # y argmax[1] += center[1] - (image.shape[1] - 1) // 2 # x return argmax @typechecked def star_positions(input_port: InputPort, fwhm: Optional[int], position: Optional[Union[Tuple[int, int, float], Tuple[None, None, float], Tuple[int, int, None]]] = None) -> np.ndarray: """ Function to return the position of the star in each image. 
Parameters ---------- input_port : pynpoint.core.dataio.InputPort Input port where the images are stored. fwhm : int, None The FWHM (pix) of the Gaussian kernel that is used to smooth the images before the brightest pixel is located. No smoothing is applied if set to None. position : tuple(int, int, int), None Subframe that is selected to search for the star. The tuple contains the center (pix) and size (pix) (pos_x, pos_y, size). Setting `position` to None will use the full image to search for the star. If `position=(None, None, size)` then the center of the image will be used. If `position=(pos_x, pos_y, None)` then a fixed position is used for the aperture. Returns ------- numpy.ndarray Positions (y, x) of the brightest pixel. """ nimages = input_port.get_shape()[0] starpos = np.zeros((nimages, 2), dtype=np.int64) if position is not None and position[2] is None: # [y. x] position starpos[:, 0] = position[1] starpos[:, 1] = position[0] else: center = None width = None if position is not None: width = position[2] if position[0] is not None and position[1] is not None: center = position[0:2] start_time = time.time() for i in range(nimages): progress(i, nimages, 'Locating stellar position...', start_time) # [y. x] position starpos[i, :] = locate_star(input_port[i, ], center, width, fwhm) return starpos PynPoint-0.11.0/pynpoint/util/tests.py000066400000000000000000000270131450275315200177310ustar00rootroot00000000000000""" Functions for testing the pipeline and its modules. """ import os import math import shutil import subprocess from typing import List, Optional, Tuple, Union import h5py import numpy as np from typeguard import typechecked from astropy.io import fits from scipy.ndimage import shift @typechecked def create_config(filename: str) -> None: """ Create a configuration file. Parameters ---------- filename : str Configuration filename. Returns ------- NoneType None """ with open(filename, 'w') as file_obj: file_obj.write('[header]\n\n') file_obj.write('INSTRUMENT: INSTRUME\n') file_obj.write('NFRAMES: NAXIS3\n') file_obj.write('EXP_NO: ESO DET EXP NO\n') file_obj.write('NDIT: ESO DET NDIT\n') file_obj.write('PARANG_START: ESO ADA POSANG\n') file_obj.write('PARANG_END: ESO ADA POSANG END\n') file_obj.write('DITHER_X: ESO SEQ CUMOFFSETX\n') file_obj.write('DITHER_Y: ESO SEQ CUMOFFSETY\n') file_obj.write('DIT: None\n') file_obj.write('LATITUDE: None\n') file_obj.write('LONGITUDE: None\n') file_obj.write('PUPIL: None\n') file_obj.write('DATE: None\n') file_obj.write('RA: None\n') file_obj.write('DEC: None\n\n') file_obj.write('[settings]\n\n') file_obj.write('PIXSCALE: 0.027\n') file_obj.write('MEMORY: 39\n') file_obj.write('CPU: 1\n') @typechecked def create_random(path: str, nimages: float = 5) -> None: """ Create a dataset of images with Gaussian distributed pixel values. Parameters ---------- path : str Working folder. nimages : int Number of images. Returns ------- NoneType None """ if not os.path.exists(path): os.makedirs(path) file_in = os.path.join(path, 'PynPoint_database.hdf5') np.random.seed(1) images = np.random.normal(loc=0, scale=2e-4, size=(nimages, 11, 11)) with h5py.File(file_in, 'w') as h5_file: dset = h5_file.create_dataset('images', data=images) dset.attrs['PIXSCALE'] = 0.01 h5_file.create_dataset('header_images/PARANG', data=np.arange(float(nimages))) @typechecked def create_fits(path: str, filename: str, image: np.ndarray, ndit: int, exp_no: int, dither_x: float, dither_y: float) -> None: """ Create a FITS file with images and header information. 
Parameters ---------- path : str Working folder. filename : str FITS filename. image : np.ndarray Images. ndit : int Number of integrations. exp_no : int Exposure number. dither_x : float Horizontal dither position relative to the image center. dither_y : float Vertical dither position relative to the image center. Returns ------- NoneType None """ hdu = fits.PrimaryHDU() header = hdu.header header['INSTRUME'] = 'IMAGER' header['HIERARCH ESO DET EXP NO'] = 1. header['HIERARCH ESO DET NDIT'] = ndit header['HIERARCH ESO DET EXP NO'] = exp_no header['HIERARCH ESO ADA POSANG'] = 0. header['HIERARCH ESO ADA POSANG END'] = 180. header['HIERARCH ESO SEQ CUMOFFSETX'] = dither_x header['HIERARCH ESO SEQ CUMOFFSETY'] = dither_y hdu.data = image hdu.writeto(os.path.join(path, filename)) @typechecked def create_fake_data(path: str) -> None: """ Create an ADI dataset with a star and planet. Parameters ---------- path : str Working folder. Returns ------- NoneType None """ if not os.path.exists(path): os.makedirs(path) ndit = 10 npix = 21 fwhm = 3. sep = 6. contrast = 1e-1 pos_star = 10. exp_no = 1 parang = np.linspace(0., 180., 10) np.random.seed(1) sigma = fwhm / (2.*math.sqrt(2.*math.log(2.))) x = np.arange(0., 21., 1.) y = np.arange(0., 21., 1.) xx, yy = np.meshgrid(x, y) images = np.zeros((ndit, npix, npix)) for i, item in enumerate(parang): images[i, ] = np.random.normal(loc=0, scale=2e-4, size=(npix, npix)) star = np.exp(-((xx-pos_star)**2+(yy-pos_star)**2)/(2.*sigma**2))/(2.*np.pi*sigma**2) x_shift = sep*math.cos(math.radians(item)) y_shift = sep*math.sin(math.radians(item)) images[i, ] += star + shift(contrast*star, (x_shift, y_shift), order=5) create_fits(path, 'images.fits', images, ndit, exp_no, 0., 0.) @typechecked def create_ifs_data(path: str) -> None: """ Create an IFS dataset with a star and planet. Parameters ---------- path : str Working folder. Returns ------- NoneType None """ ndit = 10 npix = 21 nwavel = 3 fwhm = 3. sep = 6. contrast = 1. pos_star = 10. exp_no = 1 parang = np.linspace(0., 180., 10) wavelength = [1., 1.1, 1.2] if not os.path.exists(path): os.makedirs(path) sigma = fwhm / (2.*math.sqrt(2.*math.log(2.))) x = y = np.arange(0., 21., 1.) xx, yy = np.meshgrid(x, y) np.random.seed(1) images = np.random.normal(loc=0, scale=0.05, size=(nwavel, ndit, npix, npix)) for i, par_item in enumerate(parang): for j, wav_item in enumerate(wavelength): sigma_scale = sigma*wav_item star = np.exp(-((xx-pos_star)**2+(yy-pos_star)**2)/(2.*sigma_scale**2)) x_shift = sep*math.cos(math.radians(par_item)) y_shift = sep*math.sin(math.radians(par_item)) images[j, i, ] += star + shift(contrast*star, (x_shift, y_shift), order=5) create_fits(path, 'images.fits', images, ndit, exp_no, 0., 0.) @typechecked def create_star_data(path: str, npix: int = 11, pos_star: float = 5.) -> None: """ Create a dataset with a PSF and Gaussian noise. Parameters ---------- path : str Working folder. npix : int Number of pixels in each dimension. Returns ------- NoneType None """ fwhm = 3. nimages = 5 exp_no = [1, 2] parang_start = [0., 90.] parang_end = [90., 180.] if not os.path.exists(path): os.makedirs(path) np.random.seed(1) for j, item in enumerate(exp_no): sigma = fwhm / (2. * math.sqrt(2.*math.log(2.))) x = y = np.arange(0., float(npix), 1.) 
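        # Pixel coordinate grid on which the 2D Gaussian PSF is evaluated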
xx, yy = np.meshgrid(x, y) images = np.random.normal(loc=0, scale=0.1, size=(nimages, npix, npix)) images += np.exp(-((xx-pos_star)**2+(yy-pos_star)**2)/(2.*sigma**2)) hdu = fits.PrimaryHDU() header = hdu.header header['INSTRUME'] = 'IMAGER' header['HIERARCH ESO DET EXP NO'] = item header['HIERARCH ESO DET NDIT'] = nimages header['HIERARCH ESO ADA POSANG'] = parang_start[j] header['HIERARCH ESO ADA POSANG END'] = parang_end[j] header['HIERARCH ESO SEQ CUMOFFSETX'] = 'None' header['HIERARCH ESO SEQ CUMOFFSETY'] = 'None' hdu.data = images hdu.writeto(os.path.join(path, f'images_{j}.fits')) @typechecked def create_dither_data(path: str) -> None: """ Create a dithering dataset with a stellar PSF. Parameters ---------- path : str Working folder. Returns ------- NoneType None """ if not os.path.exists(path): os.makedirs(path) ndit = 5 npix = 21 fwhm = 3. exp_no = [1, 2, 3, 4] pos_star = [(5., 5.), (5., 15.), (15., 15.), (15., 5.)] parang = np.full(10, 0.) np.random.seed(1) sigma = fwhm / (2.*math.sqrt(2.*math.log(2.))) x = np.arange(0., 21., 1.) y = np.arange(0., 21., 1.) xx, yy = np.meshgrid(x, y) for i, item in enumerate(exp_no): images = np.random.normal(loc=0, scale=0.1, size=(ndit, npix, npix)) for j in range(ndit): images[j, ] += np.exp(-((xx-pos_star[i][0])**2+(yy-pos_star[i][1])**2)/(2.*sigma**2)) create_fits(path, f'images_{i}.fits', images, ndit, item, pos_star[i][0]-10., pos_star[i][1]-10.) @typechecked def create_waffle_data(path: str) -> None: """ Create data with satellite spots and Gaussian noise. Parameters ---------- path : str Working folder. Returns ------- NoneType None """ if not os.path.exists(path): os.makedirs(path) fwhm = 3 npix = 101 x_spot = [25., 25., 75., 75.] y_spot = [25., 75., 75., 25.] sigma = fwhm / (2. * math.sqrt(2.*math.log(2.))) x = y = np.arange(0., npix, 1.) xx, yy = np.meshgrid(x, y) image = np.zeros((npix, npix)) for j in range(4): image += np.exp(-((xx-x_spot[j])**2+(yy-y_spot[j])**2)/(2.*sigma**2))/(2.*np.pi*sigma**2) hdu = fits.PrimaryHDU() header = hdu.header header['INSTRUME'] = 'IMAGER' header['HIERARCH ESO DET EXP NO'] = 'None' header['HIERARCH ESO DET NDIT'] = 'None' header['HIERARCH ESO ADA POSANG'] = 'None' header['HIERARCH ESO ADA POSANG END'] = 'None' header['HIERARCH ESO SEQ CUMOFFSETX'] = 'None' header['HIERARCH ESO SEQ CUMOFFSETY'] = 'None' hdu.data = image hdu.writeto(os.path.join(path, 'images.fits')) @typechecked def remove_test_data(path: str, folders: Optional[List[str]] = None, files: Optional[List[str]] = None) -> None: """ Function to remove data created by the test cases. Parameters ---------- path : str Working folder. folders : list(str, ) Folders to remove. files : list(str, ) Files to removes. Returns ------- NoneType None """ os.remove(path+'PynPoint_database.hdf5') os.remove(path+'PynPoint_config.ini') if folders is not None: for item in folders: shutil.rmtree(path+item) if files is not None: for item in files: os.remove(path+item) @typechecked def create_near_data(path: str) -> None: """ Create a stack of images with Gaussian distributed pixel values. Parameters ---------- path : str Working folder. Returns ------- NoneType None """ if not os.path.exists(path): os.makedirs(path) np.random.seed(1) image = np.random.normal(loc=0., scale=1., size=(10, 10)) exp_no = [1, 2] for i, item in enumerate(exp_no): fits_file = os.path.join(path, f'images_{i}.fits') primary_header = fits.Header() primary_header['INSTRUME'] = 'VISIR' primary_header['HIERARCH ESO DET CHOP NCYCLES'] = 5 primary_header['HIERARCH ESO DET SEQ1 DIT'] = 1. 
primary_header['HIERARCH ESO TPL EXPNO'] = item primary_header['HIERARCH ESO DET CHOP ST'] = 'T' primary_header['HIERARCH ESO DET CHOP CYCSKIP'] = 0 primary_header['HIERARCH ESO DET CHOP CYCSUM'] = 'F' chopa_header = fits.Header() chopa_header['HIERARCH ESO DET FRAM TYPE'] = 'HCYCLE1' chopb_header = fits.Header() chopb_header['HIERARCH ESO DET FRAM TYPE'] = 'HCYCLE2' hdu = [fits.PrimaryHDU(header=primary_header)] for _ in range(5): hdu.append(fits.ImageHDU(image, header=chopa_header)) hdu.append(fits.ImageHDU(image, header=chopb_header)) # last image is the average of all images hdu.append(fits.ImageHDU(image)) hdulist = fits.HDUList(hdu) hdulist.writeto(fits_file) subprocess.call('compress '+fits_file, shell=True) PynPoint-0.11.0/pynpoint/util/type_aliases.py000066400000000000000000000003341450275315200212460ustar00rootroot00000000000000""" Additional custom types to make type hints easier to read. """ from typing import Union import numpy as np StaticAttribute = Union[str, float, int, np.generic] NonStaticAttribute = Union[np.ndarray, tuple, list] PynPoint-0.11.0/pynpoint/util/wavelets.py000066400000000000000000000334531450275315200204260ustar00rootroot00000000000000""" Wrapper utils for the wavelet functions for the mlpy cwt implementation (see continous.py) """ import numpy as np from numba import jit from typeguard import typechecked from scipy.special import gamma, hermite from scipy.signal import medfilt from statsmodels.robust import mad from pynpoint.util.continuous import autoscales, cwt, icwt # from pynpoint.util.continuous import fourier_from_scales # This function cannot by @typechecked because of a compatibility issue with numba @jit(cache=True, nopython=True) def _fast_zeros(soft: bool, spectrum: np.ndarray, uthresh: float) -> np.ndarray: """ Fast numba method to modify values in the wavelet space by using a hard or soft threshold function. Parameters ---------- soft : bool If True soft the threshold function will be used, otherwise a hard threshold is applied. spectrum : numpy.ndarray The input 2D wavelet space. uthresh : float Threshold used by the threshold function. Returns ------- numpy.ndarray Modified spectrum. """ if soft: for i in range(0, spectrum.shape[0], 1): for j in range(0, spectrum.shape[1], 1): tmp_value = spectrum[i, j].real if abs(spectrum[i, j]) > uthresh: spectrum[i, j] = np.sign(tmp_value) * (abs(tmp_value) - uthresh) else: spectrum[i, j] = 0 else: for i in range(0, spectrum.shape[0], 1): for j in range(0, spectrum.shape[1], 1): if abs(spectrum[i, j]) < uthresh: spectrum[i, j] = 0 return spectrum class WaveletAnalysisCapsule: """ Capsule class to process one 1d time series using the CWT and wavelet de-nosing by wavelet shrinkage. """ @typechecked def __init__(self, signal_in: np.ndarray, wavelet_in: str = 'dog', order: int = 2, padding: str = 'none', frequency_resolution: float = 0.5) -> None: """ Parameters ---------- signal_in : numpy.ndarray 1D input signal. wavelet_in : str Wavelet function ('dog' or 'morlet'). order : int Order of the wavelet function. padding : str Padding method ('zero', 'mirror', or 'none'). frequency_resolution : float Wavelet space resolution in scale/frequency. 
Returns ------- NoneType None """ # save input data self.m_supported_wavelets = ['dog', 'morlet'] # check supported wavelets if wavelet_in not in self.m_supported_wavelets: raise ValueError(f'Wavelet {wavelet_in} is not supported') if wavelet_in == 'dog': self._m_c_reconstructions = {2: 3.5987, 4: 2.4014, 6: 1.9212, 8: 1.6467, 12: 1.3307, 16: 1.1464, 20: 1.0222, 30: 0.8312, 40: 0.7183, 60: 0.5853} elif wavelet_in == 'morlet': self._m_c_reconstructions = {5: 0.9484, 6: 0.7784, 7: 0.6616, 8: 0.5758, 10: 0.4579, 12: 0.3804, 14: 0.3254, 16: 0.2844, 20: 0.2272} self.m_wavelet = wavelet_in if padding not in ['none', 'zero', 'mirror']: raise ValueError('Padding can only be none, zero or mirror') self._m_data = signal_in - np.ones(len(signal_in)) * np.mean(signal_in) self.m_padding = padding self.__pad_signal() self._m_data_size = len(self._m_data) self._m_data_mean = np.mean(signal_in) if order not in self._m_c_reconstructions: raise ValueError('Wavelet ' + str(wavelet_in) + ' does not support order ' + str(order) + ". \n Only orders: " + str(sorted(self._m_c_reconstructions.keys())).strip('[]') + " are supported") self.m_order = order self._m_c_final_reconstruction = self._m_c_reconstructions[order] # create scales for wavelet transform self._m_scales = autoscales(N=self._m_data_size, dt=1, dj=frequency_resolution, wf=wavelet_in, p=order) self._m_number_of_scales = len(self._m_scales) self._m_frequency_resolution = frequency_resolution self.m_spectrum = None # --- functions for reconstruction value @staticmethod @typechecked def _morlet_function(omega0: float, x_in: float) -> np.complex128: """ Returns ------- numpy.complex128 Morlet function. """ return np.pi**(-0.25) * np.exp(1j * omega0 * x_in) * np.exp(-x_in**2/2.0) @staticmethod @typechecked def _dog_function(order: int, x_in: float) -> float: """ Returns ------- float DOG function. """ p_hpoly = hermite(order)[int(x_in / np.power(2, 0.5))] herm = p_hpoly / (np.power(2, float(order) / 2)) return ((-1)**(order+1)) / np.sqrt(gamma(order + 0.5)) * herm @typechecked def __pad_signal(self) -> None: """ Returns ------- NoneType None """ padding_length = int(len(self._m_data) * 0.5) if self.m_padding == 'zero': new_data = np.append(self._m_data, np.zeros(padding_length, dtype=np.float64)) self._m_data = np.append(np.zeros(padding_length, dtype=np.float64), new_data) elif self.m_padding == 'mirror': left_half_signal = self._m_data[:padding_length] right_half_signal = self._m_data[padding_length:] new_data = np.append(self._m_data, right_half_signal[::-1]) self._m_data = np.append(left_half_signal[::-1], new_data) @typechecked def __compute_reconstruction_factor(self) -> float: """ Computes the reconstruction factor. Returns ------- float Reconstruction factor. """ freq_res = self._m_frequency_resolution wavelet = self.m_wavelet order = self.m_order if wavelet == 'morlet': zero_function = self._morlet_function(order, 0) else: zero_function = self._dog_function(order, 0) c_delta = self._m_c_final_reconstruction reconstruction_factor = freq_res/(c_delta * zero_function) return reconstruction_factor.real @typechecked def compute_cwt(self) -> None: """ Compute the wavelet space of the given input signal. Returns ------- NoneType None """ self.m_spectrum = cwt(self._m_data, dt=1, scales=self._m_scales, wf=self.m_wavelet, p=self.m_order) @typechecked def update_signal(self) -> None: """ Updates the internal signal by the reconstruction of the current wavelet space. 
Returns ------- NoneType None """ self._m_data = icwt(self.m_spectrum, scales=self._m_scales) reconstruction_factor = self.__compute_reconstruction_factor() self._m_data *= reconstruction_factor @typechecked def denoise_spectrum(self, soft: bool = False) -> None: """ Applies wavelet shrinkage on the current wavelet space (m_spectrum) by either a hard of soft threshold function. Parameters ---------- soft : bool If True a soft threshold is used, hard otherwise. Returns ------- NoneType None """ if self.m_padding != 'none': noise_length_4 = len(self._m_data) // 4 noise_spectrum = self.m_spectrum[0, noise_length_4: (noise_length_4 * 3)].real else: noise_spectrum = self.m_spectrum[0, :].real sigma = mad(noise_spectrum) uthresh = sigma*np.sqrt(2.0*np.log(len(noise_spectrum))) self.m_spectrum = _fast_zeros(soft, self.m_spectrum, uthresh) @typechecked def median_filter(self) -> None: """ Applies a median filter on the internal 1d signal. Can be useful for cosmic ray correction after temporal de-noising Returns ------- NoneType None """ self._m_data = medfilt(self._m_data, 19) @typechecked def get_signal(self) -> np.ndarray: """ Returns the current version of the 1d signal. Use update_signal() in advance in order to get the current reconstruction of the wavelet space. Removes padded values as well. Returns ------- numpy.ndarray Current version of the 1D signal. """ tmp_data = self._m_data + np.ones(len(self._m_data)) * self._m_data_mean if self.m_padding == 'none': return tmp_data return tmp_data[len(self._m_data) // 4: 3 * (len(self._m_data) // 4)] # def __transform_period(self, # period): # # tmp_y = fourier_from_scales(self._m_scales, # self.m_wavelet, # self.m_order) # # def __transformation(x): # return np.log2(x + 1) * tmp_y[-1] / np.log2(tmp_y[-1] + 1) # # cutoff_scaled = __transformation(period) # # scale_new = tmp_y[-1] - tmp_y[0] # scale_old = self.m_spectrum.shape[0] # # factor = scale_old / scale_new # cutoff_scaled *= factor # # return cutoff_scaled # ----- plotting functions -------- # def __plot_or_save_spectrum(self): # plt.close() # # plt.figure(figsize=(8, 6)) # plt.subplot(1, 1, 1) # # tmp_y = fourier_from_scales(self._m_scales, # self.m_wavelet, # self.m_order) # # tmp_x = np.arange(0, self._m_data_size + 1, 1) # # scaled_spec = copy.deepcopy(self.m_spectrum.real) # for i, _ in enumerate(scaled_spec): # scaled_spec[i] /= np.sqrt(self._m_scales[i]) # # plt.imshow(abs(scaled_spec), # aspect='auto', # extent=[tmp_x[0], # tmp_x[-1], # tmp_y[0], # tmp_y[-1]], # cmap=plt.get_cmap("gist_ncar"), # origin='lower') # # # COI first part (only for DOG) with padding # # inner_frequency = 2.*np.pi/np.sqrt(self.m_order + 0.5) # coi = np.append(np.zeros(len(tmp_x)/4), # tmp_x[0:len(tmp_x) / 4]) # coi = np.append(coi, # tmp_x[0:len(tmp_x) / 4][::-1]) # coi = np.append(coi, # np.zeros(len(tmp_x) / 4)) # # plt.plot(np.arange(0, len(coi), 1.0), # inner_frequency * coi / np.sqrt(2), # color="white") # # plt.ylim([tmp_y[0], # tmp_y[-1]]) # # plt.fill_between(np.arange(0, len(coi), 1.0), # inner_frequency * coi / np.sqrt(2), # np.ones(len(coi)) * tmp_y[-1], # facecolor="none", # edgecolor='white', # alpha=0.4, # hatch="x") # # plt.yscale('log', basey=2) # plt.ylabel("Period in [s]") # plt.xlabel("Time in [s]") # plt.title("Spectrum computed with CWT using '" + str(self.m_wavelet) + # "' wavelet of order " + str(self.m_order)) # # def plot_spectrum(self): # """ # Shows a plot of the current wavelet space. 
# :return: None # """ # # self.__plot_or_save_spectrum() # plt.show() # # def save_spectrum(self, # location): # """ # Saves a plot of the current wavelet space to a given location. # :param location: Save location # :type location: str # :return: None # """ # self.__plot_or_save_spectrum() # plt.savefig(location) # plt.close() # # def __plot_or_save_signal(self): # plt.close() # plt.plot(self._m_data) # plt.title("Signal") # plt.ylabel("Value of the function") # plt.xlim([0, self._m_data_size]) # plt.xlabel("Time in [s]") # # def plot_signal(self): # """ # Plot the current signal. # :return: None # """ # self.__plot_or_save_signal() # plt.show() # # def save_signal(self, # location): # """ # Saves a plot of the current signal to a given location. # :param location: Save location # :type location: str # :return: None # """ # self.__plot_or_save_signal() # plt.savefig(location) PynPoint-0.11.0/requirements.txt000066400000000000000000000004011450275315200166340ustar00rootroot00000000000000astropy ~= 5.3.0 emcee ~= 3.1.0 h5py ~= 3.9.0 numba ~= 0.57.0 numpy ~= 1.24.0 opencv-python ~= 4.8.0 photutils ~= 1.9.0 PyWavelets ~= 1.4.0 scikit-image ~= 0.21.0 scikit-learn ~= 1.3.0 scipy ~= 1.11.0 statsmodels ~= 0.14.0 tqdm ~= 4.66.0 typeguard ~= 4.1.0 PynPoint-0.11.0/setup.py000066400000000000000000000024471450275315200150760ustar00rootroot00000000000000#!/usr/bin/env python import pkg_resources import setuptools with open('requirements.txt') as req_txt: parse_req = pkg_resources.parse_requirements(req_txt) install_requires = [str(req) for req in parse_req] setuptools.setup( name='pynpoint', version='0.11.0', description='Pipeline for processing and analysis of high-contrast imaging data', long_description=open('README.rst').read(), long_description_content_type='text/x-rst', author='Tomas Stolker & Markus Bonse', author_email='stolker@strw.leidenuniv.nl', url='https://github.com/PynPoint/PynPoint', project_urls={'Documentation': 'https://pynpoint.readthedocs.io'}, packages=setuptools.find_packages(include=['pynpoint', 'pynpoint.*']), install_requires=install_requires, tests_require=['pytest'], license='GPLv3', zip_safe=False, keywords='pynpoint', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Astronomy', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Natural Language :: English', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', ], ) PynPoint-0.11.0/tests/000077500000000000000000000000001450275315200145175ustar00rootroot00000000000000PynPoint-0.11.0/tests/__init__.py000066400000000000000000000000001450275315200166160ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_core/000077500000000000000000000000001450275315200165065ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_core/__init__.py000066400000000000000000000000001450275315200206050ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_core/test_configport.py000066400000000000000000000053331450275315200222750ustar00rootroot00000000000000import os import pytest from pynpoint.core.pypeline import Pypeline from pynpoint.core.dataio import ConfigPort, DataStorage from pynpoint.util.tests import create_config, remove_test_data class TestConfigPort: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_config(self.test_dir+'PynPoint_config.ini') def teardown_class(self) -> None: remove_test_data(self.test_dir) def 
test_create_config_port(self) -> None: storage = DataStorage(self.test_dir + 'PynPoint_database.hdf5') with pytest.raises(ValueError) as error: ConfigPort('images', storage) assert str(error.value) == 'The tag name of the central configuration should be ' \ '\'config\'.' port = ConfigPort('config', None) with pytest.warns(UserWarning) as warning: check_error = port._check_error_cases() assert len(warning) == 1 assert warning[0].message.args[0] == 'ConfigPort can not load data unless a database is ' \ 'connected.' assert not check_error port = ConfigPort('config', storage) assert isinstance(port, ConfigPort) with pytest.warns(UserWarning) as warning: port._check_error_cases() assert len(warning) == 1 assert warning[0].message.args[0] == 'No data under the tag which is linked by the ' \ 'ConfigPort.' def test_get_config_attribute(self) -> None: create_config(self.test_dir+'PynPoint_config.ini') Pypeline(self.test_dir, self.test_dir, self.test_dir) storage = DataStorage(self.test_dir + 'PynPoint_database.hdf5') port = ConfigPort('config', None) with pytest.warns(UserWarning) as warning: attribute = port.get_attribute('CPU') assert len(warning) == 1 assert warning[0].message.args[0] == 'ConfigPort can not load data unless a database is ' \ 'connected.' assert attribute is None port = ConfigPort('config', storage) attribute = port.get_attribute('CPU') assert attribute == 1 attribute = port.get_attribute('NFRAMES') assert attribute == 'NAXIS3' attribute = port.get_attribute('PIXSCALE') assert attribute == pytest.approx(0.027, rel=self.limit, abs=0.) with pytest.warns(UserWarning) as warning: attribute = port.get_attribute('test') assert len(warning) == 1 assert warning[0].message.args[0] == 'The attribute \'test\' was not found.' assert attribute is None PynPoint-0.11.0/tests/test_core/test_datastorage.py000066400000000000000000000035571450275315200224270ustar00rootroot00000000000000import os import pytest import h5py import numpy as np from pynpoint.core.dataio import DataStorage class TestDataStorage: def setup_class(self) -> None: self.limit = 1e-10 self.test_data = os.path.dirname(__file__) + '/PynPoint_database.hdf5' def test_create_storage_with_existing_database(self) -> None: np.random.seed(1) images = np.random.normal(loc=0, scale=2e-4, size=(10, 100, 100)) with h5py.File(self.test_data, 'w') as hdf_file: hdf_file.create_dataset('images', data=images) storage = DataStorage(self.test_data) storage.open_connection() data = storage.m_data_bank['images'] assert data[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=self.limit, abs=0.) assert np.mean(data) == pytest.approx(1.0506056979365338e-06, rel=self.limit, abs=0.) 
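# Remove the temporary HDF5 file so that the next test can check that DataStorage also creates a new database when none exists yet.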
os.remove(self.test_data) def test_create_storage_without_existing_database(self) -> None: storage = DataStorage(self.test_data) storage.open_connection() storage.m_data_bank['data'] = [0, 1, 2, 5, 7] assert storage.m_data_bank['data'][2] == 2 assert list(storage.m_data_bank.keys()) == ['data', ] storage.close_connection() os.remove(self.test_data) def test_create_storage_with_wrong_location(self) -> None: file_in = '/test/test.hdf5' with pytest.raises(AssertionError): DataStorage(file_in) def test_open_close_connection(self) -> None: storage = DataStorage(self.test_data) storage.open_connection() assert storage.m_open is True storage.open_connection() assert storage.m_open is True storage.close_connection() assert storage.m_open is False storage.close_connection() assert storage.m_open is False os.remove(self.test_data) PynPoint-0.11.0/tests/test_core/test_inputport.py000066400000000000000000000075671450275315200222020ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.dataio import DataStorage, InputPort, OutputPort from pynpoint.util.tests import create_random, create_config, remove_test_data class TestInputPort: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_random(self.test_dir) create_config(self.test_dir+'PynPoint_config.ini') file_in = os.path.dirname(__file__) + '/PynPoint_database.hdf5' self.storage = DataStorage(file_in) def teardown_class(self) -> None: remove_test_data(self.test_dir) def test_create_instance_access_data(self) -> None: with pytest.raises(ValueError) as error: InputPort('config', self.storage) assert str(error.value) == 'The tag name \'config\' is reserved for the central ' \ 'configuration of PynPoint.' with pytest.raises(ValueError) as error: InputPort('fits_header', self.storage) assert str(error.value) == 'The tag name \'fits_header\' is reserved for storage of the ' \ 'FITS headers.' port = InputPort('images', self.storage) assert port[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=self.limit, abs=0.) data = np.mean(port.get_all()) assert data == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.) assert len(port[0:2, 0, 0]) == 2 assert port.get_shape() == (5, 11, 11) assert port.get_attribute('PIXSCALE') == 0.01 assert port.get_attribute('PARANG')[0] == 0. 
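# Requesting an attribute that is not present in the database should only raise a UserWarning and return None.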
with pytest.warns(UserWarning): assert port.get_attribute('none') is None def test_create_instance_access_non_existing_data(self) -> None: port = InputPort('test', self.storage) with pytest.warns(UserWarning): assert port[0, 0, 0] is None with pytest.warns(UserWarning): assert port.get_all() is None with pytest.warns(UserWarning): assert port.get_shape() is None with pytest.warns(UserWarning): assert port.get_attribute('num_files') is None with pytest.warns(UserWarning): assert port.get_all_non_static_attributes() is None with pytest.warns(UserWarning): assert port.get_all_static_attributes() is None def test_create_instance_no_data_storage(self) -> None: port = InputPort('test') with pytest.warns(UserWarning): assert port[0, 0, 0] is None with pytest.warns(UserWarning): assert port.get_all() is None with pytest.warns(UserWarning): assert port.get_shape() is None with pytest.warns(UserWarning): assert port.get_all_non_static_attributes() is None with pytest.warns(UserWarning): assert port.get_all_static_attributes() is None # def test_get_all_attributes(self) -> None: # # port = InputPort('images', self.storage) # # assert port.get_all_static_attributes() == {'PIXSCALE': 0.01} # assert port.get_all_non_static_attributes() == ['PARANG', ] # # port = OutputPort('images', self.storage) # assert port.del_all_attributes() is None # # port = InputPort('images', self.storage) # assert port.get_all_non_static_attributes() is None # def test_get_ndim(self) -> None: # # with pytest.warns(UserWarning) as warning: # ndim = InputPort('images', None).get_ndim() # # assert len(warning) == 1 # # assert warning[0].message.args[0] == 'InputPort can not load data ' \ # 'unless a database is connected.' # # assert ndim is None # # port = InputPort('images', self.storage) # assert port.get_ndim() == 3 PynPoint-0.11.0/tests/test_core/test_outputport.py000066400000000000000000000554261450275315200224000ustar00rootroot00000000000000import os from typing import Tuple import pytest import numpy as np from pynpoint.core.dataio import OutputPort, DataStorage, InputPort from pynpoint.util.tests import create_random class TestOutputPort: def setup_class(self) -> None: self.limit = 1e-10 self.storage = DataStorage(os.path.dirname(__file__) + '/PynPoint_database.hdf5') create_random(os.path.dirname(__file__)) def teardown_class(self) -> None: os.remove(os.path.dirname(__file__) + '/PynPoint_database.hdf5') def create_input_port(self, tag_name: str) -> InputPort: inport = InputPort(tag_name, self.storage) inport.open_port() return inport def create_output_port(self, tag_name: str) -> OutputPort: outport = OutputPort(tag_name, self.storage) return outport def test_create_instance(self) -> None: with pytest.raises(ValueError) as error: OutputPort('config', self.storage) assert str(error.value) == 'The tag name \'config\' is reserved for the central ' \ 'configuration of PynPoint.' with pytest.raises(ValueError) as error: OutputPort('fits_header', self.storage) assert str(error.value) == 'The tag name \'fits_header\' is reserved for storage of the ' \ 'FITS headers.' 
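# A port created with activate_init=False should silently discard writes: only the data written through the activated port is expected to end up in the database, which is checked below by reading the same tag back with an InputPort.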
active_port = OutputPort('test', self.storage, activate_init=True) deactive_port = OutputPort('test', self.storage, activate_init=False) control_port = InputPort('test', self.storage) deactive_port.open_port() deactive_port.set_all(np.asarray([0, 1, 2, 3])) deactive_port.flush() with pytest.warns(UserWarning) as warning: control_port.get_all() assert len(warning) == 1 assert warning[0].message.args[0] == 'No data under the tag which is linked by the ' \ 'InputPort.' active_port.set_all(np.asarray([0, 1, 2, 3])) active_port.flush() assert np.array_equal(np.asarray([0, 1, 2, 3]), control_port.get_all()) active_port.del_all_data() # def test_set_all_new_data(self) -> None: # # outport = self.create_output_port('new_data') # # # ----- 1D input ----- # # data = [1, 3] # outport.set_all(data, data_dim=1) # # inport = self.create_input_port('new_data') # # assert np.array_equal(inport.get_all(), [1., 3.]) # outport.del_all_data() # # data = [1, 3] # outport.set_all(data, data_dim=2) # assert np.array_equal(inport.get_all(), [[1, 3]]) # outport.del_all_data() # # # ----- 2D input ----- # # data = [[1, 3], [2, 4]] # outport.set_all(data, data_dim=2) # assert np.array_equal(inport.get_all(), [[1, 3], [2, 4]]) # outport.del_all_data() # # data = [[1, 3], [2, 4]] # outport.set_all(data, data_dim=3) # assert np.array_equal(inport.get_all(), [[[1, 3], [2, 4]]]) # outport.del_all_data() # # # ----- 3D input ----- # # data = [[[1, 3], [2, 4]], [[1, 3], [2, 4]]] # outport.set_all(data, data_dim=3) # assert np.array_equal(inport.get_all(), [[[1, 3], [2, 4]], [[1, 3], [2, 4]]]) # outport.del_all_data() # def test_set_all_error(self) -> None: # # # ---- Test database not set ----- # # data = [1, 2, 3, 4, 0] # # with pytest.warns(UserWarning) as record: # out_port = OutputPort('some_data') # out_port.set_all(data) # # assert len(record) == 1 # # assert record[0].message.args[0] == 'OutputPort can not store data unless a database is ' \ # 'connected.' # # # ---- Test data dim of actual data for new data entry is < 1 or > 5 # # out_port = self.create_output_port('new_data') # # data = [[[[[[2, 2], ], ], ], ]] # # with pytest.raises(ValueError) as error: # out_port.set_all(data, data_dim=2) # # assert str(error.value) == 'Output port can only save numpy arrays from 1D to 5D. Use ' \ # 'Port attributes to save as int, float, or string.' # # # ---- Test data dim of data_dim for new data entry is < 1 or > 5 # # out_port = self.create_output_port('new_data') # # data = [1, 2, 4] # # with pytest.raises(ValueError) as error: # out_port.set_all(data, data_dim=0) # # assert str(error.value) == 'The data dimensions should be 1D, 2D, 3D, 4D, or 5D.' # # # ---- Test data_dim for new data entry is smaller than actual data # # out_port = self.create_output_port('new_data') # # data = [[1], [2]] # # with pytest.raises(ValueError) as error: # out_port.set_all(data, data_dim=1) # # assert str(error.value) == 'The dimensions of the data should be equal to or larger ' \ # 'than the dimensions of the input data.' # # # ---- Test data_dim == 3 and actual size == 1 # # out_port = self.create_output_port('new_data') # # data = [1, 2] # # with pytest.raises(ValueError) as error: # out_port.set_all(data, data_dim=3) # # assert str(error.value) == 'Cannot initialize 1D data in 3D data container.' 
# # def test_set_all_keep_attributes(self) -> None: # # def init_out_port() -> Tuple[OutputPort, InputPort]: # out_port = self.create_output_port('new_data') # control = self.create_input_port('new_data') # # data = [2, 3, 4] # out_port.set_all(data) # out_port.add_attribute(name='test1', value=1) # out_port.add_attribute(name='test2', value=12) # # return out_port, control # # out_port, control = init_out_port() # # with pytest.warns(UserWarning) as warning: # out_port.set_all([[]], data_dim=2, keep_attributes=True) # # assert len(warning) == 1 # # assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \ # '\'new_data\' is empty.' # # assert control.get_attribute('test1') == 1 # assert control.get_attribute('test2') == 12 # # out_port.del_all_data() # # def test_append_new_data(self) -> None: # # # using append even if no data exists # # out_port = self.create_output_port('new_data') # # # ----- 1D input ----- # # data = [3, ] # out_port.append(data) # # control = self.create_input_port('new_data') # assert control.get_all() == data # out_port.del_all_data() # # # ----- 2D input ----- # # data = [[3, 3], [3, 2]] # out_port.append(data) # # assert np.array_equal(control.get_all(), data) # out_port.del_all_data() # # # ----- 3D input ----- # # data = [[[3, 3], [3, 2]], [[3, 1], [3, 1]]] # out_port.append(data) # # assert np.array_equal(control.get_all(), data) # out_port.del_all_data() # # def test_append_existing_data(self) -> None: # # out_port = self.create_output_port('new_data') # # # ----- 1D ----- # # out_port.append([2, 3, 5], data_dim=1) # out_port.append([3, 3, 5]) # # control = self.create_input_port('new_data') # # assert np.array_equal(control.get_all(), [2, 3, 5, 3, 3, 5]) # # out_port.del_all_data() # # # ----- 2D ----- # # 1D input append to 1D data # # out_port.append([1, 1], data_dim=2) # out_port.append([3, 3]) # # assert np.array_equal(control.get_all(), [[1, 1], [3, 3]]) # # out_port.del_all_data() # # # 1D input append to 2D data # # out_port.append([[2, 3], [1, 1]], data_dim=2) # out_port.append([3, 3]) # # assert np.array_equal(control.get_all(), [[2, 3], [1, 1], [3, 3]]) # # out_port.del_all_data() # # # 2D input append to 2D data # # out_port.append([[2, 3], [1, 1]], data_dim=2) # out_port.append([[3, 3], [8, 8]]) # # assert np.array_equal(control.get_all(), [[2, 3], [1, 1], [3, 3], [8, 8]]) # # out_port.del_all_data() # # # 2D input append to 3D data # # out_port.append([[[2, 3], [1, 1]], [[2, 4], [1, 1]]], data_dim=3) # # out_port.append([[3, 3], [8, 8]]) # # assert np.array_equal(control.get_all(), # [[[2, 3], [1, 1]], # [[2, 4], [1, 1]], # [[3, 3], [8, 8]]]) # # out_port.del_all_data() # # # 3D input append to 3D data # # out_port.append([[[2, 3], [1, 1]], [[2, 4], [1, 1]]], data_dim=3) # out_port.append([[[22, 7], [10, 221]], [[223, 46], [1, 15]]]) # # assert np.array_equal(control.get_all(), # [[[2, 3], [1, 1]], # [[2, 4], [1, 1]], # [[22, 7], [10, 221]], # [[223, 46], [1, 15]]]) # # out_port.del_all_data() # # def test_append_existing_data_force_overwriting(self) -> None: # # out_port = self.create_output_port('new_data') # # # Error case (no force) # # out_port.append([2, 3, 5], data_dim=1) # out_port.append([[[22, 7], [10, 221]], [[223, 46], [1, 15]]], force=True) # # control = self.create_input_port('new_data') # # assert np.array_equal(control.get_all(), [[[22, 7], [10, 221]], [[223, 46], [1, 15]]]) # # out_port.del_all_data() # # def test_append_existing_data_error(self) -> None: # # # ---- port not active ---- # 
out_port = self.create_output_port('new_data') # out_port.deactivate() # # data = [1, ] # # out_port.append(data) # out_port.del_all_data() # out_port.activate() # # # 1 Element input # # # 1D input # # 2D input # # 3D input # # # Error case (no force) # out_port.set_all([2, 3, 5], data_dim=1) # # with pytest.raises(ValueError) as error: # out_port.append([[[22, 7], [10, 221]], [[223, 46], [1, 15]]]) # # assert str(error.value) == 'The port tag \'new_data\' is already used with a different ' \ # 'data type. The \'force\' parameter can be used to replace ' \ # 'the tag.' # out_port.del_all_data() # # def test_set_data_using_slicing(self) -> None: # # out_port = self.create_output_port('new_data') # # out_port.set_all([2, 5, 6, 7, ]) # out_port[3] = 44 # # control = self.create_input_port('new_data') # # assert np.array_equal(control.get_all(), [2, 5, 6, 44, ]) # # out_port.deactivate() # out_port[2] = 0 # # assert np.array_equal(control.get_all(), [2, 5, 6, 44, ]) # # out_port.activate() # out_port.del_all_data() # # def test_del_all_data(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.set_all([0, 1]) # out_port.del_all_data() # # control = self.create_input_port('new_data') # # with pytest.warns(UserWarning) as warning: # control.get_all() # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'No data under the tag which is linked by the ' \ # 'InputPort.' # # def test_add_static_attribute(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.set_all([1]) # out_port.add_attribute('attr1', value=5) # out_port.add_attribute('attr2', value='no') # # control = self.create_input_port('new_data') # assert control.get_attribute('attr1') == 5 # # out_port.add_attribute('attr1', value=6) # assert control.get_attribute('attr1') == 6 # assert control.get_attribute('attr2') == 'no' # # out_port.deactivate() # out_port.add_attribute('attr3', value=33) # # with pytest.warns(UserWarning) as warning: # control.get_attribute('attr3') # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'The attribute \'attr3\' was not found.' # # out_port.activate() # out_port.del_all_attributes() # out_port.del_all_data() # # def test_add_static_attribute_error(self) -> None: # # out_port = self.create_output_port('new_data') # # # add attribute while no data is set # with pytest.warns(UserWarning) as warning: # out_port.add_attribute('attr1', value=6) # # # check that only one warning was raised # assert len(warning) == 1 # # # check that the message matches # assert warning[0].message.args[0] == 'Can not store the attribute \'attr1\' because ' \ # 'the dataset \'new_data\' does not exist.' 
# # out_port.del_all_attributes() # out_port.del_all_data() # # def test_add_non_static_attribute(self) -> None: # # # two different data types # # out_port = self.create_output_port('new_data') # out_port.set_all([1]) # out_port.add_attribute('attr1', value=[6, 3], static=False) # # control = self.create_input_port('new_data') # assert np.array_equal(control.get_attribute('attr1'), [6, 3]) # # out_port.del_all_attributes() # out_port.del_all_data() # # def test_append_attribute_data(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.del_all_data() # out_port.set_all([1]) # out_port.add_attribute('attr1', value=[2, 3], static=False) # # control = self.create_input_port('new_data') # assert np.array_equal(control.get_attribute('attr1'), [2, 3]) # # out_port.append_attribute_data('attr1', value=2) # assert np.array_equal(control.get_attribute('attr1'), [2, 3, 2]) # # out_port.deactivate() # out_port.append_attribute_data('attr1', value=2) # assert np.array_equal(control.get_attribute('attr1'), [2, 3, 2]) # # out_port.activate() # out_port.del_all_attributes() # out_port.del_all_data() # # def test_copy_attributes(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.del_all_attributes() # out_port.del_all_data() # out_port.set_all([0, ]) # # # some static attributes # # out_port.add_attribute('attr1', 33) # out_port.add_attribute('attr2', 'string') # out_port.add_attribute('attr3', 3.141) # # # non static attributes # # out_port.add_attribute('attr_non_static', [3, 4, 5, 6], static=False) # # copy_port = self.create_output_port('other_data') # copy_port.del_all_attributes() # copy_port.del_all_data() # # copy_port.set_all([1, ]) # # # for attribute overwriting # # copy_port.add_attribute('attr_non_static', [3, 4, 44, 6], static=False) # # control = self.create_input_port('new_data') # copy_port.copy_attributes(control) # # copy_control = self.create_input_port('other_data') # # assert copy_control.get_attribute('attr1') == 33 # assert copy_control.get_attribute('attr2') == 'string' # # assert np.array_equal(copy_control.get_attribute('attr3'), 3.141) # assert np.array_equal(copy_control.get_attribute('attr_non_static'), [3, 4, 5, 6]) # # copy_port.del_all_attributes() # copy_port.del_all_data() # # out_port.del_all_attributes() # out_port.del_all_data() # # port = self.create_output_port('test') # port.deactivate() # # assert port.copy_attributes(control) is None # # port = self.create_input_port('test') # # with pytest.warns(UserWarning) as warning: # port.get_all_non_static_attributes() # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'No data under the tag which is linked by the ' \ # 'InputPort.' 
# # def test_copy_attributes_same_tag(self) -> None: # # out_port1 = self.create_output_port('new_data') # out_port1.set_all([0, ]) # # out_port2 = self.create_output_port('new_data') # out_port2.set_all([2, ]) # # out_port1.add_attribute('attr1', 2) # # control1 = self.create_input_port('new_data') # out_port2.copy_attributes(control1) # # control2 = self.create_input_port('new_data') # assert control2.get_attribute('attr1') == 2 # # out_port1.del_all_data() # out_port1.del_all_attributes() # # def test_del_attribute(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.set_all([0, ]) # # # static # # out_port.add_attribute('attr1', 4) # out_port.add_attribute('attr2', 5) # # # non static # # out_port.add_attribute('attr_non_static_1', [1, 2, 3], static=False) # out_port.add_attribute('attr_non_static_2', [2, 4, 6, 8], static=False) # out_port.del_attribute('attr1') # out_port.del_attribute('attr_non_static_1') # # # check is only the chosen attributes are deleted and the rest is still there # # control = self.create_input_port('new_data') # # with pytest.warns(UserWarning) as warning: # control.get_attribute('attr1') # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'The attribute \'attr1\' was not found.' # # assert control.get_attribute('attr2') == 5 # # with pytest.warns(UserWarning) as warning: # control.get_attribute('attr_non_static_1') # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'The attribute \'attr_non_static_1\' was not found.' # # assert np.array_equal(control.get_attribute('attr_non_static_2'), [2, 4, 6, 8]) # # out_port.del_all_data() # out_port.del_all_attributes() # # def test_del_attribute_error_case(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.set_all([0, ]) # # # deactivated port # # out_port.add_attribute('attr_1', 5.554) # out_port.deactivate() # out_port.del_attribute('attr_1') # # control = self.create_input_port('new_data') # assert control.get_attribute('attr_1') == 5.554 # # out_port.activate() # # # not existing # with pytest.warns(UserWarning) as warning: # out_port.del_attribute('not_existing') # # assert len(warning) == 1 # # assert warning[0].message.args[0] == 'Attribute \'not_existing\' does not exist and ' \ # 'could not be deleted.' # # out_port.del_all_attributes() # out_port.del_all_data() # # def test_del_all_attributes(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.set_all([0, ]) # out_port.add_attribute('attr_1', 4) # out_port.add_attribute('attr_2', [1, 3], static=False) # out_port.del_all_attributes() # # control = self.create_input_port('new_data') # # with pytest.warns(UserWarning) as warning: # control.get_attribute('attr_1') # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'The attribute \'attr_1\' was not found.' # # with pytest.warns(UserWarning) as warning: # control.get_attribute('attr_2') # # assert len(warning) == 1 # assert warning[0].message.args[0] == 'The attribute \'attr_2\' was not found.' 
# # out_port.del_all_data() # # def test_add_history(self) -> None: # # out_port = self.create_output_port('new_data') # out_port.set_all([0, ]) # out_port.add_history('Test', 'history') # # control = self.create_input_port('new_data') # assert control.get_attribute('History: Test') == 'history' # # def test_check_attribute(self) -> None: # # out_port = self.create_output_port('new_data') # # out_port.set_all([0, ]) # out_port.add_attribute('static', 5, static=True) # out_port.add_attribute('non-static', np.arange(1, 11, 1), static=False) # # assert out_port.check_static_attribute('static', 5) == 0 # assert out_port.check_static_attribute('test', 3) == 1 # assert out_port.check_static_attribute('static', 33) == -1 # # assert out_port.check_non_static_attribute('non-static', np.arange(1, 11, 1)) == 0 # assert out_port.check_non_static_attribute('test', np.arange(1, 11, 1)) == 1 # assert out_port.check_non_static_attribute('non-static', np.arange(10, 21, 1)) == -1 # # out_port.deactivate() # # assert out_port.check_static_attribute('static', 5) is None # assert out_port.check_non_static_attribute('non-static', np.arange(1, 11, 1)) is None # # out_port.activate() # out_port.del_all_data() # out_port.del_all_attributes() # # def test_append_empty_data_new(self) -> None: # # out_port = self.create_output_port('empty_data') # # with pytest.warns(UserWarning) as warning: # out_port.append([]) # # assert len(warning) == 1 # # assert warning[0].message.args[0] == 'The new dataset that is stored under the tag name ' \ # '\'empty_data\' is empty.' # # def test_append_empty_data_add(self) -> None: # # out_port = self.create_output_port('empty_data') # # with pytest.warns(UserWarning) as warning: # out_port.append([]) # # assert len(warning) == 1 # # assert warning[0].message.args[0] == 'The dataset that is appended under the tag name ' \ # '\'empty_data\' is empty.' 
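# The tests above (both the active one and the commented-out ones) exercise the
# same basic pattern: write data and attributes through an OutputPort, flush,
# and read the result back through an InputPort on the same tag. A minimal
# sketch, assuming a DataStorage instance called ``storage`` as created in
# setup_class:
#
#     out_port = OutputPort('example', storage)
#     out_port.set_all(np.arange(5))
#     out_port.add_attribute('PIXSCALE', 0.01)
#     out_port.flush()
#
#     in_port = InputPort('example', storage)
#     assert np.array_equal(in_port.get_all(), np.arange(5))
#     assert in_port.get_attribute('PIXSCALE') == 0.01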
PynPoint-0.11.0/tests/test_core/test_processing.py000066400000000000000000000210111450275315200222660ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.background import LineSubtractionModule from pynpoint.processing.badpixel import BadPixelSigmaFilterModule from pynpoint.processing.extract import StarExtractionModule from pynpoint.processing.timedenoising import TimeNormalizationModule from pynpoint.util.tests import create_config, create_star_data, remove_test_data class TestProcessing: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' np.random.seed(1) images = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11)) large_data = np.random.normal(loc=0, scale=2e-4, size=(10000, 5, 5)) with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'w') as hdf_file: hdf_file.create_dataset('images', data=images) hdf_file.create_dataset('large_data', data=large_data) create_star_data(path=self.test_dir+'images') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) self.pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True) self.pipeline.set_attribute('large_data', 'PIXSCALE', 0.1, static=True) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['images']) def test_output_port_name(self) -> None: module = FitsReadingModule(name_in='read', image_tag='images', input_dir=self.test_dir+'images') module.add_output_port('test') with pytest.warns(UserWarning) as warning: module.add_output_port('test') assert len(warning) == 1 assert warning[0].message.args[0] == 'Tag \'test\' of ReadingModule \'read\' is already ' \ 'used.' module = BadPixelSigmaFilterModule(name_in='badpixel', image_in_tag='images', image_out_tag='im_out') module.add_output_port('test') with pytest.warns(UserWarning) as warning: module.add_output_port('test') assert len(warning) == 1 assert warning[0].message.args[0] == 'Tag \'test\' of ProcessingModule \'badpixel\' is ' \ 'already used.' def test_output_port_set_connection(self) -> None: self.pipeline.m_data_storage.open_connection() module = BadPixelSigmaFilterModule(name_in='badpixel2', image_in_tag='images', image_out_tag='im_out') self.pipeline.add_module(module) port = module.add_output_port('test1') self.pipeline.m_data_storage.close_connection() def test_apply_function(self) -> None: self.pipeline.set_attribute('config', 'MEMORY', 20, static=True) self.pipeline.set_attribute('config', 'CPU', 4, static=True) module = LineSubtractionModule(name_in='subtract', image_in_tag='images', image_out_tag='im_subtract', combine='mean', mask=None) self.pipeline.add_module(module) self.pipeline.run_module('subtract') data = self.pipeline.get_data('images') assert np.mean(data) == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) data = self.pipeline.get_data('im_subtract') assert np.mean(data) == pytest.approx(-1.2544487946113274e-21, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_apply_function_args_none(self) -> None: module = TimeNormalizationModule(name_in='norm', image_in_tag='images', image_out_tag='im_norm') self.pipeline.add_module(module) self.pipeline.run_module('norm') data = self.pipeline.get_data('im_norm') assert np.mean(data) == pytest.approx(2.4012571778516812e-06, rel=self.limit, abs=0.) 
assert data.shape == (5, 11, 11) def test_apply_function_args_none_memory_none(self) -> None: self.pipeline.set_attribute('config', 'MEMORY', 0, static=True) module = TimeNormalizationModule(name_in='norm_none', image_in_tag='images', image_out_tag='im_norm') self.pipeline.add_module(module) self.pipeline.run_module('norm_none') data = self.pipeline.get_data('im_norm') assert np.mean(data) == pytest.approx(2.4012571778516812e-06, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_apply_function_same_port(self) -> None: module = LineSubtractionModule(name_in='subtract_same', image_in_tag='im_subtract', image_out_tag='im_subtract', combine='mean', mask=None) self.pipeline.add_module(module) self.pipeline.run_module('subtract_same') data = self.pipeline.get_data('im_subtract') assert np.mean(data) == pytest.approx(-1.4336557652700885e-21, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_apply_function_args_none_memory_none_same_port(self) -> None: self.pipeline.set_attribute('config', 'MEMORY', 0, static=True) data = self.pipeline.get_data('images') assert np.mean(data) == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) module = TimeNormalizationModule(name_in='norm_none_same', image_in_tag='images', image_out_tag='images') self.pipeline.add_module(module) self.pipeline.run_module('norm_none_same') data = self.pipeline.get_data('images') assert np.mean(data) == pytest.approx(2.4012571778516812e-06, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) # def test_apply_function_to_images_memory_none(self) -> None: # # module = StarExtractionModule(name_in='extract', # image_in_tag='im_subtract', # image_out_tag='extract', # index_out_tag=None, # image_size=0.5, # fwhm_star=0.1, # position=(None, None, 0.1)) # # self.pipeline.add_module(module) # self.pipeline.run_module('extract') # # data = self.pipeline.get_data('extract') # assert np.mean(data) == pytest.approx(1.8259937251367536e-05, rel=self.limit, abs=0.) # assert data.shape == (5, 5, 5) # def test_multiproc_large_data(self) -> None: # # self.pipeline.set_attribute('config', 'MEMORY', 1000, static=True) # self.pipeline.set_attribute('config', 'CPU', 1, static=True) # # module = LineSubtractionModule(name_in='subtract_single', # image_in_tag='large_data', # image_out_tag='im_sub_single', # combine='mean', # mask=None) # # self.pipeline.add_module(module) # self.pipeline.run_module('subtract_single') # # self.pipeline.set_attribute('config', 'CPU', 4, static=True) # # module = LineSubtractionModule(name_in='subtract_multi', # image_in_tag='large_data', # image_out_tag='im_sub_multi', # combine='mean', # mask=None) # # self.pipeline.add_module(module) # self.pipeline.run_module('subtract_multi') # # data_single = self.pipeline.get_data('im_sub_single') # data_multi = self.pipeline.get_data('im_sub_multi') # assert data_single == pytest.approx(data_multi, rel=self.limit, abs=0.) 
# assert data_single.shape == data_multi.shape PynPoint-0.11.0/tests/test_core/test_pypeline.py000066400000000000000000000334001450275315200217440ustar00rootroot00000000000000import os import pytest import h5py import numpy as np from astropy.io import fits from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.readwrite.fitswriting import FitsWritingModule from pynpoint.processing.badpixel import BadPixelSigmaFilterModule from pynpoint.util.tests import create_config, remove_test_data class TestPypeline: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' np.random.seed(1) images = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11)) hdu = fits.PrimaryHDU() header = hdu.header header['INSTRUME'] = 'IMAGER' header['HIERARCH ESO DET EXP NO'] = 1 header['HIERARCH ESO DET NDIT'] = 5 header['HIERARCH ESO INS PIXSCALE'] = 0.01 header['HIERARCH ESO ADA POSANG'] = 10. header['HIERARCH ESO ADA POSANG END'] = 20. header['HIERARCH ESO SEQ CUMOFFSETX'] = 5. header['HIERARCH ESO SEQ CUMOFFSETY'] = 5. hdu.data = images hdu.writeto(self.test_dir+'images.fits') def teardown_class(self) -> None: remove_test_data(self.test_dir, files=['images.fits']) def test_create_default_config(self) -> None: with pytest.warns(UserWarning) as warning: Pypeline(self.test_dir, self.test_dir, self.test_dir) assert len(warning) == 1 assert warning[0].message.args[0] == 'Configuration file not found. Creating ' \ 'PynPoint_config.ini with default values ' \ 'in the working place.' with open(self.test_dir+'PynPoint_config.ini') as f_obj: count = 0 for _ in f_obj: count += 1 assert count == 25 def test_create_none_config(self) -> None: file_obj = open(self.test_dir+'PynPoint_config.ini', 'w') file_obj.write('[header]\n\n') file_obj.write('INSTRUMENT: None\n') file_obj.write('NFRAMES: None\n') file_obj.write('EXP_NO: None\n') file_obj.write('NDIT: None\n') file_obj.write('PARANG_START: ESO ADA POSANG\n') file_obj.write('PARANG_END: None\n') file_obj.write('DITHER_X: None\n') file_obj.write('DITHER_Y: None\n') file_obj.write('DIT: None\n') file_obj.write('LATITUDE: None\n') file_obj.write('LONGITUDE: None\n') file_obj.write('PUPIL: None\n') file_obj.write('DATE: None\n') file_obj.write('RA: None\n') file_obj.write('DEC: None\n\n') file_obj.write('[settings]\n\n') file_obj.write('PIXSCALE: None\n') file_obj.write('MEMORY: None\n') file_obj.write('CPU: None\n') file_obj.close() pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) attribute = pipeline.get_attribute('config', 'MEMORY', static=True) assert attribute == 0 attribute = pipeline.get_attribute('config', 'CPU', static=True) assert attribute == 0 attribute = pipeline.get_attribute('config', 'PIXSCALE', static=True) assert attribute == pytest.approx(0., rel=self.limit, abs=0.) attribute = pipeline.get_attribute('config', 'INSTRUMENT', static=True) assert attribute == 'None' create_config(self.test_dir+'PynPoint_config.ini') def test_create_pipeline_path_missing(self) -> None: dir_non_exists = self.test_dir + 'none_dir/' dir_exists = self.test_dir with pytest.raises(AssertionError) as error: Pypeline(dir_non_exists, dir_exists, dir_exists) assert str(error.value) == 'The folder that was chosen for the working place does not ' \ 'exist: '+self.test_dir+'none_dir/.' 
with pytest.raises(AssertionError) as error: Pypeline(dir_exists, dir_non_exists, dir_exists) assert str(error.value) == 'The folder that was chosen for the input place does not ' \ 'exist: '+self.test_dir+'none_dir/.' with pytest.raises(AssertionError) as error: Pypeline(dir_exists, dir_exists, dir_non_exists) assert str(error.value) == 'The folder that was chosen for the output place does not ' \ 'exist: '+self.test_dir+'none_dir/.' def test_create_pipeline_existing_database(self) -> None: np.random.seed(1) images = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11)) with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'w') as hdf_file: dset = hdf_file.create_dataset('images', data=images) dset.attrs['PIXSCALE'] = 0.01 pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) data = pipeline.get_data('images') assert np.mean(data) == pytest.approx(1.1824138000882435e-05, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) assert pipeline.get_attribute('images', 'PIXSCALE') == 0.01 os.remove(self.test_dir+'PynPoint_database.hdf5') def test_create_pipeline_new_database(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) pipeline.m_data_storage.open_connection() pipeline.m_data_storage.close_connection() del pipeline os.remove(self.test_dir+'PynPoint_database.hdf5') def test_add_module(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) module = FitsReadingModule(name_in='read1', input_dir=None, image_tag='im_arr1') assert pipeline.add_module(module) is None module = FitsReadingModule(name_in='read2', input_dir=self.test_dir, image_tag='im_arr2') assert pipeline.add_module(module) is None with pytest.warns(UserWarning) as warning: pipeline.add_module(module) assert len(warning) == 1 assert warning[0].message.args[0] == 'Names of pipeline modules that are added to the ' \ 'Pypeline need to be unique. The current pipeline ' \ 'module, \'read2\', does already exist in the ' \ 'Pypeline dictionary so the previous module with ' \ 'the same name will be overwritten.' 
module = BadPixelSigmaFilterModule(name_in='badpixel', image_in_tag='im_arr1', image_out_tag='im_out') assert pipeline.add_module(module) is None module = FitsWritingModule(name_in='write1', file_name='result.fits', data_tag='im_arr1') assert pipeline.add_module(module) is None module = FitsWritingModule(name_in='write2', file_name='result.fits', data_tag='im_arr1', output_dir=self.test_dir) assert pipeline.add_module(module) is None assert pipeline.run() is None assert pipeline.get_module_names() == ['read1', 'read2', 'badpixel', 'write1', 'write2'] os.remove(self.test_dir+'result.fits') os.remove(self.test_dir+'PynPoint_database.hdf5') def test_run_module(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) module = FitsReadingModule(name_in='read', image_tag='im_arr') assert pipeline.add_module(module) is None assert pipeline.run_module('read') is None os.remove(self.test_dir+'PynPoint_database.hdf5') def test_run_module_wrong_tag(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) module = FitsReadingModule(name_in='read') pipeline.add_module(module) module = FitsWritingModule(name_in='write', file_name='result.fits', data_tag='im_list') pipeline.add_module(module) module = BadPixelSigmaFilterModule(name_in='badpixel', image_in_tag='im_list', image_out_tag='im_out') pipeline.add_module(module) with pytest.raises(AttributeError) as error: pipeline.run_module('badpixel') assert str(error.value) == 'Pipeline module \'badpixel\' is looking for data under a ' \ 'tag which does not exist in the database.' with pytest.raises(AttributeError) as error: pipeline.run_module('write') assert str(error.value) == 'Pipeline module \'write\' is looking for data under a tag ' \ 'which does not exist in the database.' with pytest.raises(AttributeError) as error: pipeline.run() assert str(error.value) == 'Pipeline module \'write\' is looking for data under a tag ' \ 'which is not created by a previous module or the data does ' \ 'not exist in the database.' assert pipeline.validate_pipeline_module('test') == (False, 'test') os.remove(self.test_dir+'PynPoint_database.hdf5') def test_run_module_non_existing(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) with pytest.warns(UserWarning) as warning: pipeline.run_module('test') assert len(warning) == 1 assert warning[0].message.args[0] == 'Pipeline module \'test\' not found.' os.remove(self.test_dir+'PynPoint_database.hdf5') def test_remove_module(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) module = FitsReadingModule(name_in='read') pipeline.add_module(module) module = BadPixelSigmaFilterModule(name_in='badpixel', image_in_tag='im_arr1', image_out_tag='im_out') pipeline.add_module(module) assert pipeline.get_module_names() == ['read', 'badpixel'] assert pipeline.remove_module('read') assert pipeline.get_module_names() == ['badpixel'] assert pipeline.remove_module('badpixel') with pytest.warns(UserWarning) as warning: pipeline.remove_module('test') assert len(warning) == 1 assert warning[0].message.args[0] == 'Pipeline module \'test\' is not found in the ' \ 'Pypeline dictionary so it could not be removed. ' \ 'The dictionary contains the following modules: [].' 
\ os.remove(self.test_dir+'PynPoint_database.hdf5') def test_get_shape(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) module = FitsReadingModule(name_in='read', image_tag='images') pipeline.add_module(module) pipeline.run_module('read') assert pipeline.get_shape('images') == (5, 11, 11) def test_get_tags(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) assert pipeline.get_tags() == ['images'] def test_list_attributes(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) attr_dict = pipeline.list_attributes('images') assert len(attr_dict) == 11 assert attr_dict['INSTRUMENT'] == 'IMAGER' assert attr_dict['PIXSCALE'] == 0.027 assert attr_dict['NFRAMES'] == [5] assert attr_dict['PARANG_START'] == [10.] def test_set_and_get_attribute(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) pipeline.set_attribute('images', 'PIXSCALE', 0.1, static=True) pipeline.set_attribute('images', 'PARANG', np.arange(1., 11., 1.), static=False) attribute = pipeline.get_attribute('images', 'PIXSCALE', static=True) assert attribute == pytest.approx(0.1, rel=self.limit, abs=0.) attribute = pipeline.get_attribute('images', 'PARANG', static=False) assert attribute == pytest.approx(np.arange(1., 11., 1.), rel=self.limit, abs=0.) pipeline.set_attribute('images', 'PARANG', np.arange(10., 21., 1.), static=False) attribute = pipeline.get_attribute('images', 'PARANG', static=False) assert attribute == pytest.approx(np.arange(10., 21., 1.), rel=self.limit, abs=0.) def test_get_data_range(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) data = pipeline.get_data('images', data_range=(0, 2)) assert data.shape == (2, 11, 11) def test_delete_data(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) pipeline.delete_data('images') assert len(pipeline.get_tags()) == 0 def test_delete_not_found(self) -> None: pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) with pytest.warns(UserWarning) as warning: pipeline.delete_data('images') assert len(warning) == 2 assert warning[0].message.args[0] == 'Dataset \'images\' not found in the database.' assert warning[1].message.args[0] == 'Attributes of \'images\' not found in the database.' 
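# The last test only checks that a Pypeline can be constructed and run while the OMP_NUM_THREADS environment variable is set; the chosen value itself is not asserted on.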
def test_omp_num_threads(self) -> None: os.environ['OMP_NUM_THREADS'] = '2' pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) pipeline.run() PynPoint-0.11.0/tests/test_processing/000077500000000000000000000000001450275315200177325ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_processing/__init__.py000066400000000000000000000000001450275315200220310ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_processing/test_background.py000066400000000000000000000247151450275315200234730ustar00rootroot00000000000000import os import numpy as np import pytest from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.background import LineSubtractionModule, \ MeanBackgroundSubtractionModule, \ NoddingBackgroundModule, \ SimpleBackgroundSubtractionModule from pynpoint.processing.pcabackground import DitheringBackgroundModule from pynpoint.processing.stacksubset import StackCubesModule from pynpoint.util.tests import create_config, create_dither_data, create_star_data, \ remove_test_data class TestBackground: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_dither_data(self.test_dir+'dither') create_star_data(self.test_dir+'science') create_star_data(self.test_dir+'sky') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['dither', 'science', 'sky']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read1', image_tag='dither', input_dir=self.test_dir+'dither') self.pipeline.add_module(module) self.pipeline.run_module('read1') data = self.pipeline.get_data('dither') assert np.sum(data) == pytest.approx(211.20534661914408, rel=self.limit, abs=0.) assert data.shape == (20, 21, 21) module = FitsReadingModule(name_in='read2', image_tag='science', input_dir=self.test_dir+'science') self.pipeline.add_module(module) self.pipeline.run_module('read2') data = self.pipeline.get_data('science') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) module = FitsReadingModule(name_in='read3', image_tag='sky', input_dir=self.test_dir+'sky') self.pipeline.add_module(module) self.pipeline.run_module('read3') data = self.pipeline.get_data('sky') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_simple_background(self) -> None: module = SimpleBackgroundSubtractionModule(shift=5, name_in='simple', image_in_tag='dither', image_out_tag='simple') self.pipeline.add_module(module) self.pipeline.run_module('simple') data = self.pipeline.get_data('simple') # TODO Unclear why the absolute value is required # Local the sum is 3.55 and on Github -3.55 assert np.abs(np.sum(data)) == pytest.approx(3.552713678800501e-15, rel=self.limit, abs=0.) assert data.shape == (20, 21, 21) def test_mean_background_shift(self) -> None: module = MeanBackgroundSubtractionModule(shift=5, cubes=1, name_in='mean2', image_in_tag='dither', image_out_tag='mean2') self.pipeline.add_module(module) self.pipeline.run_module('mean2') data = self.pipeline.get_data('mean2') assert np.sum(data) == pytest.approx(2.473864361018551, rel=self.limit, abs=0.) 
assert data.shape == (20, 21, 21) def test_mean_background_nframes(self) -> None: module = MeanBackgroundSubtractionModule(shift=None, cubes=1, name_in='mean1', image_in_tag='dither', image_out_tag='mean1') self.pipeline.add_module(module) self.pipeline.run_module('mean1') data = self.pipeline.get_data('mean1') assert np.sum(data) == pytest.approx(2.473864361018551, rel=self.limit, abs=0.) assert data.shape == (20, 21, 21) def test_dithering_attributes(self) -> None: module = DitheringBackgroundModule(name_in='pca_dither1', image_in_tag='dither', image_out_tag='pca_dither1', center=None, cubes=None, size=0.2, gaussian=0.05, subframe=0.1, pca_number=1, mask_star=0.05) self.pipeline.add_module(module) self.pipeline.run_module('pca_dither1') data = self.pipeline.get_data('dither_dither_crop1') assert np.sum(data) == pytest.approx(54.62410860562912, rel=self.limit, abs=0.) assert data.shape == (20, 9, 9) data = self.pipeline.get_data('dither_dither_star1') assert np.sum(data) == pytest.approx(54.873885838788595, rel=self.limit, abs=0.) assert data.shape == (5, 9, 9) data = self.pipeline.get_data('dither_dither_mean1') assert np.sum(data) == pytest.approx(54.204960755115245, rel=self.limit, abs=0.) assert data.shape == (5, 9, 9) data = self.pipeline.get_data('dither_dither_background1') assert np.sum(data) == pytest.approx(-0.24977723315947564, rel=self.limit, abs=0.) assert data.shape == (15, 9, 9) data = self.pipeline.get_data('dither_dither_pca_fit1') assert np.sum(data) == pytest.approx(-0.6816458444287745, rel=1e-5, abs=0.) assert data.shape == (5, 9, 9) data = self.pipeline.get_data('dither_dither_pca_res1') assert np.sum(data) == pytest.approx(55.63879076093719, rel=1e-6, abs=0.) assert data.shape == (5, 9, 9) data = self.pipeline.get_data('dither_dither_pca_mask1') assert np.sum(data) == pytest.approx(360.0, rel=self.limit, abs=0.) assert data.shape == (5, 9, 9) data = self.pipeline.get_data('pca_dither1') assert np.sum(data) == pytest.approx(208.24417329569593, rel=1e-6, abs=0.) assert data.shape == (20, 9, 9) attr = self.pipeline.get_attribute('dither_dither_pca_res1', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(40., rel=self.limit, abs=0.) assert attr.shape == (5, 2) def test_dithering_center(self) -> None: module = DitheringBackgroundModule(name_in='pca_dither2', image_in_tag='dither', image_out_tag='pca_dither2', center=[(5, 5), (5, 15), (15, 15), (15, 5)], cubes=1, size=0.2, gaussian=0.05, subframe=None, pca_number=1, mask_star=0.05) self.pipeline.add_module(module) self.pipeline.run_module('pca_dither2') data = self.pipeline.get_data('pca_dither2') assert np.sum(data) == pytest.approx(208.24417332523367, rel=1e-6, abs=0.) assert data.shape == (20, 9, 9) def test_nodding_background(self) -> None: module = StackCubesModule(name_in='mean', image_in_tag='sky', image_out_tag='mean', combine='mean') self.pipeline.add_module(module) self.pipeline.run_module('mean') data = self.pipeline.get_data('mean') assert np.sum(data) == pytest.approx(21.108557759610548, rel=self.limit, abs=0.) assert data.shape == (2, 11, 11) attr = self.pipeline.get_attribute('mean', 'INDEX', static=False) assert np.sum(attr) == pytest.approx(1, rel=self.limit, abs=0.) assert attr.shape == (2, ) attr = self.pipeline.get_attribute('mean', 'NFRAMES', static=False) assert np.sum(attr) == pytest.approx(2, rel=self.limit, abs=0.) 
assert attr.shape == (2, ) module = NoddingBackgroundModule(name_in='nodding', sky_in_tag='mean', science_in_tag='science', image_out_tag='nodding', mode='both') self.pipeline.add_module(module) self.pipeline.run_module('nodding') data = self.pipeline.get_data('nodding') assert np.sum(data) == pytest.approx(1.793466459074705, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_line_background_mean(self) -> None: module = LineSubtractionModule(name_in='line1', image_in_tag='science', image_out_tag='science_line1', combine='mean', mask=0.1) self.pipeline.add_module(module) self.pipeline.run_module('line1') data = self.pipeline.get_data('science_line1') assert np.sum(data) == pytest.approx(104.55443019231085, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_line_background_median(self) -> None: module = LineSubtractionModule(name_in='line2', image_in_tag='science', image_out_tag='science_line2', combine='median', mask=0.1) self.pipeline.add_module(module) self.pipeline.run_module('line2') data = self.pipeline.get_data('science_line2') assert np.sum(data) == pytest.approx(106.09825573198366, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) PynPoint-0.11.0/tests/test_processing/test_badpixel.py000066400000000000000000000223531450275315200231400ustar00rootroot00000000000000import os import math import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.processing.badpixel import BadPixelSigmaFilterModule, BadPixelMapModule, \ BadPixelInterpolationModule, BadPixelTimeFilterModule, \ ReplaceBadPixelsModule from pynpoint.util.tests import create_config, remove_test_data class TestBadPixel: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' np.random.seed(1) images = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11)) dark = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11)) flat = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11)) images[0, 5, 5] = 1. dark[:, 5, 5] = 1. flat[:, 8, 8] = -1. flat[:, 9, 9] = -1. flat[:, 10, 10] = -1. with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'w') as hdf_file: hdf_file.create_dataset('images', data=images) hdf_file.create_dataset('dark', data=dark) hdf_file.create_dataset('flat', data=flat) create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir) def test_bad_pixel_sigma_filter(self) -> None: module = BadPixelSigmaFilterModule(name_in='sigma1', image_in_tag='images', image_out_tag='sigma1', map_out_tag='None', box=9, sigma=3., iterate=5) self.pipeline.add_module(module) self.pipeline.run_module('sigma1') data = self.pipeline.get_data('sigma1') assert np.sum(data) == pytest.approx(0.006513475520308432, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_bad_pixel_map_out(self) -> None: module = BadPixelSigmaFilterModule(name_in='sigma2', image_in_tag='images', image_out_tag='sigma2', map_out_tag='bpmap', box=9, sigma=2., iterate=3) self.pipeline.add_module(module) self.pipeline.run_module('sigma2') data = self.pipeline.get_data('sigma2') assert data[0, 0, 0] == pytest.approx(-2.4570591355257687e-05, rel=self.limit, abs=0.) assert data[0, 5, 5] == pytest.approx(9.903775276151606e-06, rel=self.limit, abs=0.) assert np.sum(data) == pytest.approx(0.011777887008566097, rel=self.limit, abs=0.) 
assert data.shape == (5, 11, 11) data = self.pipeline.get_data('bpmap') assert data[0, 1, 1] == pytest.approx(1., rel=self.limit, abs=0.) assert data[0, 5, 5] == pytest.approx(0., rel=self.limit, abs=0.) assert np.sum(data) == pytest.approx(519.0, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_bad_pixel_map(self) -> None: module = BadPixelMapModule(name_in='bp_map1', dark_in_tag='dark', flat_in_tag='flat', bp_map_out_tag='bp_map1', dark_threshold=0.99, flat_threshold=-0.99) self.pipeline.add_module(module) self.pipeline.run_module('bp_map1') data = self.pipeline.get_data('bp_map1') assert data[0, 0, 0] == pytest.approx(1., rel=self.limit, abs=0.) assert data[0, 5, 5] == pytest.approx(0., rel=self.limit, abs=0.) assert np.sum(data) == pytest.approx(117., rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) def test_map_no_dark(self) -> None: module = BadPixelMapModule(name_in='bp_map2', dark_in_tag=None, flat_in_tag='flat', bp_map_out_tag='bp_map2', dark_threshold=0.99, flat_threshold=-0.99) self.pipeline.add_module(module) self.pipeline.run_module('bp_map2') data = self.pipeline.get_data('bp_map2') assert np.sum(data) == pytest.approx(118., rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) def test_map_no_flat(self) -> None: module = BadPixelMapModule(name_in='bp_map3', dark_in_tag='dark', flat_in_tag=None, bp_map_out_tag='bp_map3', dark_threshold=0.99, flat_threshold=-0.99) self.pipeline.add_module(module) self.pipeline.run_module('bp_map3') data = self.pipeline.get_data('bp_map3') assert np.sum(data) == pytest.approx(120., rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) def test_bad_pixel_interpolation(self) -> None: module = BadPixelInterpolationModule(name_in='interpolation', image_in_tag='images', bad_pixel_map_tag='bp_map1', image_out_tag='interpolation', iterations=10) self.pipeline.add_module(module) self.pipeline.run_module('interpolation') data = self.pipeline.get_data('interpolation') assert data[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=1e-6, abs=0.) assert data[0, 5, 5] == pytest.approx(-1.4292408645473845e-05, rel=1e-6, abs=0.) assert np.sum(data) == pytest.approx(0.008683344127872174, rel=1e-6, abs=0.) assert data.shape == (5, 11, 11) def test_bad_pixel_time(self) -> None: module = BadPixelTimeFilterModule(name_in='time', image_in_tag='images', image_out_tag='time', sigma=(2., 2.)) self.pipeline.add_module(module) self.pipeline.run_module('time') data = self.pipeline.get_data('time') assert data[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=self.limit, abs=0.) assert data[0, 5, 5] == pytest.approx(-0.00017468532119886812, rel=self.limit, abs=0.) assert np.sum(data) == pytest.approx(0.004175672043832705, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_replace_bad_pixels(self) -> None: module = ReplaceBadPixelsModule(name_in='replace1', image_in_tag='images', map_in_tag='bp_map1', image_out_tag='replace', size=2, replace='mean') self.pipeline.add_module(module) self.pipeline.run_module('replace1') data = self.pipeline.get_data('replace') assert data[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=self.limit, abs=0.) assert data[0, 5, 5] == pytest.approx(4.260114004413933e-05, rel=self.limit, abs=0.) assert np.sum(data) == pytest.approx(0.00883357395370896, rel=self.limit, abs=0.) 
assert data.shape == (5, 11, 11) module = ReplaceBadPixelsModule(name_in='replace2', image_in_tag='images', map_in_tag='bp_map1', image_out_tag='replace', size=2, replace='median') self.pipeline.add_module(module) self.pipeline.run_module('replace2') data = self.pipeline.get_data('replace') assert data[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=self.limit, abs=0.) assert data[0, 5, 5] == pytest.approx(4.327154179438619e-05, rel=self.limit, abs=0.) assert np.sum(data) == pytest.approx(0.008489525337709688, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) module = ReplaceBadPixelsModule(name_in='replace3', image_in_tag='images', map_in_tag='bp_map1', image_out_tag='replace', size=2, replace='nan') self.pipeline.add_module(module) self.pipeline.run_module('replace3') data = self.pipeline.get_data('replace') assert data[0, 0, 0] == pytest.approx(0.00032486907273264834, rel=self.limit, abs=0.) assert math.isnan(data[0, 5, 5]) assert np.nansum(data) == pytest.approx(0.009049653234723834, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) PynPoint-0.11.0/tests/test_processing/test_basic.py000066400000000000000000000113611450275315200224260ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.basic import SubtractImagesModule, AddImagesModule, RotateImagesModule, \ RepeatImagesModule from pynpoint.util.tests import create_config, remove_test_data, create_star_data class TestBasic: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'data1') create_star_data(self.test_dir+'data2') create_star_data(self.test_dir+'data3') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['data1', 'data2', 'data3']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read1', image_tag='data1', input_dir=self.test_dir+'data1', overwrite=True, check=True) self.pipeline.add_module(module) module = FitsReadingModule(name_in='read2', image_tag='data2', input_dir=self.test_dir+'data2', overwrite=True, check=True) self.pipeline.add_module(module) module = FitsReadingModule(name_in='read3', image_tag='data3', input_dir=self.test_dir+'data3', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read1') self.pipeline.run_module('read2') self.pipeline.run_module('read3') data = self.pipeline.get_data('data1') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) data = self.pipeline.get_data('data2') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) data = self.pipeline.get_data('data3') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_subtract_images(self) -> None: module = SubtractImagesModule(name_in='subtract', image_in_tags=('data1', 'data2'), image_out_tag='subtract', scaling=1.) self.pipeline.add_module(module) self.pipeline.run_module('subtract') data = self.pipeline.get_data('subtract') assert np.sum(data) == pytest.approx(0., rel=self.limit, abs=0.) 
assert data.shape == (10, 11, 11) def test_add_images(self) -> None: module = AddImagesModule(name_in='add', image_in_tags=('data1', 'data2'), image_out_tag='add', scaling=1.) self.pipeline.add_module(module) self.pipeline.run_module('add') data = self.pipeline.get_data('add') assert np.sum(data) == pytest.approx(211.08557759610554, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_rotate_images(self) -> None: module = RotateImagesModule(name_in='rotate', image_in_tag='data1', image_out_tag='rotate', angle=10.) self.pipeline.add_module(module) self.pipeline.run_module('rotate') data = self.pipeline.get_data('rotate') assert np.sum(data) == pytest.approx(105.86657256219851, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_repeat_images(self) -> None: module = RepeatImagesModule(name_in='repeat', image_in_tag='data1', image_out_tag='repeat', repeat=2) self.pipeline.add_module(module) self.pipeline.run_module('repeat') data1 = self.pipeline.get_data('data1') assert data1.shape == (10, 11, 11) data2 = self.pipeline.get_data('repeat') assert data2.shape == (20, 11, 11) assert data1 == pytest.approx(data2[0:10, ], rel=self.limit, abs=0.) assert data1 == pytest.approx(data2[10:20, ], rel=self.limit, abs=0.) PynPoint-0.11.0/tests/test_processing/test_centering.py000066400000000000000000000403421450275315200233240ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.centering import StarAlignmentModule, ShiftImagesModule, \ WaffleCenteringModule, FitCenterModule from pynpoint.processing.extract import StarExtractionModule from pynpoint.processing.resizing import AddLinesModule from pynpoint.util.tests import create_config, create_star_data, create_waffle_data, \ remove_test_data class TestCentering: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'star') create_waffle_data(self.test_dir+'waffle') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(path=self.test_dir, folders=['star', 'waffle']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read1', image_tag='star', input_dir=self.test_dir+'star', overwrite=True, check=True) self.pipeline.add_module(module) module = FitsReadingModule(name_in='read2', image_tag='waffle', input_dir=self.test_dir+'waffle', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read1') self.pipeline.run_module('read2') data = self.pipeline.get_data('star') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) data = self.pipeline.get_data('waffle') assert np.sum(data) == pytest.approx(4.000000000000196, rel=self.limit, abs=0.) assert data.shape == (1, 101, 101) def test_star_extract(self) -> None: module = StarExtractionModule(name_in='extract1', image_in_tag='star', image_out_tag='extract1', index_out_tag='index', image_size=0.2, fwhm_star=0.1, position=None) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('extract1') assert len(warning) == 3 assert warning[0].message.args[0] == 'Can not store the attribute \'INSTRUMENT\' ' \ 'because the dataset \'index\' does not exist.' 
data = self.pipeline.get_data('extract1') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_star_align(self) -> None: module = StarAlignmentModule(name_in='align1', image_in_tag='extract1', ref_image_in_tag=None, image_out_tag='align1', accuracy=10, resize=2., num_references=10, subframe=None) self.pipeline.add_module(module) self.pipeline.run_module('align1') data = self.pipeline.get_data('align1') assert np.sum(data) == pytest.approx(104.7074742320535, rel=self.limit, abs=0.) assert data.shape == (10, 18, 18) def test_star_align_subframe(self) -> None: module = StarAlignmentModule(name_in='align2', image_in_tag='extract1', ref_image_in_tag=None, image_out_tag='align2', accuracy=10, resize=None, num_references=10, subframe=0.1) self.pipeline.add_module(module) self.pipeline.run_module('align2') data = self.pipeline.get_data('align2') assert np.sum(data) == pytest.approx(104.39031104541652, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_star_align_ref(self) -> None: module = StarAlignmentModule(name_in='align3', image_in_tag='extract1', ref_image_in_tag='align2', image_out_tag='align3', accuracy=10, resize=None, num_references=10, subframe=None) self.pipeline.add_module(module) self.pipeline.run_module('align3') data = self.pipeline.get_data('align3') assert np.sum(data) == pytest.approx(104.46997194330757, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_star_align_number_ref(self) -> None: module = StarAlignmentModule(name_in='align4', image_in_tag='extract1', ref_image_in_tag='align2', image_out_tag='align4', accuracy=10, resize=None, num_references=20, subframe=None) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('align4') assert len(warning) == 1 assert warning[0].message.args[0] == 'Number of available images (10) is smaller than ' \ 'num_references (20). Using all available images ' \ 'instead.' data = self.pipeline.get_data('align4') assert np.sum(data) == pytest.approx(104.46997194330757, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_shift_images_spline(self) -> None: module = ShiftImagesModule(shift_xy=(1., 2.), interpolation='spline', name_in='shift1', image_in_tag='align1', image_out_tag='shift') self.pipeline.add_module(module) self.pipeline.run_module('shift1') data = self.pipeline.get_data('shift') assert np.sum(data) == pytest.approx(104.20425101355244, rel=self.limit, abs=0.) assert data.shape == (10, 18, 18) def test_shift_images_fft(self) -> None: module = ShiftImagesModule(shift_xy=(1., 2.), interpolation='fft', name_in='shift2', image_in_tag='align1', image_out_tag='shift_fft') self.pipeline.add_module(module) self.pipeline.run_module('shift2') data = self.pipeline.get_data('shift_fft') assert np.sum(data) == pytest.approx(104.7074742320535, rel=self.limit, abs=0.) assert data.shape == (10, 18, 18) def test_waffle_center_odd(self) -> None: module = AddLinesModule(name_in='add', image_in_tag='star', image_out_tag='star_add', lines=(45, 45, 45, 45)) self.pipeline.add_module(module) self.pipeline.run_module('add') data = self.pipeline.get_data('star_add') assert np.sum(data) == pytest.approx(105.54278879805278, rel=self.limit, abs=0.) 
assert data.shape == (10, 101, 101) module = WaffleCenteringModule(size=0.2, center=(50, 50), name_in='waffle', image_in_tag='star_add', center_in_tag='waffle', image_out_tag='center', radius=35., pattern='x', sigma=0.05) self.pipeline.add_module(module) with pytest.warns(DeprecationWarning) as warning: self.pipeline.run_module('waffle') assert len(warning) == 1 assert warning[0].message.args[0] == 'The \'pattern\' parameter will be deprecated in a ' \ 'future release. Please Use the \'angle\' ' \ 'parameter instead and set it to 45.0 degrees.' data = self.pipeline.get_data('center') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) attr = self.pipeline.get_attribute('center', 'History: WaffleCenteringModule') assert attr == '[x, y] = [50.0, 50.0]' def test_waffle_center_even(self) -> None: module = AddLinesModule(name_in='add1', image_in_tag='star_add', image_out_tag='star_even', lines=(0, 1, 0, 1)) self.pipeline.add_module(module) self.pipeline.run_module('add1') data = self.pipeline.get_data('star_even') assert np.sum(data) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.) assert data.shape == (10, 102, 102) module = AddLinesModule(name_in='add2', image_in_tag='waffle', image_out_tag='waffle_even', lines=(0, 1, 0, 1)) self.pipeline.add_module(module) self.pipeline.run_module('add2') data = self.pipeline.get_data('waffle_even') assert np.sum(data) == pytest.approx(4.000000000000195, rel=self.limit, abs=0.) assert data.shape == (1, 102, 102) module = ShiftImagesModule(shift_xy=(0.5, 0.5), interpolation='spline', name_in='shift3', image_in_tag='star_even', image_out_tag='star_shift') self.pipeline.add_module(module) self.pipeline.run_module('shift3') data = self.pipeline.get_data('star_shift') assert np.sum(data) == pytest.approx(105.54278879805274, rel=self.limit, abs=0.) assert data.shape == (10, 102, 102) module = ShiftImagesModule(shift_xy=(0.5, 0.5), interpolation='spline', name_in='shift4', image_in_tag='waffle_even', image_out_tag='waffle_shift') self.pipeline.add_module(module) self.pipeline.run_module('shift4') data = self.pipeline.get_data('waffle_shift') assert np.sum(data) == pytest.approx(4.000000000000194, rel=self.limit, abs=0.) assert data.shape == (1, 102, 102) module = WaffleCenteringModule(size=0.2, center=(50, 50), name_in='waffle_even', image_in_tag='star_shift', center_in_tag='waffle_shift', image_out_tag='center_even', radius=35., pattern='x', sigma=0.05) self.pipeline.add_module(module) with pytest.warns(DeprecationWarning) as warning: self.pipeline.run_module('waffle_even') assert len(warning) == 1 assert warning[0].message.args[0] == 'The \'pattern\' parameter will be deprecated in a ' \ 'future release. Please Use the \'angle\' ' \ 'parameter instead and set it to 45.0 degrees.' data = self.pipeline.get_data('center_even') assert np.sum(data) == pytest.approx(105.22695036281449, rel=self.limit, abs=0.) 
assert data.shape == (10, 9, 9) attr = self.pipeline.get_attribute('center_even', 'History: WaffleCenteringModule') assert attr == '[x, y] = [50.5, 50.5]' def test_fit_center_full(self) -> None: module = FitCenterModule(name_in='fit1', image_in_tag='shift', fit_out_tag='fit_full', mask_out_tag='mask', method='full', mask_radii=(None, 0.1), sign='positive', model='gaussian', guess=(1., 2., 3., 3., 0.01, 0., 0.)) self.pipeline.add_module(module) self.pipeline.run_module('fit1') data = self.pipeline.get_data('fit_full') assert np.mean(data[:, 0]) == pytest.approx(0.94, rel=0., abs=0.05) assert np.mean(data[:, 2]) == pytest.approx(2.07, rel=0., abs=0.05) assert np.mean(data[:, 4]) == pytest.approx(0.0676, rel=0., abs=0.05) assert np.mean(data[:, 6]) == pytest.approx(0.0665, rel=0., abs=0.05) assert np.mean(data[:, 8]) == pytest.approx(0.24, rel=0., abs=0.05) assert data.shape == (10, 14) data = self.pipeline.get_data('mask') assert np.sum(data) == pytest.approx(103.45599730750453, rel=self.limit, abs=0.) assert data.shape == (10, 18, 18) def test_fit_center_mean(self) -> None: module = FitCenterModule(name_in='fit2', image_in_tag='shift', fit_out_tag='fit_mean', mask_out_tag=None, method='mean', mask_radii=(None, 0.1), sign='positive', model='moffat', guess=(1., 2., 3., 3., 0.01, 0., 0., 1.)) self.pipeline.add_module(module) self.pipeline.run_module('fit2') data = self.pipeline.get_data('fit_mean') assert np.mean(data[:, 0]) == pytest.approx(0.94, rel=0., abs=0.01) assert np.mean(data[:, 2]) == pytest.approx(2.059, rel=0., abs=0.01) assert np.mean(data[:, 4]) == pytest.approx(0.083, rel=0., abs=0.01) assert np.mean(data[:, 6]) == pytest.approx(0.08, rel=0., abs=0.01) assert np.mean(data[:, 8]) == pytest.approx(0.242, rel=0., abs=0.01) assert data.shape == (10, 16) def test_shift_images_tag(self) -> None: module = ShiftImagesModule(shift_xy='fit_full', interpolation='spline', name_in='shift5', image_in_tag='shift', image_out_tag='shift_tag_1') self.pipeline.add_module(module) self.pipeline.run_module('shift5') data = self.pipeline.get_data('shift_tag_1') assert np.sum(data) == pytest.approx(103.76410960085425, rel=1e-6, abs=0.9) assert data.shape == (10, 18, 18) def test_shift_images_tag_mean(self) -> None: module = ShiftImagesModule(shift_xy='fit_mean', interpolation='spline', name_in='shift6', image_in_tag='shift', image_out_tag='shift_tag_2') self.pipeline.add_module(module) self.pipeline.run_module('shift6') data = self.pipeline.get_data('shift_tag_2') assert np.sum(data) == pytest.approx(103.42285439876926, rel=1e-6, abs=0.) 
        assert data.shape == (10, 18, 18)

PynPoint-0.11.0/tests/test_processing/test_darkflat.py

import os

import h5py
import pytest
import numpy as np

from pynpoint.core.pypeline import Pypeline
from pynpoint.processing.darkflat import DarkCalibrationModule, FlatCalibrationModule
from pynpoint.util.tests import create_config, remove_test_data


class TestDarkFlat:

    def setup_class(self) -> None:

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        np.random.seed(1)

        images = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11))
        dark = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11))
        flat = np.random.normal(loc=0, scale=2e-4, size=(5, 11, 11))
        crop = np.random.normal(loc=0, scale=2e-4, size=(5, 7, 7))

        with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'w') as hdf_file:
            hdf_file.create_dataset('images', data=images)
            hdf_file.create_dataset('dark', data=dark)
            hdf_file.create_dataset('flat', data=flat)
            hdf_file.create_dataset('crop', data=crop)

        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:

        remove_test_data(self.test_dir)

    def test_input_data(self) -> None:

        data = self.pipeline.get_data('dark')
        assert np.sum(data) == pytest.approx(-2.0262345764957305e-05, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

        data = self.pipeline.get_data('flat')
        assert np.sum(data) == pytest.approx(0.0076413379497053, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_dark_calibration(self) -> None:

        module = DarkCalibrationModule(name_in='dark',
                                       image_in_tag='images',
                                       dark_in_tag='dark',
                                       image_out_tag='dark_cal')

        self.pipeline.add_module(module)
        self.pipeline.run_module('dark')

        data = self.pipeline.get_data('dark_cal')
        assert np.sum(data) == pytest.approx(0.00717386583629883, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_flat_calibration(self) -> None:

        module = FlatCalibrationModule(name_in='flat',
                                       image_in_tag='dark_cal',
                                       flat_in_tag='flat',
                                       image_out_tag='flat_cal')

        self.pipeline.add_module(module)
        self.pipeline.run_module('flat')

        data = self.pipeline.get_data('flat_cal')
        assert np.sum(data) == pytest.approx(0.00717439711853594, rel=self.limit, abs=0.)
        assert data.shape == (5, 11, 11)

    def test_flat_crop(self) -> None:

        module = FlatCalibrationModule(name_in='flat_crop',
                                       image_in_tag='crop',
                                       flat_in_tag='flat',
                                       image_out_tag='flat_crop')

        self.pipeline.add_module(module)

        with pytest.warns(UserWarning) as warning:
            self.pipeline.run_module('flat_crop')

        assert len(warning) == 1

        assert warning[0].message.args[0] == 'The calibration images were cropped around their ' \
                                             'center to match the shape of the science images.'

        data = self.pipeline.get_data('flat_crop')
        assert np.sum(data) == pytest.approx(-0.003242901413605404, rel=self.limit, abs=0.)
        assert data.shape == (5, 7, 7)

    def test_flat_too_small(self) -> None:

        module = FlatCalibrationModule(name_in='flat_small',
                                       image_in_tag='flat',
                                       flat_in_tag='crop',
                                       image_out_tag='flat_small')

        self.pipeline.add_module(module)

        with pytest.raises(ValueError) as error:
            self.pipeline.run_module('flat_small')

        assert str(error.value) == 'Shape of the calibration images is smaller than the ' \
                                   'science images.'
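# The same calibration chain can also be run outside of pytest. A minimal
# sketch, assuming a working directory (the 'working_dir' placeholder below)
# that already holds a PynPoint_database.hdf5 with 'images', 'dark', and
# 'flat' datasets plus a PynPoint_config.ini:
#
#     from pynpoint.core.pypeline import Pypeline
#     from pynpoint.processing.darkflat import DarkCalibrationModule, FlatCalibrationModule
#
#     pipeline = Pypeline('working_dir', 'working_dir', 'working_dir')
#
#     pipeline.add_module(DarkCalibrationModule(name_in='dark',
#                                               image_in_tag='images',
#                                               dark_in_tag='dark',
#                                               image_out_tag='dark_cal'))
#
#     pipeline.add_module(FlatCalibrationModule(name_in='flat',
#                                               image_in_tag='dark_cal',
#                                               flat_in_tag='flat',
#                                               image_out_tag='flat_cal'))
#
#     pipeline.run_module('dark')
#     pipeline.run_module('flat')
#
#     calibrated = pipeline.get_data('flat_cal')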
PynPoint-0.11.0/tests/test_processing/test_extract.py000066400000000000000000000231451450275315200230220ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.extract import StarExtractionModule, ExtractBinaryModule from pynpoint.util.tests import create_config, create_star_data, create_fake_data, remove_test_data class TestExtract: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'star') create_fake_data(self.test_dir+'binary') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(path=self.test_dir, folders=['star', 'binary']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read1', image_tag='star', input_dir=self.test_dir+'star', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read1') data = self.pipeline.get_data('star') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) module = FitsReadingModule(name_in='read2', image_tag='binary', input_dir=self.test_dir+'binary', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read2') data = self.pipeline.get_data('binary') assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) self.pipeline.set_attribute('binary', 'PARANG', -1.*np.linspace(0., 180., 10), static=False) def test_extract_position_none(self) -> None: module = StarExtractionModule(name_in='extract1', image_in_tag='star', image_out_tag='extract1', index_out_tag='index', image_size=0.2, fwhm_star=0.1, position=None) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('extract1') assert len(warning) == 3 assert warning[0].message.args[0] == 'Can not store the attribute \'INSTRUMENT\' because ' \ 'the dataset \'index\' does not exist.' assert warning[1].message.args[0] == 'Can not store the attribute \'PIXSCALE\' because ' \ 'the dataset \'index\' does not exist.' assert warning[2].message.args[0] == 'Can not store the attribute \'History: ' \ 'StarExtractionModule\' because the dataset ' \ '\'index\' does not exist.' data = self.pipeline.get_data('extract1') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_extract_center_none(self) -> None: module = StarExtractionModule(name_in='extract2', image_in_tag='star', image_out_tag='extract2', index_out_tag='index', image_size=0.2, fwhm_star=0.1, position=(None, None, 0.2)) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('extract2') assert len(warning) == 3 assert warning[0].message.args[0] == 'Can not store the attribute \'INSTRUMENT\' because ' \ 'the dataset \'index\' does not exist.' assert warning[1].message.args[0] == 'Can not store the attribute \'PIXSCALE\' because ' \ 'the dataset \'index\' does not exist.' assert warning[2].message.args[0] == 'Can not store the attribute \'History: ' \ 'StarExtractionModule\' because the dataset ' \ '\'index\' does not exist.' data = self.pipeline.get_data('extract2') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) 
assert data.shape == (10, 9, 9) def test_extract_position(self) -> None: module = StarExtractionModule(name_in='extract7', image_in_tag='star', image_out_tag='extract7', index_out_tag=None, image_size=0.2, fwhm_star=0.1, position=(5, 5, 0.2)) self.pipeline.add_module(module) self.pipeline.run_module('extract7') data = self.pipeline.get_data('extract7') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_extract_too_large(self) -> None: module = StarExtractionModule(name_in='extract3', image_in_tag='star', image_out_tag='extract3', index_out_tag=None, image_size=0.2, fwhm_star=0.1, position=(2, 2, 0.05)) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('extract3') assert len(warning) == 10 assert warning[0].message.args[0] == f'Chosen image size is too large to crop the image ' \ f'around the brightest pixel (image index = 0, ' \ f'pixel [x, y] = [2, 2]). Using the center of ' \ f'the image instead.' data = self.pipeline.get_data('extract3') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) def test_star_extract_cpu(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 module = StarExtractionModule(name_in='extract4', image_in_tag='star', image_out_tag='extract4', index_out_tag='index', image_size=0.2, fwhm_star=0.1, position=(2, 2, 0.05)) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('extract4') assert len(warning) == 2 assert warning[0].message.args[0] == 'The \'index_out_port\' can only be used if ' \ 'CPU = 1. No data will be stored to this output port.' assert warning[1].message.args[0] == 'Chosen image size is too large to crop the image ' \ 'around the brightest pixel (image index = 0, ' \ 'pixel [x, y] = [2, 2]). Using the center of the ' \ 'image instead.' def test_extract_binary(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 1 module = ExtractBinaryModule(pos_center=(10., 10.), pos_binary=(10., 16.), name_in='extract5', image_in_tag='binary', image_out_tag='extract5', image_size=0.15, search_size=0.07, filter_size=None) self.pipeline.add_module(module) self.pipeline.run_module('extract5') data = self.pipeline.get_data('extract5') assert np.sum(data) == pytest.approx(1.3419098759577548, rel=self.limit, abs=0.) assert data.shape == (10, 7, 7) def test_extract_binary_filter(self) -> None: module = ExtractBinaryModule(pos_center=(10., 10.), pos_binary=(10., 16.), name_in='extract6', image_in_tag='binary', image_out_tag='extract6', image_size=0.15, search_size=0.07, filter_size=0.05) self.pipeline.add_module(module) self.pipeline.run_module('extract6') data = self.pipeline.get_data('extract6') assert np.sum(data) == pytest.approx(1.3789593661036972, rel=self.limit, abs=0.) 
        assert data.shape == (10, 7, 7)

PynPoint-0.11.0/tests/test_processing/test_filter.py

import os

import pytest
import numpy as np

from pynpoint.core.pypeline import Pypeline
from pynpoint.readwrite.fitsreading import FitsReadingModule
from pynpoint.processing.filter import GaussianFilterModule
from pynpoint.util.tests import create_config, remove_test_data, create_star_data


class TestFilter:

    def setup_class(self) -> None:

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_star_data(self.test_dir+'data')
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:

        remove_test_data(self.test_dir, folders=['data'])

    def test_read_data(self) -> None:

        module = FitsReadingModule(name_in='read',
                                   image_tag='data',
                                   input_dir=self.test_dir+'data',
                                   overwrite=True,
                                   check=True)

        self.pipeline.add_module(module)
        self.pipeline.run_module('read')

        data = self.pipeline.get_data('data')
        assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

    def test_gaussian_filter(self) -> None:

        module = GaussianFilterModule(name_in='filter',
                                      image_in_tag='data',
                                      image_out_tag='filtered',
                                      fwhm=0.1)

        self.pipeline.add_module(module)
        self.pipeline.run_module('filter')

        data = self.pipeline.get_data('filtered')
        assert np.sum(data) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.)
        assert data.shape == (10, 11, 11)

PynPoint-0.11.0/tests/test_processing/test_fluxposition.py

import os

import h5py
import pytest
import numpy as np

from pynpoint.core.pypeline import Pypeline
from pynpoint.readwrite.fitsreading import FitsReadingModule
from pynpoint.processing.fluxposition import FakePlanetModule, AperturePhotometryModule, \
                                             FalsePositiveModule, SimplexMinimizationModule, \
                                             MCMCsamplingModule, SystematicErrorModule
from pynpoint.processing.stacksubset import DerotateAndStackModule
from pynpoint.processing.psfsubtraction import PcaPsfSubtractionModule
from pynpoint.util.tests import create_config, create_star_data, create_fake_data, remove_test_data


class TestFluxPosition:

    def setup_class(self) -> None:

        self.limit = 1e-10
        self.test_dir = os.path.dirname(__file__) + '/'

        create_fake_data(self.test_dir+'adi')
        create_star_data(self.test_dir+'psf', npix=21, pos_star=10.)
        create_star_data(self.test_dir+'ref', npix=21, pos_star=10.)
        create_config(self.test_dir+'PynPoint_config.ini')

        self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir)

    def teardown_class(self) -> None:

        remove_test_data(self.test_dir, folders=['adi', 'psf', 'ref'])

    def test_read_data(self) -> None:

        module = FitsReadingModule(name_in='read1',
                                   image_tag='adi',
                                   input_dir=self.test_dir+'adi')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read1')

        data = self.pipeline.get_data('adi')
        assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.)
        assert data.shape == (10, 21, 21)

        self.pipeline.set_attribute('adi', 'PARANG', np.linspace(0., 180., 10), static=False)

        module = FitsReadingModule(name_in='read2',
                                   image_tag='psf',
                                   input_dir=self.test_dir+'psf')

        self.pipeline.add_module(module)
        self.pipeline.run_module('read2')

        data = self.pipeline.get_data('psf')
        assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.)
assert data.shape == (10, 21, 21) module = FitsReadingModule(name_in='read3', image_tag='ref', input_dir=self.test_dir+'psf') self.pipeline.add_module(module) self.pipeline.run_module('read3') data = self.pipeline.get_data('ref') assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) def test_aperture_photometry(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 1 module = AperturePhotometryModule(name_in='photometry1', image_in_tag='psf', phot_out_tag='photometry1', radius=0.1, position=None) self.pipeline.add_module(module) self.pipeline.run_module('photometry1') with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 module = AperturePhotometryModule(name_in='photometry2', image_in_tag='psf', phot_out_tag='photometry2', radius=0.1, position=None) self.pipeline.add_module(module) self.pipeline.run_module('photometry2') data = self.pipeline.get_data('photometry1') assert np.sum(data) == pytest.approx(100.80648929590365, rel=self.limit, abs=0.) assert data.shape == (10, 1) data_multi = self.pipeline.get_data('photometry2') assert data.shape == data_multi.shape assert data == pytest.approx(data_multi, rel=self.limit, abs=0.) def test_aperture_photometry_position(self) -> None: module = AperturePhotometryModule(name_in='photometry3', image_in_tag='psf', phot_out_tag='photometry3', radius=0.1, position=(10., 10.)) self.pipeline.add_module(module) self.pipeline.run_module('photometry3') data = self.pipeline.get_data('photometry3') assert np.sum(data) == pytest.approx(100.80648929590365, rel=self.limit, abs=0.) assert data.shape == (10, 1) def test_fake_planet(self) -> None: module = FakePlanetModule(position=(0.2, 180.), magnitude=2.5, psf_scaling=1., interpolation='spline', name_in='fake', image_in_tag='adi', psf_in_tag='psf', image_out_tag='fake') self.pipeline.add_module(module) self.pipeline.run_module('fake') data = self.pipeline.get_data('fake') assert np.sum(data) == pytest.approx(21.273233520675586, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) def test_psf_subtraction(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=[1, ], name_in='pca', images_in_tag='fake', reference_in_tag='fake', res_mean_tag='res_mean', extra_rot=0.) self.pipeline.add_module(module) self.pipeline.run_module('pca') data = self.pipeline.get_data('res_mean') assert np.sum(data) == pytest.approx(0.013659056187572433, rel=self.limit, abs=0.) assert data.shape == (1, 21, 21) def test_false_positive(self) -> None: module = FalsePositiveModule(position=(10., 2.), aperture=0.06, ignore=True, name_in='false1', image_in_tag='res_mean', snr_out_tag='snr_fpf1', optimize=False) self.pipeline.add_module(module) self.pipeline.run_module('false1') data = self.pipeline.get_data('snr_fpf1') assert data[0, 1] == pytest.approx(2., rel=self.limit, abs=0.) assert data[0, 2] == pytest.approx(0.216, rel=self.limit, abs=0.) assert data[0, 3] == pytest.approx(180., rel=self.limit, abs=0.) assert data[0, 4] == pytest.approx(26.661611583224417, rel=self.limit, abs=0.) assert data[0, 5] == pytest.approx(1.3375156927387672e-08, rel=self.limit, abs=0.) 
assert data.shape == (1, 6) def test_false_positive_optimize(self) -> None: module = FalsePositiveModule(position=(10., 2.), aperture=0.06, ignore=True, name_in='false2', image_in_tag='res_mean', snr_out_tag='snr_fpf2', optimize=True, offset=0.1, tolerance=0.01) self.pipeline.add_module(module) self.pipeline.run_module('false2') data = self.pipeline.get_data('snr_fpf2') assert data[0, 1] == pytest.approx(2.0747802734374985, rel=self.limit, abs=0.) assert data[0, 2] == pytest.approx(0.2139883890923524, rel=self.limit, abs=0.) assert data[0, 3] == pytest.approx(179.52168877335356, rel=self.limit, abs=0.) assert data[0, 4] == pytest.approx(27.457328210661814, rel=self.limit, abs=0.) assert data[0, 5] == pytest.approx(1.0905578015907869e-08, rel=self.limit, abs=0.) assert data.shape == (1, 6) def test_simplex_minimization_hessian(self) -> None: module = SimplexMinimizationModule(name_in='simplex1', image_in_tag='fake', psf_in_tag='psf', res_out_tag='simplex_res', flux_position_tag='flux_position', position=(10., 3.), magnitude=2.5, psf_scaling=-1., merit='hessian', aperture=0.06, sigma=0., tolerance=0.1, pca_number=1, cent_size=0.06, edge_size=None, extra_rot=0., reference_in_tag=None, residuals='median', offset=1.) self.pipeline.add_module(module) self.pipeline.run_module('simplex1') data = self.pipeline.get_data('simplex_res') assert np.sum(data) == pytest.approx(0.2781582369128238, rel=self.limit, abs=0.) assert data.shape == (35, 21, 21) data = self.pipeline.get_data('flux_position') assert data[24, 0] == pytest.approx(9.931627229080938, rel=self.limit, abs=0.) assert data[24, 1] == pytest.approx(2.6575231481481456, rel=self.limit, abs=0.) assert data[24, 2] == pytest.approx(0.1982554700445013, rel=self.limit, abs=0.) assert data[24, 3] == pytest.approx(179.46648003649148, rel=self.limit, abs=0.) assert data[24, 4] == pytest.approx(2.5256451474622708, rel=self.limit, abs=0.) assert data.shape == (35, 6) def test_simplex_minimization_reference(self) -> None: module = SimplexMinimizationModule(name_in='simplex2', image_in_tag='fake', psf_in_tag='psf', res_out_tag='simplex_res_ref', flux_position_tag='flux_position_ref', position=(10., 3.), magnitude=2.5, psf_scaling=-1., merit='poisson', aperture=0.06, sigma=0., tolerance=0.1, pca_number=1, cent_size=0.06, edge_size=None, extra_rot=0., reference_in_tag='ref', residuals='mean') self.pipeline.add_module(module) self.pipeline.run_module('simplex2') data = self.pipeline.get_data('simplex_res_ref') assert np.sum(data) == pytest.approx(9.734993454838076, rel=self.limit, abs=0.) assert data.shape == (28, 21, 21) data = self.pipeline.get_data('flux_position_ref') assert data[27, 0] == pytest.approx(10.049019964116436, rel=self.limit, abs=0.) assert data[27, 1] == pytest.approx(2.6444836362361936, rel=self.limit, abs=0.) assert data[27, 2] == pytest.approx(0.19860335205689572, rel=self.limit, abs=0.) assert data[27, 3] == pytest.approx(180.38183525629643, rel=self.limit, abs=0.) assert data[27, 4] == pytest.approx(2.5496922175196, rel=self.limit, abs=0.) assert data.shape == (28, 6) def test_mcmc_sampling(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 module = DerotateAndStackModule(name_in='stack', image_in_tag='psf', image_out_tag='psf_stack', derotate=False, stack='mean') self.pipeline.add_module(module) self.pipeline.run_module('stack') data = self.pipeline.get_data('psf_stack') assert np.sum(data) == pytest.approx(10.843655133957288, rel=self.limit, abs=0.) 
assert data.shape == (1, 21, 21) module = MCMCsamplingModule(name_in='mcmc1', image_in_tag='adi', psf_in_tag='psf_stack', chain_out_tag='mcmc', param=(0.15, 0., 1.), bounds=((0.1, 0.2), (-2., 2.), (-1., 2.)), nwalkers=6, nsteps=5, psf_scaling=-1., pca_number=1, aperture=(10, 16, 0.06), mask=None, extra_rot=0., merit='gaussian', residuals='median', resume=False, sigma=(1e-3, 1e-1, 1e-2)) self.pipeline.add_module(module) self.pipeline.run_module('mcmc1') data = self.pipeline.get_data('mcmc') assert data.shape == (5, 6, 3) data = self.pipeline.get_data('mcmc_backend') assert data.shape == (3, ) module = MCMCsamplingModule(name_in='mcmc2', image_in_tag='adi', psf_in_tag='psf_stack', chain_out_tag='mcmc', param=(0.15, 0., 1.), bounds=((0.1, 0.2), (-2., 2.), (-1., 2.)), nwalkers=6, nsteps=5, psf_scaling=-1., pca_number=1, aperture=(10, 16, 0.06), mask=None, extra_rot=0., merit='gaussian', residuals='median', resume=True, sigma=(1e-3, 1e-1, 1e-2)) self.pipeline.add_module(module) self.pipeline.run_module('mcmc2') data = self.pipeline.get_data('mcmc') assert data.shape == (10, 6, 3) data = self.pipeline.get_data('mcmc_backend') assert data.shape == (3, ) def test_systematic_error(self) -> None: module = SystematicErrorModule(name_in='error', image_in_tag='adi', psf_in_tag='psf', offset_out_tag='offset', position=(0.162, 0.), magnitude=5., angles=(0., 180., 2), psf_scaling=1., merit='gaussian', aperture=0.06, tolerance=0.1, pca_number=1, mask=(None, None), extra_rot=0., residuals='median', offset=1.) self.pipeline.add_module(module) self.pipeline.run_module('error') data = self.pipeline.get_data('offset') assert data[0, 0] == pytest.approx(-0.001114020093541973, rel=self.limit, abs=0.) assert data[0, 1] == pytest.approx(-0.012163271644183737, rel=self.limit, abs=0.) assert data[0, 2] == pytest.approx(-0.017943854263249293, rel=self.limit, abs=0.) assert data[0, 3] == pytest.approx(0.001282493868968615, rel=self.limit, abs=0.) assert data[0, 4] == pytest.approx(-0.04125986733475884, rel=self.limit, abs=0.) assert data.shape == (2, 5) PynPoint-0.11.0/tests/test_processing/test_frameselection.py000066400000000000000000000370301450275315200243460ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.frameselection import RemoveFramesModule, FrameSelectionModule, \ RemoveLastFrameModule, RemoveStartFramesModule, \ ImageStatisticsModule, FrameSimilarityModule, \ SelectByAttributeModule, ResidualSelectionModule from pynpoint.util.tests import create_config, remove_test_data, create_star_data class TestFrameSelection: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'images') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['images']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read', image_tag='read', input_dir=self.test_dir+'images', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('read') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) attr = self.pipeline.get_attribute('read', 'NDIT', static=False) assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.) 
assert attr.shape == (2, ) attr = self.pipeline.get_attribute('read', 'NFRAMES', static=False) assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.) assert attr.shape == (2, ) self.pipeline.set_attribute('read', 'NDIT', [4, 4], static=False) def test_remove_last_frame(self) -> None: module = RemoveLastFrameModule(name_in='last', image_in_tag='read', image_out_tag='last') self.pipeline.add_module(module) self.pipeline.run_module('last') data = self.pipeline.get_data('last') assert np.sum(data) == pytest.approx(84.68885503527224, rel=self.limit, abs=0.) assert data.shape == (8, 11, 11) self.pipeline.set_attribute('last', 'PARANG', np.arange(8.), static=False) self.pipeline.set_attribute('last', 'STAR_POSITION', np.full((8, 2), 5.), static=False) attr = self.pipeline.get_attribute('last', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(28., rel=self.limit, abs=0.) assert attr.shape == (8, ) attr = self.pipeline.get_attribute('last', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(80., rel=self.limit, abs=0.) assert attr.shape == (8, 2) def test_remove_start_frame(self) -> None: module = RemoveStartFramesModule(frames=1, name_in='start', image_in_tag='last', image_out_tag='start') self.pipeline.add_module(module) self.pipeline.run_module('start') data = self.pipeline.get_data('start') assert np.sum(data) == pytest.approx(64.44307047549808, rel=self.limit, abs=0.) assert data.shape == (6, 11, 11) attr = self.pipeline.get_attribute('start', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(24., rel=self.limit, abs=0.) assert attr.shape == (6, ) attr = self.pipeline.get_attribute('start', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(60., rel=self.limit, abs=0.) assert attr.shape == (6, 2) def test_remove_frames(self) -> None: module = RemoveFramesModule(name_in='remove', image_in_tag='start', selected_out_tag='selected', removed_out_tag='removed', frames=[2, 5]) self.pipeline.add_module(module) self.pipeline.run_module('remove') data = self.pipeline.get_data('selected') assert np.sum(data) == pytest.approx(43.68337741822863, rel=self.limit, abs=0.) assert data.shape == (4, 11, 11) data = self.pipeline.get_data('removed') assert np.sum(data) == pytest.approx(20.759693057269445, rel=self.limit, abs=0.) assert data.shape == (2, 11, 11) attr = self.pipeline.get_attribute('selected', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(14., rel=self.limit, abs=0.) assert attr.shape == (4, ) attr = self.pipeline.get_attribute('selected', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(40., rel=self.limit, abs=0.) assert attr.shape == (4, 2) attr = self.pipeline.get_attribute('removed', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(10., rel=self.limit, abs=0.) assert attr.shape == (2, ) attr = self.pipeline.get_attribute('removed', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(20., rel=self.limit, abs=0.) assert attr.shape == (2, 2) def test_frame_selection(self) -> None: module = FrameSelectionModule(name_in='select1', image_in_tag='start', selected_out_tag='selected1', removed_out_tag='removed1', index_out_tag='index1', method='median', threshold=2., fwhm=0.1, aperture=('circular', 0.1), position=(None, None, 0.2)) self.pipeline.add_module(module) self.pipeline.run_module('select1') data = self.pipeline.get_data('selected1') assert np.sum(data) == pytest.approx(54.58514780071149, rel=self.limit, abs=0.) 
assert data.shape == (5, 11, 11) data = self.pipeline.get_data('removed1') assert np.sum(data) == pytest.approx(9.857922674786586, rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) data = self.pipeline.get_data('index1') assert np.sum(data) == pytest.approx(5, rel=self.limit, abs=0.) assert data.shape == (1, ) attr = self.pipeline.get_attribute('selected1', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(17., rel=self.limit, abs=0.) assert attr.shape == (5, ) attr = self.pipeline.get_attribute('selected1', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(50, rel=self.limit, abs=0.) assert attr.shape == (5, 2) attr = self.pipeline.get_attribute('removed1', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(7., rel=self.limit, abs=0.) assert attr.shape == (1, ) attr = self.pipeline.get_attribute('removed1', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(10, rel=self.limit, abs=0.) assert attr.shape == (1, 2) module = FrameSelectionModule(name_in='select2', image_in_tag='start', selected_out_tag='selected2', removed_out_tag='removed2', index_out_tag='index2', method='max', threshold=1., fwhm=0.1, aperture=('annulus', 0.05, 0.1), position=None) self.pipeline.add_module(module) self.pipeline.run_module('select2') data = self.pipeline.get_data('selected2') assert np.sum(data) == pytest.approx(21.42652724866543, rel=self.limit, abs=0.) assert data.shape == (2, 11, 11) data = self.pipeline.get_data('removed2') assert np.sum(data) == pytest.approx(43.016543226832646, rel=self.limit, abs=0.) assert data.shape == (4, 11, 11) data = self.pipeline.get_data('index2') assert np.sum(data) == pytest.approx(10, rel=self.limit, abs=0.) assert data.shape == (4, ) attr = self.pipeline.get_attribute('selected2', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(8., rel=self.limit, abs=0.) assert attr.shape == (2, ) attr = self.pipeline.get_attribute('selected2', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(20, rel=self.limit, abs=0.) assert attr.shape == (2, 2) attr = self.pipeline.get_attribute('removed2', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(16., rel=self.limit, abs=0.) assert attr.shape == (4, ) attr = self.pipeline.get_attribute('removed2', 'STAR_POSITION', static=False) assert np.sum(attr) == pytest.approx(40, rel=self.limit, abs=0.) assert attr.shape == (4, 2) module = FrameSelectionModule(name_in='select3', image_in_tag='start', selected_out_tag='selected3', removed_out_tag='removed3', index_out_tag='index3', method='range', threshold=(10., 10.7), fwhm=0.1, aperture=('circular', 0.1), position=None) self.pipeline.add_module(module) self.pipeline.run_module('select3') data = self.pipeline.get_data('selected3') assert np.sum(data) == pytest.approx(22.2568501695632, rel=self.limit, abs=0.) assert data.shape == (2, 11, 11) data = self.pipeline.get_data('removed3') assert np.sum(data) == pytest.approx(42.18622030593487, rel=self.limit, abs=0.) assert data.shape == (4, 11, 11) data = self.pipeline.get_data('index3') assert np.sum(data) == pytest.approx(12, rel=self.limit, abs=0.) assert data.shape == (4, ) def test_image_statistics_full(self) -> None: module = ImageStatisticsModule(name_in='stat1', image_in_tag='read', stat_out_tag='stat1', position=None) self.pipeline.add_module(module) self.pipeline.run_module('stat1') data = self.pipeline.get_data('stat1') assert np.sum(data) == pytest.approx(115.68591492205017, rel=self.limit, abs=0.) 
assert data.shape == (10, 6) def test_image_statistics_position(self) -> None: module = ImageStatisticsModule(name_in='stat2', image_in_tag='read', stat_out_tag='stat2', position=(5, 5, 0.1)) self.pipeline.add_module(module) self.pipeline.run_module('stat2') data = self.pipeline.get_data('stat2') assert np.sum(data) == pytest.approx(118.7138708968444, rel=self.limit, abs=0.) assert data.shape == (10, 6) def test_frame_similarity_mse(self) -> None: module = FrameSimilarityModule(name_in='simi1', image_tag='read', method='MSE', mask_radius=(0., 0.1)) self.pipeline.add_module(module) self.pipeline.run_module('simi1') attr = self.pipeline.get_attribute('read', 'MSE', static=False) assert np.min(attr) > 0. assert np.sum(attr) == pytest.approx(0.11739141370277852, rel=self.limit, abs=0.) assert attr.shape == (10, ) def test_frame_similarity_pcc(self) -> None: module = FrameSimilarityModule(name_in='simi2', image_tag='read', method='PCC', mask_radius=(0., 0.1)) self.pipeline.add_module(module) self.pipeline.run_module('simi2') attr = self.pipeline.get_attribute('read', 'PCC', static=False) assert np.min(attr) > 0. assert np.sum(attr) == pytest.approx(9.134820985662829, rel=self.limit, abs=0.) assert attr.shape == (10, ) def test_frame_similarity_ssim(self) -> None: module = FrameSimilarityModule(name_in='simi3', image_tag='read', method='SSIM', mask_radius=(0., 0.1), temporal_median='constant') self.pipeline.add_module(module) self.pipeline.run_module('simi3') attr = self.pipeline.get_attribute('read', 'SSIM', static=False) assert np.min(attr) > 0. assert np.sum(attr) == pytest.approx(9.074290801266256, rel=self.limit, abs=0.) assert attr.shape == (10, ) def test_select_by_attribute(self) -> None: self.pipeline.set_attribute('read', 'INDEX', np.arange(44), static=False) module = SelectByAttributeModule(name_in='frame_removal_1', image_in_tag='read', attribute_tag='SSIM', number_frames=6, order='descending', selected_out_tag='select_sim', removed_out_tag='remove_sim') self.pipeline.add_module(module) self.pipeline.run_module('frame_removal_1') attr = self.pipeline.get_attribute('select_sim', 'INDEX', static=False) assert np.sum(attr) == pytest.approx(946, rel=self.limit, abs=0.) assert attr.shape == (44, ) attr = self.pipeline.get_attribute('select_sim', 'SSIM', static=False) assert np.sum(attr) == pytest.approx(5.545578246610884, rel=self.limit, abs=0.) assert attr.shape == (6, ) attr = self.pipeline.get_attribute('remove_sim', 'SSIM', static=False) assert np.sum(attr) == pytest.approx(3.528712554655373, rel=self.limit, abs=0.) assert attr.shape == (4, ) def test_residual_selection(self) -> None: module = ResidualSelectionModule(name_in='residual_select', image_in_tag='start', selected_out_tag='res_selected', removed_out_tag='res_removed', percentage=80., annulus_radii=(0.1, 0.2)) self.pipeline.add_module(module) self.pipeline.run_module('residual_select') data = self.pipeline.get_data('res_selected') assert np.sum(data) == pytest.approx(41.77295229983322, rel=self.limit, abs=0.) assert data.shape == (4, 11, 11) data = self.pipeline.get_data('res_removed') assert np.sum(data) == pytest.approx(22.670118175664847, rel=self.limit, abs=0.) 
assert data.shape == (2, 11, 11) PynPoint-0.11.0/tests/test_processing/test_limits.py000066400000000000000000000146671450275315200226620ustar00rootroot00000000000000import os from urllib.request import urlretrieve import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.limits import ContrastCurveModule, MassLimitsModule from pynpoint.processing.psfpreparation import AngleInterpolationModule from pynpoint.util.tests import create_config, create_star_data, remove_test_data class TestLimits: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'self.limits', npix=21, pos_star=10.) create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(path=self.test_dir, folders=['self.limits'], files=['model.AMES-Cond-2000.M-0.0.NaCo.Vega']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read', image_tag='read', input_dir=self.test_dir+'self.limits') self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('read') assert np.sum(data) == pytest.approx(108.43655133957289, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) def test_angle_interpolation(self) -> None: module = AngleInterpolationModule(name_in='angle', data_tag='read') self.pipeline.add_module(module) self.pipeline.run_module('angle') attr = self.pipeline.get_attribute('read', 'PARANG', static=False) assert np.sum(attr) == pytest.approx(900., rel=self.limit, abs=0.) assert attr.shape == (10, ) def test_contrast_curve(self) -> None: proc = ['single', 'multi'] for item in proc: if item == 'multi': with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 module = ContrastCurveModule(name_in='contrast_'+item, image_in_tag='read', psf_in_tag='read', contrast_out_tag='limits_'+item, separation=(0.2, 0.3, 0.2), angle=(0., 360., 180.), threshold=('sigma', 5.), psf_scaling=1., aperture=0.05, pca_number=2, cent_size=None, edge_size=1., extra_rot=0.) self.pipeline.add_module(module) self.pipeline.run_module('contrast_'+item) data = self.pipeline.get_data('limits_'+item) assert data[0, 0] == pytest.approx(0.2, rel=self.limit, abs=0.) assert data[0, 1] == pytest.approx(2.580878183791224, rel=self.limit, abs=0.) assert data[0, 2] == pytest.approx(0.0007097688120261913, rel=self.limit, abs=0.) assert data[0, 3] == pytest.approx(0.00020126490906225968, rel=self.limit, abs=0.) assert data.shape == (1, 4) def test_contrast_curve_fpf(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 1 module = ContrastCurveModule(name_in='contrast_fpf', image_in_tag='read', psf_in_tag='read', contrast_out_tag='limits_fpf', separation=(0.2, 0.3, 0.2), angle=(0., 360., 180.), threshold=('fpf', 1e-6), psf_scaling=1., aperture=0.05, pca_number=2, cent_size=None, edge_size=1., extra_rot=0.) self.pipeline.add_module(module) self.pipeline.run_module('contrast_fpf') data = self.pipeline.get_data('limits_fpf') assert data[0, 0] == pytest.approx(0.2, rel=self.limit, abs=0.) assert data[0, 1] == pytest.approx(1.9339430843041776, rel=self.limit, abs=0.) assert data[0, 2] == pytest.approx(0.000709768812026221, rel=self.limit, abs=0.) assert data[0, 3] == pytest.approx(1e-06, rel=self.limit, abs=0.) 
assert data.shape == (1, 4) def test_mass_limits(self) -> None: separation = np.linspace(0.1, 1.0, 10) contrast = -2.5*np.log10(1e-4/separation) variance = 0.1*contrast limits = np.zeros((10, 4)) limits[:, 0] = separation limits[:, 1] = contrast limits[:, 2] = variance with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['contrast_limits'] = limits url = 'https://home.strw.leidenuniv.nl/~stolker/pynpoint/' \ 'model.AMES-Cond-2000.M-0.0.NaCo.Vega' filename = self.test_dir + 'model.AMES-Cond-2000.M-0.0.NaCo.Vega' urlretrieve(url, filename) module = MassLimitsModule(model_file=filename, star_prop={'magnitude': 10., 'distance': 100., 'age': 20.}, name_in='mass', contrast_in_tag='contrast_limits', mass_out_tag='mass_limits', instr_filter='L\'') self.pipeline.add_module(module) self.pipeline.run_module('mass') data = self.pipeline.get_data('mass_limits') assert np.mean(data[:, 0]) == pytest.approx(0.55, rel=self.limit, abs=0.) assert np.mean(data[:, 1]) == pytest.approx(0.001891690765603738, rel=self.limit, abs=0.) assert np.mean(data[:, 2]) == pytest.approx(0.000964309686441908, rel=self.limit, abs=0.) assert np.mean(data[:, 3]) == pytest.approx(-0.000696402843279597, rel=self.limit, abs=0.) assert data.shape == (10, 4) PynPoint-0.11.0/tests/test_processing/test_psfpreparation.py000066400000000000000000000320131450275315200243770ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.psfpreparation import PSFpreparationModule, AngleInterpolationModule, \ AngleCalculationModule, SDIpreparationModule, \ SortParangModule from pynpoint.util.tests import create_config, create_star_data, create_ifs_data, remove_test_data class TestPsfPreparation: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'prep') create_ifs_data(self.test_dir+'prep_ifs') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['prep', 'prep_ifs']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read', image_tag='read', input_dir=self.test_dir+'prep') self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('read') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) module = FitsReadingModule(name_in='read_ifs', image_tag='read_ifs', input_dir=self.test_dir+'prep_ifs', ifs_data=True) self.pipeline.add_module(module) self.pipeline.run_module('read_ifs') data = self.pipeline.get_data('read_ifs') assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.) assert data.shape == (3, 10, 21, 21) def test_angle_interpolation(self) -> None: module = AngleInterpolationModule(name_in='angle1', data_tag='read') self.pipeline.add_module(module) self.pipeline.run_module('angle1') data = self.pipeline.get_data('header_read/PARANG') assert np.sum(data) == pytest.approx(900., rel=self.limit, abs=0.) assert data.shape == (10, ) def test_angle_calculation(self) -> None: self.pipeline.set_attribute('read', 'LATITUDE', -25.) self.pipeline.set_attribute('read', 'LONGITUDE', -70.) self.pipeline.set_attribute('read', 'DIT', 1.) 
self.pipeline.set_attribute('read', 'RA', (90., 90., 90., 90.), static=False) self.pipeline.set_attribute('read', 'DEC', (-51., -51., -51., -51.), static=False) self.pipeline.set_attribute('read', 'PUPIL', (90., 90., 90., 90.), static=False) date = ('2012-12-01T07:09:00.0000', '2012-12-01T07:09:01.0000', '2012-12-01T07:09:02.0000', '2012-12-01T07:09:03.0000') self.pipeline.set_attribute('read', 'DATE', date, static=False) module = AngleCalculationModule(instrument='NACO', name_in='angle2', data_tag='read') self.pipeline.add_module(module) self.pipeline.run_module('angle2') data = self.pipeline.get_data('header_read/PARANG') assert np.sum(data) == pytest.approx(-550.2338293743467, rel=self.limit, abs=0.) assert data.shape == (10, ) self.pipeline.set_attribute('read', 'RA', (60000.0, 60000.0, 60000.0, 60000.0), static=False) self.pipeline.set_attribute('read', 'DEC', (-510000., -510000., -510000., -510000.), static=False) module = AngleCalculationModule(instrument='SPHERE/IRDIS', name_in='angle3', data_tag='read') self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('angle3') warning_0 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \ 'DROT2 RA\' to specify the object\'s right ascension. The input will be ' \ 'parsed accordingly. Using the regular \'RA\' keyword will lead to wrong ' \ 'parallactic angles.' warning_1 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \ 'DROT2 DEC\' to specify the object\'s declination. The input will be parsed ' \ 'accordingly. Using the regular \'DEC\' keyword will lead to wrong ' \ 'parallactic angles.' if len(warning) == 2: assert warning[0].message.args[0] == warning_0 assert warning[1].message.args[0] == warning_1 data = self.pipeline.get_data('header_read/PARANG') assert np.sum(data) == pytest.approx(1704.2202367435675, rel=self.limit, abs=0.) assert data.shape == (10, ) module = AngleCalculationModule(instrument='SPHERE/IFS', name_in='angle4', data_tag='read') self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('angle4') warning_0 = 'AngleCalculationModule has not been tested for SPHERE/IFS data.' warning_1 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \ 'DROT2 RA\' to specify the object\'s right ascension. The input will be ' \ 'parsed accordingly. Using the regular \'RA\' keyword will lead to wrong ' \ 'parallactic angles.' warning_2 = 'For SPHERE data it is recommended to use the header keyword \'ESO INS4 ' \ 'DROT2 DEC\' to specify the object\'s declination. The input will be parsed ' \ 'accordingly. Using the regular \'DEC\' keyword will lead to wrong ' \ 'parallactic angles.' if len(warning) == 3: assert warning[0].message.args[0] == warning_0 assert warning[1].message.args[0] == warning_1 assert warning[2].message.args[0] == warning_2 data = self.pipeline.get_data('header_read/PARANG') assert np.sum(data) == pytest.approx(-890.8506514377593, rel=self.limit, abs=0.) 
assert data.shape == (10, ) def test_angle_sort(self) -> None: index = self.pipeline.get_data('header_read/INDEX') self.pipeline.set_attribute('read', 'INDEX', index[::-1], static=False) module = SortParangModule(name_in='sort1', image_in_tag='read', image_out_tag='read_sorted') self.pipeline.add_module(module) self.pipeline.run_module('sort1') self.pipeline.set_attribute('read', 'INDEX', index, static=False) parang = self.pipeline.get_data('header_read/PARANG')[::-1] parang_sort = self.pipeline.get_data('header_read_sorted/PARANG') assert np.sum(parang) == pytest.approx(np.sum(parang_sort), rel=self.limit, abs=0.) parang_set = [0., 1., 2., 3., 4., 5., 6., 7., 8., 9.] self.pipeline.set_attribute('read_ifs', 'PARANG', parang_set, static=False) data = self.pipeline.get_data('read_sorted') assert np.sum(data[0]) == pytest.approx(9.71156815235485, rel=self.limit, abs=0.) def test_angle_sort_ifs(self) -> None: index = self.pipeline.get_data('header_read_ifs/INDEX') self.pipeline.set_attribute('read_ifs', 'INDEX', index[::-1], static=False) module = SortParangModule(name_in='sort2', image_in_tag='read_ifs', image_out_tag='read_ifs_sorted') self.pipeline.add_module(module) self.pipeline.run_module('sort2') self.pipeline.set_attribute('read_ifs', 'INDEX', index, static=False) parang = self.pipeline.get_data('header_read_ifs/PARANG')[::-1] parang_sort = self.pipeline.get_data('header_read_ifs_sorted/PARANG') assert np.sum(parang) == pytest.approx(np.sum(parang_sort), rel=self.limit, abs=0.) data = self.pipeline.get_data('read_ifs_sorted') assert np.sum(data[0, 0]) == pytest.approx(21.185139976163477, rel=self.limit, abs=0.) def test_angle_interpolation_mismatch(self) -> None: self.pipeline.set_attribute('read', 'NDIT', [9, 9, 9, 9], static=False) module = AngleInterpolationModule(name_in='angle5', data_tag='read') self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('angle5') warning_0 = 'There is a mismatch between the NDIT and NFRAMES values. The parallactic ' \ 'angles are calculated with a linear interpolation by using NFRAMES steps. ' \ 'A frame selection should be applied after the parallactic angles are ' \ 'calculated.' if len(warning) == 1: assert warning[0].message.args[0] == warning_0 data = self.pipeline.get_data('header_read/PARANG') assert np.sum(data) == pytest.approx(900., rel=self.limit, abs=0.) assert data.shape == (10, ) def test_psf_preparation_norm_mask(self) -> None: module = PSFpreparationModule(name_in='prep1', image_in_tag='read', image_out_tag='prep1', mask_out_tag='mask1', norm=True, cent_size=0.1, edge_size=1.0) self.pipeline.add_module(module) self.pipeline.run_module('prep1') data = self.pipeline.get_data('prep1') assert np.sum(data) == pytest.approx(-1.5844830188044685, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) data = self.pipeline.get_data('mask1') assert np.sum(data) == pytest.approx(52, rel=self.limit, abs=0.) assert data.shape == (11, 11) def test_psf_preparation_none(self) -> None: module = PSFpreparationModule(name_in='prep2', image_in_tag='read', image_out_tag='prep2', mask_out_tag='mask2', norm=False, cent_size=None, edge_size=None) self.pipeline.add_module(module) self.pipeline.run_module('prep2') data = self.pipeline.get_data('prep2') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) 
assert data.shape == (10, 11, 11) def test_psf_preparation_no_mask_out(self) -> None: module = PSFpreparationModule(name_in='prep3', image_in_tag='read', image_out_tag='prep3', mask_out_tag=None, norm=False, cent_size=None, edge_size=None) self.pipeline.add_module(module) self.pipeline.run_module('prep3') data = self.pipeline.get_data('prep3') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_psf_preparation_sdi(self) -> None: module = PSFpreparationModule(name_in='prep4', image_in_tag='read_ifs', image_out_tag='prep4', mask_out_tag=None, norm=False, cent_size=None, edge_size=None) self.pipeline.add_module(module) self.pipeline.run_module('prep4') data = self.pipeline.get_data('prep4') assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.) assert data.shape == (3, 10, 21, 21) def test_sdi_preparation(self) -> None: module = SDIpreparationModule(name_in='sdi', wavelength=(0.65, 0.6), width=(0.1, 0.5), image_in_tag='read', image_out_tag='sdi') self.pipeline.add_module(module) self.pipeline.run_module('sdi') data = self.pipeline.get_data('sdi') assert np.sum(data) == pytest.approx(21.084666133914183, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) attribute = self.pipeline.get_attribute('sdi', 'History: SDIpreparationModule') assert attribute == '(line, continuum) = (0.65, 0.6)' PynPoint-0.11.0/tests/test_processing/test_psfsubtraction_adi.py000066400000000000000000000556641450275315200252460ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.psfpreparation import PSFpreparationModule from pynpoint.processing.psfsubtraction import PcaPsfSubtractionModule, ClassicalADIModule from pynpoint.util.tests import create_config, create_fake_data, remove_test_data class TestPsfSubtractionAdi: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_fake_data(self.test_dir+'science') create_fake_data(self.test_dir+'reference') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['science', 'reference']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read1', image_tag='science', input_dir=self.test_dir+'science') self.pipeline.add_module(module) self.pipeline.run_module('read1') data = self.pipeline.get_data('science') assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) self.pipeline.set_attribute('science', 'PARANG', np.linspace(0., 180., 10), static=False) module = FitsReadingModule(name_in='read2', image_tag='reference', input_dir=self.test_dir+'reference') self.pipeline.add_module(module) self.pipeline.run_module('read2') data = self.pipeline.get_data('reference') assert np.sum(data) == pytest.approx(11.012854046962481, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) def test_psf_preparation(self) -> None: module = PSFpreparationModule(name_in='prep1', image_in_tag='science', image_out_tag='science_prep', mask_out_tag=None, norm=False, resize=None, cent_size=0.05, edge_size=1.) 
self.pipeline.add_module(module) self.pipeline.run_module('prep1') data = self.pipeline.get_data('science_prep') assert np.sum(data) == pytest.approx(5.029285028467547, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) module = PSFpreparationModule(name_in='prep2', image_in_tag='reference', image_out_tag='reference_prep', mask_out_tag=None, norm=False, resize=None, cent_size=0.05, edge_size=1.) self.pipeline.add_module(module) self.pipeline.run_module('prep2') data = self.pipeline.get_data('reference_prep') assert np.sum(data) == pytest.approx(5.029285028467547, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) def test_classical_adi(self) -> None: module = ClassicalADIModule(threshold=None, nreference=None, residuals='mean', extra_rot=0., name_in='cadi1', image_in_tag='science_prep', res_out_tag='cadi_res', stack_out_tag='cadi_stack') self.pipeline.add_module(module) self.pipeline.run_module('cadi1') data = self.pipeline.get_data('cadi_res') assert np.sum(data) == pytest.approx(0.8381625719865213, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) data = self.pipeline.get_data('cadi_stack') assert np.sum(data) == pytest.approx(0.08395606034388256, rel=self.limit, abs=0.) assert data.shape == (1, 21, 21) def test_classical_adi_threshold(self) -> None: module = ClassicalADIModule(threshold=(0.1, 0.03, 1.), nreference=5, residuals='median', extra_rot=0., name_in='cadi2', image_in_tag='science_prep', res_out_tag='cadi_res', stack_out_tag='cadi_stack') self.pipeline.add_module(module) self.pipeline.run_module('cadi2') data = self.pipeline.get_data('cadi_res') assert np.sum(data) == pytest.approx(0.7158207863548083, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) data = self.pipeline.get_data('cadi_stack') assert np.sum(data) == pytest.approx(0.07448334552227256, rel=self.limit, abs=0.) assert data.shape == (1, 21, 21) def test_psf_subtraction_pca_single(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_single', images_in_tag='science', reference_in_tag='science', res_mean_tag='res_mean_single', res_median_tag='res_median_single', res_weighted_tag='res_weighted_single', res_rot_mean_clip_tag='res_clip_single', res_arr_out_tag='res_arr_single', basis_out_tag='basis_single', extra_rot=45., subtract_mean=True) self.pipeline.add_module(module) self.pipeline.run_module('pca_single') data = self.pipeline.get_data('res_mean_single') assert np.sum(data) == pytest.approx(-0.00011857022709778602, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_median_single') assert np.sum(data) == pytest.approx(-0.002184868916566093, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_weighted_single') assert np.sum(data) == pytest.approx(0.08102176735226937, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) # data = self.pipeline.get_data('res_clip_single') # assert np.sum(data) == pytest.approx(7.09495495339349e-05, rel=self.limit, abs=0.) # assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_arr_single1') assert np.sum(data) == pytest.approx(-0.0002751385418691618, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) data = self.pipeline.get_data('basis_single') assert np.sum(data) == pytest.approx(0.09438697731322143, rel=self.limit, abs=0.) 
assert data.shape == (2, 21, 21) def test_psf_subtraction_no_mean(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_no_mean', images_in_tag='science', reference_in_tag='science', res_mean_tag='res_mean_no_mean', res_median_tag=None, res_weighted_tag=None, res_rot_mean_clip_tag=None, res_arr_out_tag=None, basis_out_tag='basis_no_mean', extra_rot=0., subtract_mean=False) self.pipeline.add_module(module) self.pipeline.run_module('pca_no_mean') data = self.pipeline.get_data('res_mean_no_mean') assert np.sum(data) == pytest.approx(0.0006081272007585688, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('basis_no_mean') assert np.sum(data) == pytest.approx(5.118005177367776, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) def test_psf_subtraction_ref(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_ref', images_in_tag='science', reference_in_tag='reference', res_mean_tag='res_mean_ref', res_median_tag=None, res_weighted_tag=None, res_rot_mean_clip_tag=None, res_arr_out_tag=None, basis_out_tag='basis_ref', extra_rot=0., subtract_mean=True) self.pipeline.add_module(module) self.pipeline.run_module('pca_ref') data = self.pipeline.get_data('res_mean_ref') assert np.sum(data) == pytest.approx(0.0006330226118859073, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('basis_ref') assert np.sum(data) == pytest.approx(0.0943869773132221, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) def test_psf_subtraction_ref_no_mean(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_ref_no_mean', images_in_tag='science', reference_in_tag='reference', res_mean_tag='res_mean_ref_no_mean', res_median_tag=None, res_weighted_tag=None, res_rot_mean_clip_tag=None, res_arr_out_tag=None, basis_out_tag='basis_ref_no_mean', extra_rot=0., subtract_mean=False) self.pipeline.add_module(module) self.pipeline.run_module('pca_ref_no_mean') data = self.pipeline.get_data('res_mean_ref_no_mean') assert np.sum(data) == pytest.approx(0.0006081272007585764, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('basis_ref_no_mean') assert np.sum(data) == pytest.approx(5.118005177367774, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) def test_psf_subtraction_pca_single_mask(self) -> None: pca = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_single_mask', images_in_tag='science_prep', reference_in_tag='science_prep', res_mean_tag='res_mean_single_mask', res_median_tag='res_median_single_mask', res_weighted_tag='res_weighted_single_mask', res_rot_mean_clip_tag='res_clip_single_mask', res_arr_out_tag='res_arr_single_mask', basis_out_tag='basis_single_mask', extra_rot=45., subtract_mean=True) self.pipeline.add_module(pca) self.pipeline.run_module('pca_single_mask') data = self.pipeline.get_data('res_mean_single_mask') assert np.sum(data) == pytest.approx(0.00010696166038626307, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_median_single_mask') assert np.sum(data) == pytest.approx(-0.0021005307611346156, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_weighted_single_mask') assert np.sum(data) == pytest.approx(0.06014309988789256, rel=self.limit, abs=0.) 
assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_clip_single_mask') # assert np.sum(data) == pytest.approx(9.35120662148806e-05, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('res_arr_single_mask1') assert np.sum(data) == pytest.approx(0.0006170872862547557, rel=self.limit, abs=0.) assert data.shape == (10, 21, 21) data = self.pipeline.get_data('basis_single_mask') assert np.sum(data) == pytest.approx(0.08411251293842359, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) def test_psf_subtraction_no_mean_mask(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_no_mean_mask', images_in_tag='science_prep', reference_in_tag='science_prep', res_mean_tag='res_mean_no_mean_mask', res_median_tag=None, res_weighted_tag=None, res_rot_mean_clip_tag=None, res_arr_out_tag=None, basis_out_tag='basis_no_mean_mask', extra_rot=0., subtract_mean=False) self.pipeline.add_module(module) self.pipeline.run_module('pca_no_mean_mask') data = self.pipeline.get_data('res_mean_no_mean_mask') assert np.sum(data) == pytest.approx(2.3542359949502915e-05, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('basis_no_mean_mask') assert np.sum(data) == pytest.approx(5.655460951633232, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) def test_psf_subtraction_ref_mask(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_ref_mask', images_in_tag='science_prep', reference_in_tag='reference_prep', res_mean_tag='res_mean_ref_mask', res_median_tag=None, res_weighted_tag=None, res_rot_mean_clip_tag=None, res_arr_out_tag=None, basis_out_tag='basis_ref_mask', extra_rot=0., subtract_mean=True) self.pipeline.add_module(module) self.pipeline.run_module('pca_ref_mask') data = self.pipeline.get_data('res_mean_ref_mask') assert np.sum(data) == pytest.approx(9.400558926815758e-06, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('basis_ref_mask') assert np.sum(data) == pytest.approx(0.08411251293842326, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) def test_psf_subtraction_ref_no_mean_mask(self) -> None: module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_ref_no_mean_mask', images_in_tag='science_prep', reference_in_tag='reference_prep', res_mean_tag='res_mean_ref_no_mean_mask', res_median_tag=None, res_weighted_tag=None, res_rot_mean_clip_tag=None, res_arr_out_tag=None, basis_out_tag='basis_ref_no_mean_mask', extra_rot=0., subtract_mean=False) self.pipeline.add_module(module) self.pipeline.run_module('pca_ref_no_mean_mask') data = self.pipeline.get_data('res_mean_ref_no_mean_mask') assert np.sum(data) == pytest.approx(2.354235994950671e-05, rel=self.limit, abs=0.) assert data.shape == (2, 21, 21) data = self.pipeline.get_data('basis_ref_no_mean_mask') assert np.sum(data) == pytest.approx(5.655460951633233, rel=self.limit, abs=0.) 
assert data.shape == (2, 21, 21) def test_psf_subtraction_pca_multi(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_multi', images_in_tag='science', reference_in_tag='science', res_mean_tag='res_mean_multi', res_median_tag='res_median_multi', res_weighted_tag='res_weighted_multi', res_rot_mean_clip_tag='res_clip_multi', res_arr_out_tag=None, basis_out_tag='basis_multi', extra_rot=45., subtract_mean=True) self.pipeline.add_module(module) self.pipeline.run_module('pca_multi') data_single = self.pipeline.get_data('res_mean_single') data_multi = self.pipeline.get_data('res_mean_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('res_median_single') data_multi = self.pipeline.get_data('res_median_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('res_weighted_single') data_multi = self.pipeline.get_data('res_weighted_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('basis_single') data_multi = self.pipeline.get_data('basis_multi') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) def test_psf_subtraction_pca_multi_mask(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 module = PcaPsfSubtractionModule(pca_numbers=range(1, 3), name_in='pca_multi_mask', images_in_tag='science_prep', reference_in_tag='science_prep', res_mean_tag='res_mean_multi_mask', res_median_tag='res_median_multi_mask', res_weighted_tag='res_weighted_multi_mask', res_rot_mean_clip_tag='res_clip_multi_mask', res_arr_out_tag=None, basis_out_tag='basis_multi_mask', extra_rot=45., subtract_mean=True) self.pipeline.add_module(module) self.pipeline.run_module('pca_multi_mask') data_single = self.pipeline.get_data('res_mean_single_mask') data_multi = self.pipeline.get_data('res_mean_multi_mask') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('res_median_single_mask') data_multi = self.pipeline.get_data('res_median_multi_mask') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('res_weighted_single_mask') data_multi = self.pipeline.get_data('res_weighted_multi_mask') assert data_single.shape == data_multi.shape assert data_single[data_single > 1e-12] == \ pytest.approx(data_multi[data_multi > 1e-12], rel=self.limit, abs=0.) data_single = self.pipeline.get_data('basis_single_mask') data_multi = self.pipeline.get_data('basis_multi_mask') assert data_single.shape == data_multi.shape assert data_single == pytest.approx(data_multi, rel=self.limit, abs=0.) 
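    # A mismatch between the number of images and the number of parallactic
    # angles should raise a ValueError when running the PCA module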
def test_psf_subtraction_len_parang(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 1 parang = self.pipeline.get_data('header_science/PARANG') self.pipeline.set_attribute('science_prep', 'PARANG', np.append(parang, 0.), static=False) module = PcaPsfSubtractionModule(pca_numbers=[1, ], name_in='pca_len_parang', images_in_tag='science_prep', reference_in_tag='science_prep', res_mean_tag='res_mean_len_parang', extra_rot=0.) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('pca_len_parang') assert str(error.value) == 'The number of images (10) is not equal to the number of ' \ 'parallactic angles (11).' PynPoint-0.11.0/tests/test_processing/test_psfsubtraction_sdi.py000066400000000000000000000160701450275315200252540ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.psfsubtraction import PcaPsfSubtractionModule from pynpoint.util.tests import create_config, create_ifs_data, remove_test_data class TestPsfSubtractionSdi: def setup_class(self) -> None: self.limit = 1e-5 self.test_dir = os.path.dirname(__file__) + '/' create_ifs_data(self.test_dir+'science') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['science']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read', image_tag='science', input_dir=self.test_dir+'science', ifs_data=True) self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('science') assert np.sum(data) == pytest.approx(749.8396528807368, rel=self.limit, abs=0.) assert data.shape == (3, 10, 21, 21) self.pipeline.set_attribute('science', 'WAVELENGTH', [1., 1.1, 1.2], static=False) self.pipeline.set_attribute('science', 'PARANG', np.linspace(0., 180., 10), static=False) def test_psf_subtraction_sdi(self) -> None: processing_types = ['ADI', 'SDI+ADI', 'ADI+SDI', 'CODI'] expected = [[-0.16718942968552664, -0.790697125718532, 19.507979777136892, -0.21617058715490922], [-0.001347198747121658, -0.08621264803633322, 2.3073192270025333, -0.010269745733878437], [0.009450917836998779, -0.05776205365084376, -0.43506678222476264, 0.0058856438951644455], [-0.2428739554898396, -0.5069023645693083, 9.326414176548905, 0.00]] shape_expc = [(2, 3, 21, 21), (2, 2, 3, 21, 21), (1, 1, 3, 21, 21), (2, 3, 21, 21)] pca_numbers = [range(1, 3), (range(1, 3), range(1, 3)), ([1], [1]), range(1, 3)] res_arr_tags = [None, None, 'res_arr_single_sdi_ADI+SDI', None] for i, p_type in enumerate(processing_types): module = PcaPsfSubtractionModule(pca_numbers=pca_numbers[i], name_in='pca_single_sdi_'+p_type, images_in_tag='science', reference_in_tag='science', res_mean_tag='res_mean_single_sdi_'+p_type, res_median_tag='res_median_single_sdi_'+p_type, res_weighted_tag='res_weighted_single_sdi_'+p_type, res_rot_mean_clip_tag='res_clip_single_sdi_'+p_type, res_arr_out_tag=res_arr_tags[i], basis_out_tag='basis_single_sdi_'+p_type, extra_rot=0., subtract_mean=True, processing_type=p_type) self.pipeline.add_module(module) self.pipeline.run_module('pca_single_sdi_'+p_type) data = self.pipeline.get_data('res_mean_single_sdi_'+p_type) assert np.sum(data) == pytest.approx(expected[i][0], rel=self.limit, abs=0.) 
assert data.shape == shape_expc[i] data = self.pipeline.get_data('res_median_single_sdi_'+p_type) assert np.sum(data) == pytest.approx(expected[i][1], rel=self.limit, abs=0.) assert data.shape == shape_expc[i] data = self.pipeline.get_data('res_weighted_single_sdi_'+p_type) assert np.sum(data) == pytest.approx(expected[i][2], rel=self.limit, abs=0.) assert data.shape == shape_expc[i] data = self.pipeline.get_data('res_clip_single_sdi_'+p_type) # assert np.sum(data) == pytest.approx(expected[i][3], rel=self.limit, abs=0.) assert data.shape == shape_expc[i] # data = self.pipeline.get_data('basis_single_sdi_'+p_type) # assert np.sum(data) == pytest.approx(-1.3886119555248766, rel=self.limit, abs=0.) # assert data.shape == (5, 30, 30) def test_psf_subtraction_sdi_multi(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 processing_types = ['SDI', 'ADI+SDI'] pca_numbers = [range(1, 3), (range(1, 3), range(1, 3))] expected = [[-0.004159475403024583, 0.02613693149969979, -0.12940723035023394, -0.008432530081399985], [-0.006580571531064533, -0.08171546066331437, 0.5700432018961117, -0.014527353460544753]] shape_expc = [(2, 3, 21, 21), (2, 2, 3, 21, 21)] for i, p_type in enumerate(processing_types): module = PcaPsfSubtractionModule(pca_numbers=pca_numbers[i], name_in='pca_multi_sdi_'+p_type, images_in_tag='science', reference_in_tag='science', res_mean_tag='res_mean_multi_sdi_'+p_type, res_median_tag='res_median_multi_sdi_'+p_type, res_weighted_tag='res_weighted_multi_sdi_'+p_type, res_rot_mean_clip_tag='res_clip_multi_sdi_'+p_type, res_arr_out_tag=None, basis_out_tag=None, extra_rot=0., subtract_mean=True, processing_type=p_type) self.pipeline.add_module(module) self.pipeline.run_module('pca_multi_sdi_'+p_type) data = self.pipeline.get_data('res_mean_multi_sdi_'+p_type) assert np.sum(data) == pytest.approx(expected[i][0], rel=self.limit, abs=0.) assert data.shape == shape_expc[i] data = self.pipeline.get_data('res_median_multi_sdi_'+p_type) assert np.sum(data) == pytest.approx(expected[i][1], rel=self.limit, abs=0.) assert data.shape == shape_expc[i] data = self.pipeline.get_data('res_weighted_multi_sdi_'+p_type) assert np.sum(data) == pytest.approx(expected[i][2], rel=self.limit, abs=0.) assert data.shape == shape_expc[i] data = self.pipeline.get_data('res_clip_multi_sdi_'+p_type) # assert np.sum(data) == pytest.approx(expected[i][3], rel=self.limit, abs=0.) 
assert data.shape == shape_expc[i] PynPoint-0.11.0/tests/test_processing/test_resizing.py000066400000000000000000000147731450275315200232110ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.resizing import CropImagesModule, ScaleImagesModule, \ AddLinesModule, RemoveLinesModule from pynpoint.util.tests import create_config, create_star_data, create_ifs_data, remove_test_data class TestResizing: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'resize') create_ifs_data(self.test_dir+'resize_ifs') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['resize', 'resize_ifs']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read', image_tag='read', input_dir=self.test_dir+'resize', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('read') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) module = FitsReadingModule(name_in='read_ifs', image_tag='read_ifs', input_dir=self.test_dir+'resize_ifs', overwrite=True, check=True, ifs_data=True) self.pipeline.add_module(module) self.pipeline.run_module('read_ifs') data = self.pipeline.get_data('read_ifs') assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.) assert data.shape == (3, 10, 21, 21) def test_crop_images(self) -> None: module = CropImagesModule(size=0.2, center=None, name_in='crop1', image_in_tag='read', image_out_tag='crop1') self.pipeline.add_module(module) self.pipeline.run_module('crop1') module = CropImagesModule(size=0.2, center=(4, 4), name_in='crop2', image_in_tag='read', image_out_tag='crop2') self.pipeline.add_module(module) self.pipeline.run_module('crop2') module = CropImagesModule(size=0.2, center=(4, 4), name_in='crop_ifs', image_in_tag='read_ifs', image_out_tag='crop_ifs') self.pipeline.add_module(module) self.pipeline.run_module('crop_ifs') data = self.pipeline.get_data('crop1') assert np.sum(data) == pytest.approx(104.93318507061295, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) data = self.pipeline.get_data('crop2') assert np.sum(data) == pytest.approx(105.64863165433025, rel=self.limit, abs=0.) assert data.shape == (10, 9, 9) data = self.pipeline.get_data('crop_ifs') assert np.sum(data) == pytest.approx(15.870936600122521, rel=self.limit, abs=0.) assert data.shape == (3, 10, 9, 9) def test_scale_images(self) -> None: module = ScaleImagesModule(name_in='scale1', image_in_tag='read', image_out_tag='scale1', scaling=(2., 2., None), pixscale=True) self.pipeline.add_module(module) self.pipeline.run_module('scale1') module = ScaleImagesModule(name_in='scale2', image_in_tag='read', image_out_tag='scale2', scaling=(None, None, 2.), pixscale=True) self.pipeline.add_module(module) self.pipeline.run_module('scale2') data = self.pipeline.get_data('scale1') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 22, 22) data = self.pipeline.get_data('scale2') assert np.sum(data) == pytest.approx(211.08557759610554, rel=self.limit, abs=0.) 
assert data.shape == (10, 11, 11) attr = self.pipeline.get_attribute('read', 'PIXSCALE', static=True) assert attr == pytest.approx(0.027, rel=self.limit, abs=0.) attr = self.pipeline.get_attribute('scale1', 'PIXSCALE', static=True) assert attr == pytest.approx(0.0135, rel=self.limit, abs=0.) attr = self.pipeline.get_attribute('scale2', 'PIXSCALE', static=True) assert attr == pytest.approx(0.027, rel=self.limit, abs=0.) def test_add_lines(self) -> None: module = AddLinesModule(lines=(2, 5, 0, 3), name_in='add', image_in_tag='read', image_out_tag='add') self.pipeline.add_module(module) self.pipeline.run_module('add') data = self.pipeline.get_data('add') assert np.sum(data) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.) assert data.shape == (10, 14, 18) def test_remove_lines(self) -> None: module = RemoveLinesModule(lines=(2, 5, 0, 3), name_in='remove', image_in_tag='read', image_out_tag='remove') self.pipeline.add_module(module) self.pipeline.run_module('remove') data = self.pipeline.get_data('remove') assert np.sum(data) == pytest.approx(67.49726677462391, rel=self.limit, abs=0.) assert data.shape == (10, 8, 4) with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 self.pipeline.run_module('remove') data_multi = self.pipeline.get_data('remove') assert data == pytest.approx(data_multi, rel=self.limit, abs=0.) assert data.shape == data_multi.shape PynPoint-0.11.0/tests/test_processing/test_stacksubsample.py000066400000000000000000000266741450275315200244030ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.stacksubset import StackAndSubsetModule, StackCubesModule, \ DerotateAndStackModule, CombineTagsModule from pynpoint.util.tests import create_config, create_star_data, create_ifs_data, remove_test_data class TestStackSubset: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_ifs_data(self.test_dir+'data_ifs') create_star_data(self.test_dir+'data') create_star_data(self.test_dir+'extra') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['data_ifs', 'extra', 'data']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read1', image_tag='images', input_dir=self.test_dir+'data', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read1') data = self.pipeline.get_data('images') assert np.mean(data) == pytest.approx(0.08722544528764692, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) module = FitsReadingModule(name_in='read2', image_tag='extra', input_dir=self.test_dir+'extra', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read2') extra = self.pipeline.get_data('extra') assert data == pytest.approx(extra, rel=self.limit, abs=0.) module = FitsReadingModule(name_in='read_ifs', image_tag='images_ifs', input_dir=self.test_dir+'data_ifs', overwrite=True, check=True, ifs_data=True) self.pipeline.add_module(module) self.pipeline.run_module('read_ifs') self.pipeline.set_attribute('images_ifs', 'PARANG', np.linspace(0., 180., 10), static=False) data = self.pipeline.get_data('images_ifs') assert np.sum(data) == pytest.approx(749.8396528807369, rel=self.limit, abs=0.) 
assert data.shape == (3, 10, 21, 21) def test_stack_and_subset(self) -> None: self.pipeline.set_attribute('images', 'PARANG', np.arange(10.), static=False) module = StackAndSubsetModule(name_in='stack1', image_in_tag='images', image_out_tag='stack1', random=4, stacking=2, combine='mean', max_rotation=None) self.pipeline.add_module(module) self.pipeline.run_module('stack1') data = self.pipeline.get_data('stack1') assert np.mean(data) == pytest.approx(0.08758276283743936, rel=self.limit, abs=0.) assert data.shape == (4, 11, 11) data = self.pipeline.get_data('header_stack1/INDEX') assert data == pytest.approx(np.arange(4), rel=self.limit, abs=0.) assert data.shape == (4, ) data = self.pipeline.get_data('header_stack1/PARANG') assert data == pytest.approx([0.5, 2.5, 6.5, 8.5], rel=self.limit, abs=0.) assert data.shape == (4, ) def test_stack_max_rotation(self) -> None: angles = np.arange(10.) angles[1:6] = 3. angles[9] = 50. self.pipeline.set_attribute('images', 'PARANG', angles, static=False) module = StackAndSubsetModule(name_in='stack2', image_in_tag='images', image_out_tag='stack2', random=None, stacking=2, combine='median', max_rotation=1.) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('stack2') assert len(warning) == 1 assert warning[0].message.args[0] == 'Testing of util.module.stack_angles has been ' \ 'limited, please use carefully.' data = self.pipeline.get_data('stack2') assert np.mean(data) == pytest.approx(0.08580759396987508, rel=self.limit, abs=0.) assert data.shape == (7, 11, 11) data = self.pipeline.get_data('header_stack2/INDEX') assert data == pytest.approx(np.arange(7), rel=self.limit, abs=0.) assert data.shape == (7, ) data = self.pipeline.get_data('header_stack2/PARANG') assert data.shape == (7, ) self.pipeline.set_attribute('images', 'PARANG', np.arange(10.), static=False) def test_stack_cube(self) -> None: module = StackCubesModule(name_in='stackcube', image_in_tag='images', image_out_tag='mean', combine='mean') self.pipeline.add_module(module) self.pipeline.run_module('stackcube') data = self.pipeline.get_data('mean') assert np.mean(data) == pytest.approx(0.08722544528764689, rel=self.limit, abs=0.) assert data.shape == (2, 11, 11) attribute = self.pipeline.get_attribute('mean', 'INDEX', static=False) assert np.mean(attribute) == pytest.approx(0.5, rel=self.limit, abs=0.) assert attribute.shape == (2, ) attribute = self.pipeline.get_attribute('mean', 'NFRAMES', static=False) assert np.mean(attribute) == pytest.approx(1, rel=self.limit, abs=0.) assert attribute.shape == (2, ) def test_derotate_and_stack(self) -> None: module = DerotateAndStackModule(name_in='derotate1', image_in_tag='images', image_out_tag='derotate1', derotate=True, stack='mean', extra_rot=10.) self.pipeline.add_module(module) self.pipeline.run_module('derotate1') data = self.pipeline.get_data('derotate1') assert np.mean(data) == pytest.approx(0.08709860116308817, rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) module = DerotateAndStackModule(name_in='derotate2', image_in_tag='images', image_out_tag='derotate2', derotate=False, stack='median', extra_rot=0.) self.pipeline.add_module(module) self.pipeline.run_module('derotate2') data = self.pipeline.get_data('derotate2') assert np.mean(data) == pytest.approx(0.0861160094566323, rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) data = self.pipeline.get_data('derotate2') assert np.mean(data) == pytest.approx(0.0861160094566323, rel=self.limit, abs=0.) 
assert data.shape == (1, 11, 11) module = DerotateAndStackModule(name_in='derotate_ifs1', image_in_tag='images_ifs', image_out_tag='derotate_ifs1', derotate=True, stack='mean', extra_rot=0., dimension='time') self.pipeline.add_module(module) self.pipeline.run_module('derotate_ifs1') data = self.pipeline.get_data('derotate_ifs1') assert np.mean(data) == pytest.approx(0.1884438996655355, rel=self.limit, abs=0.) assert data.shape == (3, 1, 21, 21) module = DerotateAndStackModule(name_in='derotate_ifs2', image_in_tag='images_ifs', image_out_tag='derotate_ifs2', derotate=False, stack='median', extra_rot=0., dimension='wavelength') self.pipeline.add_module(module) self.pipeline.run_module('derotate_ifs2') data = self.pipeline.get_data('derotate_ifs2') assert np.mean(data) == pytest.approx(0.055939644983170146, rel=self.limit, abs=0.) assert data.shape == (1, 10, 21, 21) module = DerotateAndStackModule(name_in='derotate_ifs3', image_in_tag='images_ifs', image_out_tag='derotate_ifs3', derotate=True, stack=None, extra_rot=0., dimension='wavelength') self.pipeline.add_module(module) self.pipeline.run_module('derotate_ifs3') data = self.pipeline.get_data('derotate_ifs3') assert np.mean(data) == pytest.approx(0.05653316989966066, rel=self.limit, abs=0.) assert data.shape == (3, 10, 21, 21) def test_combine_tags(self) -> None: module = CombineTagsModule(image_in_tags=['images', 'extra'], check_attr=True, index_init=False, name_in='combine1', image_out_tag='combine1') self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('combine1') assert len(warning) == 1 assert warning[0].message.args[0] == 'The non-static keyword FILES is already used but ' \ 'with different values. It is advisable to only ' \ 'combine tags that descend from the same data set.' data = self.pipeline.get_data('combine1') assert np.mean(data) == pytest.approx(0.0872254452876469, rel=self.limit, abs=0.) assert data.shape == (20, 11, 11) data = self.pipeline.get_data('header_combine1/INDEX') assert data[19] == 9 assert data.shape == (20, ) module = CombineTagsModule(image_in_tags=['images', 'extra'], check_attr=False, index_init=True, name_in='combine2', image_out_tag='combine2') self.pipeline.add_module(module) self.pipeline.run_module('combine2') data = self.pipeline.get_data('combine1') extra = self.pipeline.get_data('combine2') assert data == pytest.approx(extra, rel=self.limit, abs=0.) 
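        # With index_init=True the INDEX attribute is reinitialized, so it runs
        # continuously from 0 to 19 across the two combined tags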
data = self.pipeline.get_data('header_combine2/INDEX') assert data[19] == 19 assert data.shape == (20, ) PynPoint-0.11.0/tests/test_processing/test_timedenoising.py000066400000000000000000000215431450275315200242060ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.processing.resizing import AddLinesModule from pynpoint.processing.timedenoising import CwtWaveletConfiguration, DwtWaveletConfiguration, \ WaveletTimeDenoisingModule, TimeNormalizationModule from pynpoint.util.tests import create_config, remove_test_data, create_star_data class TestTimeDenoising: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'images') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['images']) def test_read_data(self) -> None: module = FitsReadingModule(name_in='read', image_tag='images', input_dir=self.test_dir+'images', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_wavelet_denoising_cwt_dog(self) -> None: cwt_config = CwtWaveletConfiguration(wavelet='dog', wavelet_order=2, keep_mean=False, resolution=0.5) assert cwt_config.m_wavelet == 'dog' assert cwt_config.m_wavelet_order == 2 assert not cwt_config.m_keep_mean assert cwt_config.m_resolution == 0.5 module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config, name_in='wavelet_cwt_dog', image_in_tag='images', image_out_tag='wavelet_cwt_dog', padding='zero', median_filter=True, threshold_function='soft') self.pipeline.add_module(module) self.pipeline.run_module('wavelet_cwt_dog') data = self.pipeline.get_data('wavelet_cwt_dog') assert np.sum(data) == pytest.approx(105.1035789572968, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 4 self.pipeline.run_module('wavelet_cwt_dog') data_multi = self.pipeline.get_data('wavelet_cwt_dog') assert data == pytest.approx(data_multi, rel=self.limit, abs=0.) assert data.shape == data_multi.shape def test_wavelet_denoising_cwt_morlet(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file['config'].attrs['CPU'] = 1 cwt_config = CwtWaveletConfiguration(wavelet='morlet', wavelet_order=5, keep_mean=False, resolution=0.5) assert cwt_config.m_wavelet == 'morlet' assert cwt_config.m_wavelet_order == 5 assert not cwt_config.m_keep_mean assert cwt_config.m_resolution == 0.5 module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config, name_in='wavelet_cwt_morlet', image_in_tag='images', image_out_tag='wavelet_cwt_morlet', padding='mirror', median_filter=False, threshold_function='hard') self.pipeline.add_module(module) self.pipeline.run_module('wavelet_cwt_morlet') data = self.pipeline.get_data('wavelet_cwt_morlet') assert np.sum(data) == pytest.approx(104.86262840716438, rel=self.limit, abs=0.) 
assert data.shape == (10, 11, 11) data = self.pipeline.get_attribute('wavelet_cwt_morlet', 'NFRAMES', static=False) assert data[0] == data[1] == 5 def test_wavelet_denoising_dwt(self) -> None: dwt_config = DwtWaveletConfiguration(wavelet='db8') assert dwt_config.m_wavelet == 'db8' module = WaveletTimeDenoisingModule(wavelet_configuration=dwt_config, name_in='wavelet_dwt', image_in_tag='images', image_out_tag='wavelet_dwt', padding='zero', median_filter=True, threshold_function='soft') self.pipeline.add_module(module) self.pipeline.run_module('wavelet_dwt') data = self.pipeline.get_data('wavelet_dwt') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_time_normalization(self) -> None: module = TimeNormalizationModule(name_in='timenorm', image_in_tag='images', image_out_tag='timenorm') self.pipeline.add_module(module) self.pipeline.run_module('timenorm') data = self.pipeline.get_data('timenorm') assert np.sum(data) == pytest.approx(56.443663773873, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_wavelet_denoising_even_size(self) -> None: module = AddLinesModule(name_in='add', image_in_tag='images', image_out_tag='images_even', lines=(1, 0, 1, 0)) self.pipeline.add_module(module) self.pipeline.run_module('add') data = self.pipeline.get_data('images_even') assert np.sum(data) == pytest.approx(105.54278879805275, rel=self.limit, abs=0.) assert data.shape == (10, 12, 12) cwt_config = CwtWaveletConfiguration(wavelet='dog', wavelet_order=2, keep_mean=False, resolution=0.5) assert cwt_config.m_wavelet == 'dog' assert cwt_config.m_wavelet_order == 2 assert not cwt_config.m_keep_mean assert cwt_config.m_resolution == 0.5 module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config, name_in='wavelet_even_1', image_in_tag='images_even', image_out_tag='wavelet_even_1', padding='zero', median_filter=True, threshold_function='soft') self.pipeline.add_module(module) self.pipeline.run_module('wavelet_even_1') data = self.pipeline.get_data('wavelet_even_1') assert np.sum(data) == pytest.approx(105.1035789572968, rel=self.limit, abs=0.) assert data.shape == (10, 12, 12) module = WaveletTimeDenoisingModule(wavelet_configuration=cwt_config, name_in='wavelet_even_2', image_in_tag='images_even', image_out_tag='wavelet_even_2', padding='mirror', median_filter=True, threshold_function='soft') self.pipeline.add_module(module) self.pipeline.run_module('wavelet_even_2') data = self.pipeline.get_data('wavelet_even_2') assert np.sum(data) == pytest.approx(105.06809820408587, rel=self.limit, abs=0.) assert data.shape == (10, 12, 12) data = self.pipeline.get_attribute('images', 'NFRAMES', static=False) assert data == pytest.approx([5, 5], rel=self.limit, abs=0.) data = self.pipeline.get_attribute('wavelet_even_1', 'NFRAMES', static=False) assert data == pytest.approx([5, 5], rel=self.limit, abs=0.) data = self.pipeline.get_attribute('wavelet_even_2', 'NFRAMES', static=False) assert data == pytest.approx([5, 5], rel=self.limit, abs=0.) 
PynPoint-0.11.0/tests/test_readwrite/000077500000000000000000000000001450275315200175445ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_readwrite/__init__.py000066400000000000000000000000001450275315200216430ustar00rootroot00000000000000PynPoint-0.11.0/tests/test_readwrite/test_attr_reading.py000066400000000000000000000304241450275315200236230ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.attr_reading import ParangReadingModule, AttributeReadingModule, \ WavelengthReadingModule from pynpoint.util.tests import create_config, create_random, remove_test_data class TestAttributeReading: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_random(self.test_dir, nimages=10) create_config(self.test_dir+'PynPoint_config.ini') np.savetxt(self.test_dir+'parang.dat', np.arange(10., 20., 1.)) np.savetxt(self.test_dir+'new.dat', np.arange(20., 30., 1.)) np.savetxt(self.test_dir+'attribute.dat', np.arange(0, 10, 1), fmt='%i') np.savetxt(self.test_dir+'wavelength.dat', np.arange(0., 10., 1.)) data2d = np.random.normal(loc=0, scale=2e-4, size=(10, 10)) np.savetxt(self.test_dir+'data_2d.dat', data2d) self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, files=['parang.dat', 'new.dat', 'attribute.dat', 'data_2d.dat', 'wavelength.dat']) def test_input_data(self) -> None: data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(0.007133341144768919, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_parang_reading(self) -> None: module = ParangReadingModule(name_in='parang1', data_tag='images', file_name='parang.dat', input_dir=None, overwrite=True) self.pipeline.add_module(module) self.pipeline.run_module('parang1') data = self.pipeline.get_data('header_images/PARANG') assert data.dtype == 'float64' assert data == pytest.approx(np.arange(10., 20., 1.), rel=self.limit, abs=0.) assert data.shape == (10, ) def test_parang_reading_same(self) -> None: module = ParangReadingModule(name_in='parang2', data_tag='images', file_name='parang.dat', input_dir=None, overwrite=True) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('parang2') assert len(warning) == 1 assert warning[0].message.args[0] == 'The PARANG attribute is already present and ' \ 'contains the same values as are present in ' \ 'parang.dat.' def test_parang_reading_present(self) -> None: module = ParangReadingModule(name_in='parang3', data_tag='images', file_name='new.dat', input_dir=None, overwrite=False) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('parang3') assert len(warning) == 1 assert warning[0].message.args[0] == 'The PARANG attribute is already present. Set the ' \ '\'overwrite\' parameter to True in order to ' \ 'overwrite the values with new.dat.' 
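    # With overwrite=True the existing PARANG values are replaced by those in
    # new.dat without raising a warning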
def test_parang_reading_overwrite(self) -> None: module = ParangReadingModule(file_name='new.dat', name_in='parang4', input_dir=None, data_tag='images', overwrite=True) self.pipeline.add_module(module) self.pipeline.run_module('parang4') def test_parang_reading_2d(self) -> None: module = ParangReadingModule(name_in='parang6', data_tag='images', file_name='data_2d.dat', input_dir=None, overwrite=False) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('parang6') assert str(error.value) == 'The input file data_2d.dat should contain a 1D data set with ' \ 'the parallactic angles.' def test_attribute_reading(self) -> None: module = AttributeReadingModule(file_name='attribute.dat', attribute='EXP_NO', name_in='attribute1', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) self.pipeline.run_module('attribute1') data = self.pipeline.get_data('header_images/EXP_NO') assert data.dtype == 'int64' assert data == pytest.approx(np.arange(10), rel=self.limit, abs=0.) assert data.shape == (10, ) def test_attribute_reading_present(self) -> None: module = AttributeReadingModule(file_name='parang.dat', attribute='PARANG', name_in='attribute3', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('attribute3') assert warning[0].message.args[0] == 'The attribute \'PARANG\' is already present. Set ' \ 'the \'overwrite\' parameter to True in order to ' \ 'overwrite the values with parang.dat.' def test_attribute_reading_invalid(self) -> None: module = AttributeReadingModule(file_name='attribute.dat', attribute='test', name_in='attribute4', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('attribute4') assert str(error.value) == '\'test\' is not a valid attribute.' def test_attribute_reading_2d(self) -> None: module = AttributeReadingModule(file_name='data_2d.dat', attribute='DITHER_X', name_in='attribute5', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('attribute5') assert str(error.value) == 'The input file data_2d.dat should contain a 1D list with ' \ 'attributes.' def test_attribute_reading_same(self) -> None: module = AttributeReadingModule(file_name='attribute.dat', attribute='EXP_NO', name_in='attribute6', input_dir=None, data_tag='images', overwrite=True) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('attribute6') assert len(warning) == 1 assert warning[0].message.args[0] == 'The \'EXP_NO\' attribute is already present and ' \ 'contains the same values as are present in ' \ 'attribute.dat.' def test_attribute_reading_overwrite(self) -> None: module = AttributeReadingModule(file_name='parang.dat', attribute='PARANG', name_in='attribute7', input_dir=None, data_tag='images', overwrite=True) self.pipeline.add_module(module) self.pipeline.run_module('attribute7') data = self.pipeline.get_attribute('images', 'PARANG', static=False) assert data == pytest.approx(np.arange(10., 20., 1.), rel=self.limit, abs=0.) 
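    # WavelengthReadingModule stores the values from wavelength.dat as the
    # non-static WAVELENGTH attribute of the data set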
def test_wavelength_reading(self) -> None: module = WavelengthReadingModule(file_name='wavelength.dat', name_in='wavelength1', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) self.pipeline.run_module('wavelength1') data = self.pipeline.get_data('header_images/WAVELENGTH') assert data.dtype == 'float64' assert data == pytest.approx(np.arange(10.), rel=self.limit, abs=0.) assert data.shape == (10, ) def test_wavelength_reading_same(self) -> None: module = WavelengthReadingModule(file_name='wavelength.dat', name_in='wavelength2', input_dir=None, data_tag='images', overwrite=True) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('wavelength2') assert len(warning) == 1 assert warning[0].message.args[0] == 'The WAVELENGTH attribute is already present and ' \ 'contains the same values as are present in ' \ 'wavelength.dat.' def test_wavelength_reading_present(self) -> None: module = WavelengthReadingModule(file_name='new.dat', name_in='wavelength3', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('wavelength3') assert len(warning) == 1 assert warning[0].message.args[0] == 'The WAVELENGTH attribute is already present. Set ' \ 'the \'overwrite\' parameter to True in order to ' \ 'overwrite the values with new.dat.' def test_wavelength_reading_overwrite(self) -> None: module = WavelengthReadingModule(file_name='new.dat', name_in='wavelength4', input_dir=None, data_tag='images', overwrite=True) self.pipeline.add_module(module) self.pipeline.run_module('wavelength4') def test_wavelength_reading_2d(self) -> None: module = WavelengthReadingModule(file_name='data_2d.dat', name_in='wavelength6', input_dir=None, data_tag='images', overwrite=False) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('wavelength6') assert str(error.value) == 'The input file data_2d.dat should contain a 1D data set with ' \ 'the wavelengths.' PynPoint-0.11.0/tests/test_readwrite/test_attr_writing.py000066400000000000000000000067151450275315200237030ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.attr_writing import ParangWritingModule, AttributeWritingModule from pynpoint.util.tests import create_config, create_random, remove_test_data class TestAttributeWriting: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_random(self.test_dir) create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, files=['parang.dat', 'attribute.dat']) def test_input_data(self) -> None: data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(0.007153603490533874, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_parang_writing(self) -> None: module = ParangWritingModule(file_name='parang.dat', name_in='parang_write1', output_dir=None, data_tag='images', header=None) self.pipeline.add_module(module) self.pipeline.run_module('parang_write1') data = np.loadtxt(self.test_dir+'parang.dat') assert np.sum(data) == pytest.approx(10., rel=self.limit, abs=0.) 
assert data.shape == (5, ) def test_attribute_writing(self) -> None: module = AttributeWritingModule(file_name='attribute.dat', name_in='attr_write1', output_dir=None, data_tag='images', attribute='PARANG', header=None) self.pipeline.add_module(module) self.pipeline.run_module('attr_write1') data = np.loadtxt(self.test_dir+'attribute.dat') assert np.sum(data) == pytest.approx(10., rel=self.limit, abs=0.) assert data.shape == (5, ) def test_attribute_not_present(self) -> None: module = AttributeWritingModule(file_name='attribute.dat', name_in='attr_write3', output_dir=None, data_tag='images', attribute='test', header=None) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('attr_write3') assert str(error.value) == 'The \'test\' attribute is not present in \'images\'.' def test_parang_writing_not_present(self) -> None: with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: del hdf_file['header_images/PARANG'] module = ParangWritingModule(file_name='parang.dat', name_in='parang_write3', output_dir=None, data_tag='images', header=None) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('parang_write3') assert str(error.value) == 'The PARANG attribute is not present in \'images\'.' PynPoint-0.11.0/tests/test_readwrite/test_fitsreading.py000066400000000000000000000232041450275315200234550ustar00rootroot00000000000000import os import pytest import numpy as np from astropy.io import fits from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.util.tests import create_config, create_star_data, remove_test_data class TestFitsReading: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'fits') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['fits'], files=['filenames.dat']) def test_fits_reading(self) -> None: module = FitsReadingModule(name_in='read1', input_dir=self.test_dir+'fits', image_tag='input', overwrite=False, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read1') data = self.pipeline.get_data('input') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_fits_reading_overwrite(self) -> None: module = FitsReadingModule(name_in='read2', input_dir=self.test_dir+'fits', image_tag='input', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read2') data = self.pipeline.get_data('input') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_static_not_found(self) -> None: self.pipeline.set_attribute('config', 'DIT', 'Test', static=True) module = FitsReadingModule(name_in='read3', input_dir=self.test_dir+'fits', image_tag='input', overwrite=True, check=True) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read3') assert len(warning) == 2 for item in warning: assert item.message.args[0] == 'Static attribute DIT (=Test) not found in the FITS ' \ 'header.' 
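        # Restore the DIT keyword in the central configuration so that the
        # remaining tests read the integration time from the FITS headers again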
self.pipeline.set_attribute('config', 'DIT', 'ESO DET DIT', static=True) def test_static_changing(self) -> None: with fits.open(self.test_dir+'fits/images_0.fits') as hdu: header = hdu[0].header header['HIERARCH ESO DET DIT'] = 0.1 hdu.writeto(self.test_dir+'fits/images_0.fits', overwrite=True) with fits.open(self.test_dir+'fits/images_1.fits') as hdu: header = hdu[0].header header['HIERARCH ESO DET DIT'] = 0.2 hdu.writeto(self.test_dir+'fits/images_1.fits', overwrite=True) module = FitsReadingModule(name_in='read4', input_dir=self.test_dir+'fits', image_tag='input', overwrite=True, check=True) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read4') assert len(warning) == 1 assert warning[0].message.args[0] == f'Static attribute ESO DET DIT has changed. ' \ f'Possibly the current file {self.test_dir}fits/' \ f'images_1.fits does not belong to the data set ' \ f'\'input\'. Attribute value is updated.' def test_header_attribute(self) -> None: with fits.open(self.test_dir+'fits/images_0.fits') as hdu: header = hdu[0].header header['PARANG'] = 1.0 hdu.writeto(self.test_dir+'fits/images_0.fits', overwrite=True) with fits.open(self.test_dir+'fits/images_1.fits') as hdu: header = hdu[0].header header['PARANG'] = 2.0 header['HIERARCH ESO DET DIT'] = 0.1 hdu.writeto(self.test_dir+'fits/images_1.fits', overwrite=True) module = FitsReadingModule(name_in='read5', input_dir=self.test_dir+'fits', image_tag='input', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read5') def test_non_static_not_found(self) -> None: self.pipeline.set_attribute('config', 'DIT', 'None', static=True) for i in range(2): with fits.open(f'{self.test_dir}/fits/images_{i}.fits') as hdu: header = hdu[0].header del header['HIERARCH ESO DET DIT'] del header['HIERARCH ESO DET EXP NO'] hdu.writeto(f'{self.test_dir}/fits/images_{i}.fits', overwrite=True) module = FitsReadingModule(name_in='read6', input_dir=self.test_dir+'fits', image_tag='input', overwrite=True, check=True) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read6') assert len(warning) == 2 for item in warning: assert item.message.args[0] == 'Non-static attribute EXP_NO (=ESO DET EXP NO) not ' \ 'found in the FITS header.' def test_fits_read_files(self) -> None: module = FitsReadingModule(name_in='read7', input_dir=None, image_tag='files', overwrite=False, check=True, filenames=[self.test_dir+'fits/images_0.fits', self.test_dir+'fits/images_1.fits']) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read7') assert len(warning) == 2 for item in warning: assert item.message.args[0] == 'Non-static attribute EXP_NO (=ESO DET EXP NO) not ' \ 'found in the FITS header.' data = self.pipeline.get_data('files') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) 
assert data.shape == (10, 11, 11) def test_fits_read_textfile(self) -> None: with open(self.test_dir+'filenames.dat', 'w') as file_obj: file_obj.write(self.test_dir+'fits/images_0.fits\n') file_obj.write(self.test_dir+'fits/images_1.fits\n') module = FitsReadingModule(name_in='read8', input_dir=None, image_tag='files', overwrite=True, check=True, filenames=self.test_dir+'filenames.dat') self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read8') assert len(warning) == 2 for item in warning: assert item.message.args[0] == 'Non-static attribute EXP_NO (=ESO DET EXP NO) not ' \ 'found in the FITS header.' data = self.pipeline.get_data('files') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) assert data.shape == (10, 11, 11) def test_fits_read_files_exists(self) -> None: module = FitsReadingModule(name_in='read9', input_dir=None, image_tag='files', overwrite=True, check=True, filenames=[f'{self.test_dir}fits/images_0.fits', f'{self.test_dir}fits/images_2.fits']) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('read9') assert str(error.value) == f'The file {self.test_dir}fits/images_2.fits does not exist. ' \ f'Please check that the path is correct.' def test_fits_read_textfile_exists(self) -> None: with open(self.test_dir+'filenames.dat', 'w') as file_obj: file_obj.write(self.test_dir+'fits/images_0.fits\n') file_obj.write(self.test_dir+'fits/images_2.fits\n') module = FitsReadingModule(name_in='read10', input_dir=None, image_tag='files', overwrite=True, check=True, filenames=self.test_dir+'filenames.dat') self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('read10') assert str(error.value) == f'The file {self.test_dir}fits/images_2.fits does not exist. ' \ f'Please check that the path is correct.' PynPoint-0.11.0/tests/test_readwrite/test_fitswriting.py000066400000000000000000000137261450275315200235370ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.fitsreading import FitsReadingModule from pynpoint.readwrite.fitswriting import FitsWritingModule from pynpoint.util.tests import create_config, create_star_data, remove_test_data class TestFitsWriting: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_star_data(self.test_dir+'fits_data') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['fits_data'], files=['test.fits']) def test_fits_reading(self) -> None: module = FitsReadingModule(name_in='read', input_dir=self.test_dir+'fits_data', image_tag='images', overwrite=True, check=True) self.pipeline.add_module(module) self.pipeline.run_module('read') data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(105.54278879805277, rel=self.limit, abs=0.) 
assert data.shape == (10, 11, 11) def test_fits_writing(self) -> None: module = FitsWritingModule(file_name='test.fits', name_in='write1', output_dir=None, data_tag='images', data_range=None, overwrite=True) self.pipeline.add_module(module) self.pipeline.run_module('write1') def test_filename_extension(self) -> None: with pytest.raises(ValueError) as error: FitsWritingModule(file_name='test.dat', name_in='write3', output_dir=None, data_tag='images', data_range=None, overwrite=True, subset_size=None) assert str(error.value) == 'Output \'file_name\' requires the FITS extension.' def test_data_range(self) -> None: module = FitsWritingModule(file_name='test.fits', name_in='write4', output_dir=None, data_tag='images', data_range=(0, 10), overwrite=True, subset_size=None) self.pipeline.add_module(module) self.pipeline.run_module('write4') def test_not_overwritten(self) -> None: module = FitsWritingModule(file_name='test.fits', name_in='write5', output_dir=None, data_tag='images', data_range=None, overwrite=False, subset_size=None) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('write5') assert len(warning) == 1 assert warning[0].message.args[0] == 'Filename already present. Use overwrite=True ' \ 'to overwrite an existing FITS file.' def test_subset_size(self) -> None: module = FitsWritingModule(file_name='test.fits', name_in='write6', output_dir=None, data_tag='images', data_range=None, overwrite=True, subset_size=10) self.pipeline.add_module(module) self.pipeline.run_module('write6') def test_subset_size_data_range(self) -> None: module = FitsWritingModule(file_name='test.fits', name_in='write7', output_dir=None, data_tag='images', data_range=(8, 18), overwrite=True, subset_size=10) self.pipeline.add_module(module) self.pipeline.run_module('write7') def test_attribute_length(self) -> None: text = 'long_text_long_text_long_text_long_text_long_text_long_text_long_text_long_text' self.pipeline.set_attribute('images', 'short', 'value', static=True) self.pipeline.set_attribute('images', 'longer_than_eight1', 'value', static=True) self.pipeline.set_attribute('images', 'longer_than_eight2', text, static=True) module = FitsWritingModule(file_name='test.fits', name_in='write8', output_dir=None, data_tag='images', data_range=None, overwrite=True, subset_size=None) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('write8') assert len(warning) == 1 assert warning[0].message.args[0] == 'Key \'hierarch longer_than_eight2\' with value ' \ '\'long_text_long_text_long_text_long_text_long_' \ 'text_long_text_long_text_long_text\' is too ' \ 'long for the FITS format. To avoid an error, ' \ 'the value was truncated to \'long_text_long_text' \ '_long_text_long_text_long_tex\'.' 
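
# --- Illustrative usage sketch (not part of the test suite) ------------------
# A minimal, hedged example of the FITS read/write round trip that the tests in
# this file exercise. It reuses the classes imported at the top of this file and
# only constructor arguments that appear in the tests above; the folder names
# and the 'science' tag are placeholders, not paths from the package.

if __name__ == '__main__':

    # Pipeline with (hypothetical) working, input, and output folders
    example_pipeline = Pypeline('working_place/', 'input_place/', 'output_place/')

    # Import all FITS files from the input folder under the database tag 'science'
    example_pipeline.add_module(FitsReadingModule(name_in='example_read',
                                                  input_dir='input_place/',
                                                  image_tag='science',
                                                  overwrite=True,
                                                  check=True))

    # Export the same images again as a single FITS file in the output folder
    example_pipeline.add_module(FitsWritingModule(name_in='example_write',
                                                  file_name='science.fits',
                                                  output_dir=None,
                                                  data_tag='science',
                                                  data_range=None,
                                                  overwrite=True))

    example_pipeline.run_module('example_read')
    example_pipeline.run_module('example_write')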
PynPoint-0.11.0/tests/test_readwrite/test_hdf5reading.py000066400000000000000000000066171450275315200233470ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.hdf5reading import Hdf5ReadingModule from pynpoint.util.tests import create_config, create_random, remove_test_data class TestHdf5Reading: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_random(self.test_dir+'data') create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['data']) def test_hdf5_reading(self) -> None: data = np.random.normal(loc=0, scale=2e-4, size=(4, 10, 10)) with h5py.File(self.test_dir+'data/PynPoint_database.hdf5', 'a') as hdf_file: hdf_file.create_dataset('extra', data=data) hdf_file.create_dataset('header_extra/PARANG', data=[1., 2., 3., 4.]) module = Hdf5ReadingModule(name_in='read1', input_filename='PynPoint_database.hdf5', input_dir=self.test_dir+'data', tag_dictionary={'images': 'images'}) self.pipeline.add_module(module) self.pipeline.run_module('read1') data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(0.007153603490533874, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_dictionary_none(self) -> None: module = Hdf5ReadingModule(name_in='read2', input_filename='PynPoint_database.hdf5', input_dir=self.test_dir+'data', tag_dictionary=None) self.pipeline.add_module(module) self.pipeline.run_module('read2') data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(0.007153603490533874, rel=self.limit, abs=0.) assert data.shape == (5, 11, 11) def test_wrong_tag(self) -> None: module = Hdf5ReadingModule(name_in='read3', input_filename='PynPoint_database.hdf5', input_dir=self.test_dir+'data', tag_dictionary={'test': 'test'}) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read3') assert len(warning) == 1 assert warning[0].message.args[0] == 'The dataset with tag name \'test\' is not found in ' \ 'the HDF5 file.' with h5py.File(self.test_dir+'data/PynPoint_database.hdf5', 'r') as hdf_file: assert set(hdf_file.keys()) == set(['extra', 'header_extra', 'header_images', 'images']) def test_no_input_filename(self) -> None: module = Hdf5ReadingModule(name_in='read4', input_filename=None, input_dir=self.test_dir+'data', tag_dictionary=None) self.pipeline.add_module(module) self.pipeline.run_module('read4') data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(0.007153603490533874, rel=self.limit, abs=0.) 
assert data.shape == (5, 11, 11) PynPoint-0.11.0/tests/test_readwrite/test_hdf5writing.py000066400000000000000000000074751450275315200234240ustar00rootroot00000000000000import os import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.hdf5reading import Hdf5ReadingModule from pynpoint.readwrite.hdf5writing import Hdf5WritingModule from pynpoint.util.tests import create_config, create_random, remove_test_data class TestHdf5Writing: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_random(self.test_dir) create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, files=['test.hdf5']) def test_hdf5_writing(self) -> None: module = Hdf5WritingModule(name_in='write1', file_name='test.hdf5', output_dir=None, tag_dictionary={'images': 'data1'}, keep_attributes=True, overwrite=True) self.pipeline.add_module(module) self.pipeline.run_module('write1') def test_no_data_tag(self) -> None: module = Hdf5WritingModule(name_in='write2', file_name='test.hdf5', output_dir=None, tag_dictionary={'empty': 'empty'}, keep_attributes=True, overwrite=False) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('write2') assert len(warning) == 1 assert warning[0].message.args[0] == 'No data under the tag which is linked by the ' \ 'InputPort.' def test_overwrite_false(self) -> None: module = Hdf5WritingModule(name_in='write3', file_name='test.hdf5', output_dir=None, tag_dictionary={'images': 'data2'}, keep_attributes=True, overwrite=False) self.pipeline.add_module(module) self.pipeline.run_module('write3') def test_dictionary_none(self) -> None: module = Hdf5WritingModule(name_in='write4', file_name='test.hdf5', output_dir=None, tag_dictionary=None, keep_attributes=True, overwrite=False) self.pipeline.add_module(module) self.pipeline.run_module('write4') def test_hdf5_reading(self) -> None: module = Hdf5ReadingModule(name_in='read', input_filename='test.hdf5', input_dir=self.test_dir, tag_dictionary={'data1': 'data1', 'data2': 'data2'}) self.pipeline.add_module(module) self.pipeline.run_module('read') data1 = self.pipeline.get_data('data1') data2 = self.pipeline.get_data('data2') data3 = self.pipeline.get_data('images') assert data1 == pytest.approx(data2, rel=self.limit, abs=0.) assert data2 == pytest.approx(data3, rel=self.limit, abs=0.) attribute1 = self.pipeline.get_attribute('images', 'PARANG', static=False) attribute2 = self.pipeline.get_attribute('data1', 'PARANG', static=False) attribute3 = self.pipeline.get_attribute('data2', 'PARANG', static=False) assert np.allclose(attribute1, attribute2, rtol=self.limit, atol=0.) assert np.allclose(attribute2, attribute3, rtol=self.limit, atol=0.) 
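
# --- Illustrative usage sketch (not part of the test suite) ------------------
# A minimal, hedged sketch of moving a dataset between PynPoint databases with
# the HDF5 modules exercised above. It relies on the imports at the top of this
# file; the folder names, the 'science' tag, and the file name are placeholders.

if __name__ == '__main__':

    example_pipeline = Pypeline('working_place/', 'input_place/', 'output_place/')

    # Export the database entry 'science' to science_subset.hdf5, storing it
    # there under the tag 'science_copy' and keeping its non-static attributes
    example_pipeline.add_module(Hdf5WritingModule(name_in='example_export',
                                                  file_name='science_subset.hdf5',
                                                  output_dir=None,
                                                  tag_dictionary={'science': 'science_copy'},
                                                  keep_attributes=True,
                                                  overwrite=True))

    # Read the exported file back in, mapping 'science_copy' onto a new tag
    example_pipeline.add_module(Hdf5ReadingModule(name_in='example_import',
                                                  input_filename='science_subset.hdf5',
                                                  input_dir='output_place/',
                                                  tag_dictionary={'science_copy': 'science_imported'}))

    example_pipeline.run_module('example_export')
    example_pipeline.run_module('example_import')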
PynPoint-0.11.0/tests/test_readwrite/test_nearreading.py000066400000000000000000000246511450275315200234440ustar00rootroot00000000000000import os import pytest import numpy as np from astropy.io import fits from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.nearreading import NearReadingModule from pynpoint.util.tests import create_config, create_near_data, remove_test_data class TestNearReading: def setup_class(self) -> None: self.limit = 1e-8 self.test_dir = os.path.dirname(__file__) + '/' self.fitsfile = self.test_dir + 'near/images_0.fits' create_near_data(path=self.test_dir + 'near') create_config(self.test_dir + 'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) self.pipeline.set_attribute('config', 'NFRAMES', 'ESO DET CHOP NCYCLES', static=True) self.pipeline.set_attribute('config', 'EXP_NO', 'ESO TPL EXPNO', static=True) self.pipeline.set_attribute('config', 'NDIT', 'None', static=True) self.pipeline.set_attribute('config', 'PARANG_START', 'None', static=True) self.pipeline.set_attribute('config', 'PARANG_END', 'None', static=True) self.pipeline.set_attribute('config', 'DITHER_X', 'None', static=True) self.pipeline.set_attribute('config', 'DITHER_Y', 'None', static=True) self.pipeline.set_attribute('config', 'PIXSCALE', 0.045, static=True) self.pipeline.set_attribute('config', 'MEMORY', 100, static=True) self.positions = ('chopa', 'chopb') def teardown_class(self) -> None: remove_test_data(self.test_dir, folders=['near']) def test_near_read(self) -> None: module = NearReadingModule(name_in='read1a', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) self.pipeline.run_module('read1a') for item in self.positions: data = self.pipeline.get_data(item) assert np.mean(data) == pytest.approx(0.060582854, rel=self.limit, abs=0.) assert data.shape == (10, 10, 10) def test_near_subtract_crop_mean(self) -> None: module = NearReadingModule(name_in='read1b', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1], subtract=True, crop=(None, None, 0.3), combine='mean') self.pipeline.add_module(module) self.pipeline.run_module('read1b') data = self.pipeline.get_data(self.positions[0]) assert np.mean(data) == pytest.approx(0., rel=self.limit, abs=0.) assert data.shape == (2, 7, 7) data = self.pipeline.get_data(self.positions[1]) assert np.mean(data) == pytest.approx(0., rel=self.limit, abs=0.) assert data.shape == (2, 7, 7) def test_near_median(self) -> None: module = NearReadingModule(name_in='read1c', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1], combine='median') self.pipeline.add_module(module) self.pipeline.run_module('read1c') data = self.pipeline.get_data(self.positions[0]) assert np.mean(data) == pytest.approx(0.060582854, rel=self.limit, abs=0.) assert data.shape == (2, 10, 10) data = self.pipeline.get_data(self.positions[1]) assert np.mean(data) == pytest.approx(0.060582854, rel=self.limit, abs=0.) 
assert data.shape == (2, 10, 10) def test_static_not_found(self) -> None: self.pipeline.set_attribute('config', 'DIT', 'Test', static=True) module = NearReadingModule(name_in='read2', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read2') assert len(warning) == 4 for item in warning: assert item.message.args[0] == 'Static attribute DIT (=Test) not found in the FITS ' \ 'header.' self.pipeline.set_attribute('config', 'DIT', 'ESO DET SEQ1 DIT', static=True) def test_nonstatic_not_found(self) -> None: self.pipeline.set_attribute('config', 'NDIT', 'Test', static=True) module = NearReadingModule(name_in='read3', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read3') assert len(warning) == 4 for item in warning: assert item.message.args[0] == 'Non-static attribute NDIT (=Test) not found in the ' \ 'FITS header.' self.pipeline.set_attribute('config', 'NDIT', 'None', static=True) def test_check_header(self) -> None: with fits.open(self.fitsfile) as hdulist: hdulist[0].header['ESO DET CHOP ST'] = 'F' hdulist[0].header['ESO DET CHOP CYCSKIP'] = 1 hdulist[0].header['ESO DET CHOP CYCSUM'] = 'T' hdulist.writeto(self.fitsfile, overwrite=True) module = NearReadingModule(name_in='read4', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read4') assert len(warning) == 3 assert warning[0].message.args[0] == 'Dataset was obtained without chopping.' assert warning[1].message.args[0] == 'Chop cycles (1) have been skipped.' assert warning[2].message.args[0] == 'FITS file contains averaged images.' with fits.open(self.fitsfile) as hdulist: hdulist[0].header['ESO DET CHOP ST'] = 'T' hdulist[0].header['ESO DET CHOP CYCSKIP'] = 0 hdulist[0].header['ESO DET CHOP CYCSUM'] = 'F' hdulist.writeto(self.fitsfile, overwrite=True) def test_frame_type_invalid(self) -> None: with fits.open(self.fitsfile) as hdulist: hdulist[10].header['ESO DET FRAM TYPE'] = 'Test' hdulist.writeto(self.fitsfile, overwrite=True) module = NearReadingModule(name_in='read5', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('read5') assert str(error.value) == 'Frame type (Test) not a valid value. Expecting HCYCLE1 or ' \ 'HCYCLE2 as value for ESO DET FRAM TYPE.' def test_frame_type_missing(self) -> None: with fits.open(self.fitsfile) as hdulist: hdulist[10].header.remove('ESO DET FRAM TYPE') hdulist.writeto(self.fitsfile, overwrite=True) module = NearReadingModule(name_in='read6', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.raises(ValueError) as error: self.pipeline.run_module('read6') assert str(error.value) == 'Frame type not found in the FITS header. Image number: 9.' 
def test_same_cycle(self) -> None: with fits.open(self.fitsfile) as hdulist: with pytest.warns(UserWarning) as warning: hdulist[10].header['ESO DET FRAM TYPE'] = 'HCYCLE1' assert len(warning) == 1 assert warning[0].message.args[0] == 'Keyword name \'ESO DET FRAM TYPE\' is greater ' \ 'than 8 characters or contains characters not ' \ 'allowed by the FITS standard; a HIERARCH card ' \ 'will be created.' hdulist.writeto(self.fitsfile, overwrite=True) module = NearReadingModule(name_in='read7', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read7') assert len(warning) == 2 assert warning[0].message.args[0] == 'Previous and current chop position (HCYCLE1) are ' \ 'the same. Skipping the current image.' assert warning[1].message.args[0] == 'The number of images is not equal for chop A and ' \ 'chop B.' def test_odd_number_images(self) -> None: with fits.open(self.fitsfile) as hdulist: del hdulist[11] hdulist.writeto(self.fitsfile, overwrite=True) module = NearReadingModule(name_in='read8', input_dir=self.test_dir+'near', chopa_out_tag=self.positions[0], chopb_out_tag=self.positions[1]) self.pipeline.add_module(module) with pytest.warns(UserWarning) as warning: self.pipeline.run_module('read8') assert len(warning) == 2 assert warning[0].message.args[0] == f'FITS file contains odd number of images: ' \ f'{self.fitsfile}' assert warning[1].message.args[0] == 'The number of chop cycles (5) is not equal to ' \ 'half the number of available HDU images (4).' PynPoint-0.11.0/tests/test_readwrite/test_textwriting.py000066400000000000000000000060071450275315200235500ustar00rootroot00000000000000import os import h5py import pytest import numpy as np from pynpoint.core.pypeline import Pypeline from pynpoint.readwrite.textwriting import TextWritingModule from pynpoint.util.tests import create_config, create_random, remove_test_data class TestTextWriting: def setup_class(self) -> None: self.limit = 1e-10 self.test_dir = os.path.dirname(__file__) + '/' create_random(self.test_dir, nimages=1) create_config(self.test_dir+'PynPoint_config.ini') self.pipeline = Pypeline(self.test_dir, self.test_dir, self.test_dir) def teardown_class(self) -> None: remove_test_data(self.test_dir, files=['image.dat', 'data.dat']) def test_input_data(self) -> None: data = self.pipeline.get_data('images') assert np.sum(data) == pytest.approx(0.0008557279524431129, rel=self.limit, abs=0.) assert data.shape == (1, 11, 11) def test_text_writing(self) -> None: text_write = TextWritingModule(name_in='text_write', data_tag='images', file_name='image.dat', output_dir=None, header=None) self.pipeline.add_module(text_write) self.pipeline.run_module('text_write') data = np.loadtxt(self.test_dir+'image.dat') assert np.sum(data) == pytest.approx(0.0008557279524431129, rel=self.limit, abs=0.) assert data.shape == (11, 11) def test_text_writing_ndim(self) -> None: data_4d = np.random.normal(loc=0, scale=2e-4, size=(5, 5, 5, 5)) with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file.create_dataset('data_4d', data=data_4d) text_write = TextWritingModule(name_in='write_4d', data_tag='data_4d', file_name='data.dat', output_dir=None, header=None) self.pipeline.add_module(text_write) with pytest.raises(ValueError) as error: self.pipeline.run_module('write_4d') assert str(error.value) == 'Only 1D or 2D arrays can be written to a text file.' 
def test_text_writing_int(self) -> None: data_int = np.arange(1, 11, 1) with h5py.File(self.test_dir+'PynPoint_database.hdf5', 'a') as hdf_file: hdf_file.create_dataset('data_int', data=data_int) text_write = TextWritingModule(name_in='write_int', data_tag='data_int', file_name='data.dat', output_dir=None, header=None) self.pipeline.add_module(text_write) self.pipeline.run_module('write_int') data = np.loadtxt(self.test_dir+'data.dat') assert data == pytest.approx(data_int, rel=self.limit, abs=0.) assert data.shape == (10, )
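
# --- Illustrative usage sketch (not part of the test suite) ------------------
# A minimal, hedged sketch of exporting a dataset to a plain text file with
# TextWritingModule, as exercised by the tests above. It relies on the imports
# at the top of this file; the folder names and the 'centering' tag are
# placeholders. As test_text_writing_ndim shows, only 1D or 2D data sets can be
# written to a text file.

if __name__ == '__main__':

    example_pipeline = Pypeline('working_place/', 'input_place/', 'output_place/')

    # Write the database entry 'centering' to centering.dat in the output
    # folder; header=None (as in the tests) writes the data without a header
    example_pipeline.add_module(TextWritingModule(name_in='example_text',
                                                  data_tag='centering',
                                                  file_name='centering.dat',
                                                  output_dir=None,
                                                  header=None))

    example_pipeline.run_module('example_text')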