asdf-2.5.1/0000755000446400020070000000000013605166132014633 5ustar eslavichSTSCI\science00000000000000
asdf-2.5.1/.gitignore0000644000446400020070000000116113605165746016634 0ustar eslavichSTSCI\science00000000000000
# Compiled files
*.py[co]
*.a
*.o
*.so
__pycache__

# Ignore .c files by default to avoid including generated code. If you want to
# add a non-generated .c extension, use `git add -f filename.c`.
*.c

# Other generated files
*/cython_version.py
htmlcov
.coverage
MANIFEST

# Tox
.tox
.pytest_cache

# Sphinx
docs/api
docs/_build

# Eclipse editor project files
.project
.pydevproject
.settings

# Pycharm editor project files
.idea

# Packages/installer info
*.egg
*.egg-info
dist
build
eggs
.eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
distribute-*.tar.gz
pip-wheel-metadata

# Other
.*.swp
*~

# Mac OSX
.DS_Store
asdf-2.5.1/.gitmodules0000644000446400020070000000015513605165746017023 0ustar eslavichSTSCI\science00000000000000
[submodule "asdf-standard"]
	path = asdf-standard
	url = https://github.com/spacetelescope/asdf-standard.git
asdf-2.5.1/.rtd-environment.yml0000644000446400020070000000102513605165746020577 0ustar eslavichSTSCI\science00000000000000
# Note: it's important that none of the packages below depend on ASDF,
# because if they do, the conda version of ASDF will get installed. If that
# happens, then that version will take precedence over the latest developer
# version of ASDF that gets installed just before the documentation build,
# and Sphinx will pick up only the older stable conda package.
name: asdf
dependencies:
- python>=3
- pip>=10.0
- sphinx<1.7
- numpy
- pyyaml
- jsonschema>=2.3,<4
- semantic_version
- pytest
- pip:
  - ".[docs]"
asdf-2.5.1/.travis.yml0000644000446400020070000000614513605165767016757 0ustar eslavichSTSCI\science00000000000000
# We set the language to c because python isn't supported on the MacOS X nodes
# on Travis. However, the language ends up being irrelevant anyway, since we
# install Python ourselves using conda.
language: c

os:
    - linux

# Use Travis' container-based architecture
sudo: false

addons:
    apt:
        packages:
            - graphviz
            - texlive-latex-extra
            - dvipng

env:
    global:
        # The following versions are the 'default' for tests, unless
        # overridden underneath. They are defined here in order to save having
        # to repeat them for all configurations.
        - TOX_CMD='tox --'
        - TOX_ARGS='--remote-data'
        # This is the Python version that will be used by the parent conda
        # environment, but it will not be used in the test environments
        # themselves.
        - PYTHON_VERSION=3.6

    matrix:
        # Make sure that installation does not fail
        - TOXENV='py36-stable' TOX_CMD='tox --notest' TOX_ARGS=''
        # Make sure README will display properly on pypi
        - TOXENV='checkdocs'
        - TOXENV='py35-stable'
        - TOXENV='py36-stable'
        - TOXENV='py37-stable'

matrix:
    fast_finish: true
    include:
        # Do a coverage test
        - env: TOXENV='coverage' TOX_ARGS=''
        # Perform a sanity check of packaging using twine
        - env: TOXENV='twine' TOX_ARGS=''
        # Check for sphinx doc build warnings - we do this first because it
        # may run for a long time
        - env: TOXENV='docbuild' TOX_ARGS=''
        # Do a code style check
        - env: TOXENV='style' TOX_ARGS=''
        # try older numpy versions
        - env: TOXENV='py35-numpy11'
        - env: TOXENV='py36-numpy12'
        # test against oldest compatible versions of all dependencies
        - env: TOXENV='py35-legacy'
        # also test against development versions of Astropy and GWCS
        - env: TOXENV='py37-astrodev'
        # Test against development version of numpy (this job can fail)
        - env: TOXENV='py37-numpydev'
        # Test against prerelease versions of all dependencies
        - env: TOXENV='prerelease'
        # Test against an installed asdf package
        - env: TOXENV='packaged'
        # Try a run on OSX
        - os: osx
          env: TOXENV='py37-stable'
        - os: windows
          env: TOXENV='py37-stable' TOX_ARGS='--remote-data'
        - os: windows
          env: TOXENV='py35-stable'
        - os: windows
          env: TOXENV='py36-stable' TOX_ARGS='--remote-data'
        # Windows test against development version of numpy (this job can fail)
        - os: windows
          env: TOXENV='py37-numpydev'

    allow_failures:
        # There doesn't appear to be a stable version of numpy available for
        # Py37 on Windows at the moment
        - os: windows
          env: TOXENV='py37-stable' TOX_ARGS='--remote-data'
        - env: TOXENV='py37-numpydev'
        - env: TOXENV='prerelease'

install:
    - git clone git://github.com/astropy/ci-helpers.git
    - source ci-helpers/travis/setup_conda.sh
    - conda install openssl
    - pip install tox tox-conda>=0.2

script:
    - $TOX_CMD $TOX_ARGS
asdf-2.5.1/CHANGES.rst0000644000446400020070000003332713605166107016447 0ustar eslavichSTSCI\science00000000000000
2.6 (unreleased)
----------------

- Add ``package`` property to extension metadata, and deprecate use of
  ``software``. [#728]

- AsdfDeprecationWarning now subclasses DeprecationWarning. [#710]

2.5.1 (2020-01-07)
------------------

- Fix bug in test causing failure when test suite is run against an installed
  asdf package. [#732]

2.5.0 (2019-12-23)
------------------

- Added asdf-standard 1.4.0 to the list of supported versions. [#704]
- Fix load_schema LRU cache memory usage issue [#682]
- Add convenience method for fetching the default resolver [#682]
- ``SpecItem`` and ``Spec`` were deprecated in ``semantic_version`` and were
  replaced with ``SimpleSpec``. [#715]
- Pinned the minimum required ``semantic_version`` to 2.8. [#715]
- Fix bug causing segfault after update of a memory-mapped file. [#716]

2.4.2 (2019-08-29)
------------------

- Limit the version of ``semantic_version`` to <=2.6.0 to work around a
  Deprecation warning. [#700]

2.4.1 (2019-08-27)
------------------

- Define the ``in`` operator for top-level ``AsdfFile`` objects. [#623]

- Overhaul packaging infrastructure. Remove use of ``astropy_helpers``. [#670]

- Automatically register schema tester plugin. Do not enable schema tests by
  default. Add configuration setting and command line option to enable schema
  tests. [#676]

- Enable handling of subclasses of known custom types by using decorators for
  convenience. [#563]

- Add support for jsonschema 3.x. [#684]

2.3.4 (unreleased)
------------------

- Fix bug in ``NDArrayType.__len__``.
  It must be a method, not a property. [#673]

2.3.3 (2019-04-02)
------------------

- Pass ``ignore_unrecognized_tag`` setting through to ASDF-in-FITS. [#650]

- Use ``$schema`` keyword if available to determine meta-schema to use when
  testing whether schemas themselves are valid. [#654]

- Take into account resolvers from installed extensions when loading schemas
  for validation. [#655]

- Fix compatibility issue with new release of ``pyyaml`` (version 5.1). [#662]

- Allow use of ``pathlib.Path`` objects for ``custom_schema`` option. [#663]

2.3.2 (2019-02-19)
------------------

- Fix bug that occurs when comparing installed extension version with that
  found in file. [#641]

2.3.1 (2018-12-20)
------------------

- Provide source information for ``AsdfDeprecationWarning`` that comes from
  extensions from external packages. [#629]

- Ensure that top-level accesses to the tree outside a closed context handler
  result in an ``OSError``. [#628]

- Fix the way ``generic_io`` handles URIs and paths on Windows. [#632]

- Fix bug in ``asdftool`` that prevented ``extract`` command from being
  visible. [#633]

2.3.0 (2018-11-28)
------------------

- Storage of arbitrary precision integers is now provided by
  ``asdf.IntegerType``. Reading a file with integer literals that are too
  large now causes only a warning instead of a validation error. This is to
  provide backwards compatibility for files that were created with a buggy
  version of ASDF (see #553 below). [#566]

- Remove WCS tags. These are now provided by the `gwcs package `_. [#593]

- Deprecate the ``asdf.asdftypes`` module in favor of ``asdf.types``. [#611]

- Support use of ``pathlib.Path`` with ``asdf.open`` and
  ``AsdfFile.write_to``. [#617]

- Update ASDF Standard submodule to version 1.3.0.

2.2.1 (2018-11-15)
------------------

- Fix an issue with the README that caused sporadic installation failures and
  also prevented the long description from being rendered on pypi. [#607]

2.2.0 (2018-11-14)
------------------

- Add new parameter ``lazy_load`` to ``AsdfFile.open``. It is ``True`` by
  default and preserves the default behavior. ``False`` detaches the loaded
  tree from the underlying file: all blocks are fully read and numpy arrays
  are materialized. Thus it becomes safe to close the file and continue using
  ``AsdfFile.tree``. However, ``copy_arrays`` parameter is still effective
  and the active memory maps may still require the file to stay open in case
  ``copy_arrays`` is ``False``. [#573]

- Add ``AsdfConversionWarning`` for failures to convert ASDF tree into custom
  types. This warning is converted to an error when using
  ``assert_roundtrip_tree`` for tests. [#583]

- Deprecate ``asdf.AsdfFile.open`` in favor of ``asdf.open``. [#579]

- Add readonly protection to memory mapped arrays when the underlying file
  handle is readonly. [#579]

2.1.2 (2018-11-13)
------------------

- Make sure that all types corresponding to core tags are added to the type
  index before any others. This fixes a bug that was related to the way that
  subclass tags were overwritten by external extensions. [#598]

2.1.1 (2018-11-01)
------------------

- Make sure extension metadata is written even when constructing the ASDF
  tree on-the-fly. [#549]

- Fix large integer validation when storing `numpy` integer literals in the
  tree. [#553]

- Fix bug that caused subclass of external type to be serialized by the wrong
  tag. [#560]

- Fix bug that occurred when attempting to open invalid file but Astropy
  import failed while checking for ASDF-in-FITS.
  [#562]

- Fix bug that caused tree creation to fail when unable to locate a schema
  file for an unknown tag. This now simply causes a warning, and the
  offending node is converted to basic Python data structures. [#571]

2.1.0 (2018-09-25)
------------------

- Add API function for retrieving history entries. [#501]

- Store ASDF-in-FITS data inside a 1x1 BINTABLE HDU. [#519]

- Allow implicit conversion of ``namedtuple`` into serializable types. [#534]

- Fix bug that prevented use of ASDF-in-FITS with HDUs that have names with
  underscores. [#543]

- Add option to ``generic_io.get_file`` to close underlying file handle. [#544]

- Add top-level ``keys`` method to ``AsdfFile`` to access tree keys. [#545]

2.0.3 (2018-09-06)
------------------

- Update asdf-standard to reflect more stringent (and, consequently, more
  correct) requirements on the formatting of complex numbers. [#526]

- Fix bug with dangling file handle when using ASDF-in-FITS. [#533]

- Fix bug that prevented fortran-order arrays from being serialized
  properly. [#539]

2.0.2 (2018-07-27)
------------------

- Allow serialization of broadcasted ``numpy`` arrays. [#507]

- Fix bug that caused result of ``set_array_compression`` to be overwritten
  by ``all_array_compression`` argument to ``write_to``. [#510]

- Add workaround for Python OSX write limit bug (see
  https://bugs.python.org/issue24658). [#521]

- Fix bug with custom schema validation when using out-of-line definitions
  in schema file. [#522]

2.0.1 (2018-05-08)
------------------

- Allow test suite to run even when package is not installed. [#502]

2.0.0 (2018-04-19)
------------------

- Astropy-specific tags have moved to Astropy core package. [#359]

- ICRSCoord tag has moved to Astropy core package. [#401]

- Remove support for Python 2. [#409]

- Create ``pytest`` plugin to be used for testing schema files. [#425]

- Add metadata about extensions used to create a file to the history section
  of the file itself. [#475]

- Remove hard dependency on Astropy. It is still required for testing, and
  for processing ASDF-in-FITS files. [#476]

- Add command for extracting ASDF extension from ASDF-in-FITS file and
  converting it to a pure ASDF file. [#477]

- Add command for removing ASDF extension from ASDF-in-FITS file. [#480]

- Add an ``ExternalArrayReference`` type for referencing arrays in external
  files. [#400]

- Improve the way URIs are detected for ASDF-in-FITS files in order to fix
  bug with reading gzipped ASDF-in-FITS files. [#416]

- Explicitly disallow access to entire tree for ASDF file objects that have
  been closed. [#407]

- Install and load extensions using ``setuptools`` entry points. [#384]

- Automatically initialize ``asdf-standard`` submodule in ``setup.py``. [#398]

- Allow foreign tags to be resolved in schemas and files. Deprecate
  ``tag_to_schema_resolver`` property for ``AsdfFile`` and
  ``AsdfExtensionList``. [#399]

- Fix bug that caused serialized FITS tables to be duplicated in embedded
  ASDF HDU. [#411]

- Create and use a new non-standard FITS extension instead of ImageHDU for
  storing ASDF files embedded in FITS. Explicitly remove support for the
  ``.update`` method of ``AsdfInFits``, even though it didn't appear to be
  working previously. [#412]

- Allow package to be imported and used from source directory and builds in
  development mode. [#420]

- Add command to ``asdftool`` for querying installed extensions. [#418]

- Implement optional top-level validation pass using custom schema.
  This can be used to ensure that particular ASDF files follow custom
  conventions beyond those enforced by the standard. [#442]

- Remove restrictions affecting top-level attributes ``data``, ``wcs``, and
  ``fits``. Bump top-level ASDF schema version to v1.1.0. [#444]

1.3.3 (2018-03-01)
------------------

- Update test infrastructure to rely on new Astropy v3.0 plugins. [#461]

- Disable use of 2to3. This was causing test failures on Debian builds. [#463]

1.3.2 (2018-02-22)
------------------

- Updates to allow this version of ASDF to be compatible with Astropy
  v3.0. [#450]

- Remove tests that are no longer relevant due to latest updates to Astropy's
  testing infrastructure. [#458]

1.3.1 (2017-11-02)
------------------

- Relax requirement on ``semantic_version`` version to 2.3.1. [#361]

- Fix bug when retrieving file format version from new ASDF file. [#365]

- Fix bug when duplicating inline arrays. [#370]

- Allow tag references using the tag URI scheme to be resolved in schema
  files. [#371]

1.3.0 (2017-10-24)
------------------

- Fixed a bug in reading data from an "http:" url. [#231]

- Implements v 1.1.0 of the asdf schemas. [#233]

- Added a function ``is_asdf_file`` which inspects the input and returns
  ``True`` or ``False``. [#239]

- The ``open`` method of ``AsdfInFits`` now accepts URIs and open file
  handles in addition to HDULists. The ``open`` method of ``AsdfFile`` will
  now try to parse the given URI or file handle as ``AsdfInFits`` if it is
  not obviously a regular ASDF file. [#241]

- Updated WCS frame fields ``obsgeoloc`` and ``obsgeovel`` to reflect recent
  updates in ``astropy`` that changed representation from ``Quantity`` to
  ``CartesianRepresentation``. Updated to reflect ``astropy`` change that
  combines ``galcen_ra`` and ``galcen_dec`` into ``galcen_coord``. Added
  support for new field ``galcen_v_sun``. Added support for required module
  versions for tag classes. [#244]

- Added support for ``lz4`` compression algorithm [#258]. Also added support
  for using a different compression algorithm for writing out a file than the
  one that was used for reading the file (e.g. to convert blocks to use a
  different compression algorithm) [#257]

- Tag classes may now use an optional ``supported_versions`` attribute to
  declare exclusive support for particular versions of the corresponding
  schema. If this attribute is omitted (as it is for most existing tag
  classes), the tag is assumed to be compatible with all versions of the
  corresponding schema. If ``supported_versions`` is provided, the tag class
  implementation can include code that is conditioned on the schema version.
  If an incompatible schema is encountered, or if deserialization of the
  tagged object fails with an exception, a raw Python data structure will be
  returned. [#272]

- Added option to ``AsdfFile.open`` to allow suppression of warning messages
  when mismatched schema versions are encountered. [#294]

- Added a diff tool to ``asdftool`` to allow for visual comparison of pairs
  of ASDF files. [#286]

- Added command to ``asdftool`` to display available tags. [#303]

- When possible, display name of ASDF file that caused version mismatch
  warning. [#306]

- Issue a warning when an unrecognized tag is encountered. [#295] This
  warning is silenced by default, but can be enabled with a parameter to the
  ``AsdfFile`` constructor, or to ``AsdfFile.open``. Also added an option for
  ignoring warnings from unrecognized schema tags. [#319]

- Fix bug with loading JSON schemas in Python 3.5. [#317]

- Remove all remnants of support for Python 2.6.
  [#333]

- Fix issues with the type index used for writing out ASDF files. This
  ensures that items in the type index are not inadvertently overwritten by
  later versions of the same type. It also makes sure that schema example
  tests run against the correct version of the ASDF standard. [#350]

- Update time schema to reflect changes in astropy. This fixes an outstanding
  bug. [#343]

- Add ``copy_arrays`` option to ``asdf.open`` to control whether or not
  underlying array data should be memory mapped, if possible. [#355]

- Allow the tree to be accessed using top-level ``__getitem__`` and
  ``__setitem__``. [#352]

1.2.1 (2016-11-07)
------------------

- Make asdf conditionally dependent on the version of astropy to allow
  running it with older versions of astropy. [#228]

1.2.0 (2016-10-04)
------------------

- Added Tabular model. [#214]
- Forced new blocks to be contiguous [#221]
- Rewrote code which tags complex objects [#223]
- Fixed version error message [#224]

1.0.5 (2016-06-28)
------------------

- Fixed a memory leak when reading wcs that grew memory to over 10 Gb. [#200]

1.0.4 (2016-05-25)
------------------

- Added wrapper class for astropy.core.Time, TaggedTime. [#198]

1.0.2 (2016-02-29)
------------------

- Renamed package to ASDF. [#190]

- Stopped support for Python 2.6 [#191]

1.0.1 (2016-01-08)
------------------

- Fixed installation from the source tarball on Python 3. [#187]

- Fixed error handling when opening ASDF files not supported by the current
  version of asdf. [#178]

- Fixed parse error that could occur sometimes when YAML data was read from
  a stream. [#183]

1.0.0 (2015-09-18)
------------------

- Initial release.
asdf-2.5.1/CODE_OF_CONDUCT.md0000644000446400020070000000626413567314375017445 0ustar eslavichSTSCI\science00000000000000
# Spacetelescope Open Source Code of Conduct

We expect all "spacetelescope" organization projects to adopt a code of
conduct that ensures a productive, respectful environment for all open source
contributors and participants. We are committed to providing a strong and
enforced code of conduct and expect everyone in our community to follow these
guidelines when interacting with others in all forums. Our goal is to keep
ours a positive, inclusive, successful, and growing community.

The community of participants in open source Astronomy projects is made up of
members from around the globe with a diverse set of skills, personalities, and
experiences. It is through these differences that our community experiences
success and continued growth.

As members of the community,

- We pledge to treat all people with respect and provide a harassment- and
  bullying-free environment, regardless of sex, sexual orientation and/or
  gender identity, disability, physical appearance, body size, race,
  nationality, ethnicity, and religion. In particular, sexual language and
  imagery, sexist, racist, or otherwise exclusionary jokes are not
  appropriate.

- We pledge to respect the work of others by recognizing
  acknowledgment/citation requests of original authors. As authors, we pledge
  to be explicit about how we want our own work to be cited or acknowledged.

- We pledge to welcome those interested in joining the community, and realize
  that including people with a variety of opinions and backgrounds will only
  serve to enrich our community.
  In particular, discussions relating to pros/cons of various technologies,
  programming languages, and so on are welcome, but these should be done with
  respect, taking proactive measures to ensure that all participants are
  heard and feel confident that they can freely express their opinions.

- We pledge to welcome questions and answer them respectfully, paying
  particular attention to those new to the community. We pledge to provide
  respectful criticisms and feedback in forums, especially in discussion
  threads resulting from code contributions.

- We pledge to be conscientious of the perceptions of the wider community and
  to respond to criticism respectfully. We will strive to model behaviors
  that encourage productive debate and disagreement, both within our
  community and where we are criticized. We will treat those outside our
  community with the same respect as people within our community.

- We pledge to help the entire community follow the code of conduct, and to
  not remain silent when we see violations of the code of conduct. We will
  take action when members of our community violate this code, such as
  contacting conduct@stsci.edu (all emails sent to this address will be
  treated with the strictest confidence) or talking privately with the
  person.

This code of conduct applies to all community situations online and offline,
including mailing lists, forums, social media, conferences, meetings,
associated social events, and one-to-one interactions.

Parts of this code of conduct have been adapted from the Astropy and Numfocus
codes of conduct.

http://www.astropy.org/code_of_conduct.html
https://www.numfocus.org/about/code-of-conduct/
asdf-2.5.1/CONTRIBUTING.md0000644000446400020070000000201313567314375017073 0ustar eslavichSTSCI\science00000000000000
Please open a new issue or new pull request for bugs, feedback, or new
features you would like to see. If there is an issue you would like to work
on, please leave a comment and we will be happy to assist. New contributions
and contributors are very welcome!

The main development work is done on the "master" branch. The "stable" branch
is protected and used for official releases. The rest of the branches are for
release maintenance and should not be used normally. Unless otherwise told by
a maintainer, pull requests should be made and submitted to the "master"
branch.

New to github or open source projects? If you are unsure about where to start
or haven't used github before, please feel free to contact the package
maintainers.

Feedback and feature requests? Is there something missing you would like to
see? Please open an issue or send an email to the maintainers. This package
follows the Spacetelescope `Code of Conduct`_ and strives to provide a
welcoming community to all of our users and contributors.
asdf-2.5.1/MANIFEST.in0000644000446400020070000000130613605165746016403 0ustar eslavichSTSCI\science00000000000000include README.rst include CHANGES.rst include ez_setup.py include ah_bootstrap.py include setup_helpers.py include setup.cfg recursive-include *.pyx *.c *.pxd recursive-include docs * recursive-include licenses * recursive-include cextern * recursive-include scripts * recursive-include asdf-standard/schemas *.yaml recursive-include asdf-standard/reference_files * # Python version specific compatibility packages, normally excluded by default # depending on the Python version doing the building recursive-include asdf/compat *.py prune build prune docs/_build prune docs/api recursive-include astropy_helpers * exclude astropy_helpers/.git exclude astropy_helpers/.gitignore global-exclude *.pyc *.o asdf-2.5.1/PKG-INFO0000644000446400020070000003125513605166132015736 0ustar eslavichSTSCI\science00000000000000Metadata-Version: 2.1 Name: asdf Version: 2.5.1 Summary: Python tools to handle ASDF files Home-page: http://github.com/spacetelescope/asdf Author: Erik Bray, Dan D'Avella, Michael Droettboom Author-email: mdroe@stsci.edu License: BSD Description: ASDF - Advanced Scientific Data Format ====================================== .. _begin-summary-text: The **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat (ASDF) is a next-generation interchange format for scientific data. This package contains the Python implementation of the ASDF Standard. More information on the ASDF Standard itself can be found `here `__. The ASDF format has the following features: * A hierarchical, human-readable metadata format (implemented using `YAML `__) * Numerical arrays are stored as binary data blocks which can be memory mapped. Data blocks can optionally be compressed. * The structure of the data can be automatically validated using schemas (implemented using `JSON Schema `__) * Native Python data types (numerical types, strings, dicts, lists) are serialized automatically * ASDF can be extended to serialize custom data types .. _end-summary-text: ASDF is under active development `on github `__. More information on contributing can be found `below <#contributing>`__. Overview -------- This section outlines basic use cases of the ASDF package for creating and reading ASDF files. Creating a file ~~~~~~~~~~~~~~~ .. _begin-create-file-text: We're going to store several `numpy` arrays and other data to an ASDF file. We do this by creating a "tree", which is simply a `dict`, and we provide it as input to the constructor of `AsdfFile`: .. code:: python import asdf import numpy as np # Create some data sequence = np.array([x for x in range(100)]) squares = np.array([x**2 for x in range(100)]) random = np.random.random(100) # Store the data in an arbitrarily nested dictionary tree = { 'foo': 42, 'name': 'Monty', 'sequence': sequence, 'powers': { 'squares' : squares }, 'random': random } # Create the ASDF file object from our data tree af = asdf.AsdfFile(tree) # Write the data to a new file af.write_to('example.asdf') If we open the newly created file, we can see some of the key features of ASDF on display: :: #ASDF 1.0.0 #ASDF_STANDARD 1.2.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.0.0} history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: {name: asdf, version: 2.0.0} foo: 42 name: Monty powers: squares: !core/ndarray-1.0.0 source: 1 datatype: int64 byteorder: little shape: [100] random: !core/ndarray-1.0.0 source: 2 datatype: float64 byteorder: little shape: [100] sequence: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [100] ... The metadata in the file mirrors the structure of the tree that was stored. It is hierarchical and human-readable. Notice that metadata has been added to the tree that was not explicitly given by the user. Notice also that the numerical array data is not stored in the metadata tree itself. Instead, it is stored as binary data blocks below the metadata section (not shown here). It is possible to compress the array data when writing the file: .. code:: python af.write_to('compressed.asdf', all_array_compression='zlib') Available compression algorithms are ``'zlib'``, ``'bzp2'``, and ``'lz4'``. .. _end-create-file-text: Reading a file ~~~~~~~~~~~~~~ .. _begin-read-file-text: To read an existing ASDF file, we simply use the top-level `open` function of the `asdf` package: .. code:: python import asdf af = asdf.open('example.asdf') The `open` function also works as a context handler: .. code:: python with asdf.open('example.asdf') as af: ... To access the data stored in the file, use the top-level `AsdfFile.tree` attribute: .. code:: python >>> import asdf >>> af = asdf.open('example.asdf') >>> af.tree {'asdf_library': {'author': 'Space Telescope Science Institute', 'homepage': 'http://github.com/spacetelescope/asdf', 'name': 'asdf', 'version': '1.3.1'}, 'foo': 42, 'name': 'Monty', 'powers': {'squares': }, 'random': , 'sequence': } The tree is simply a Python `dict`, and nodes are accessed like any other dictionary entry: .. code:: python >>> af.tree['name'] 'Monty' >>> af.tree['powers'] {'squares': } Array data remains unloaded until it is explicitly accessed: .. code:: python >>> af.tree['powers']['squares'] array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100, 121, 144, 169, 196, 225, 256, 289, 324, 361, 400, 441, 484, 529, 576, 625, 676, 729, 784, 841, 900, 961, 1024, 1089, 1156, 1225, 1296, 1369, 1444, 1521, 1600, 1681, 1764, 1849, 1936, 2025, 2116, 2209, 2304, 2401, 2500, 2601, 2704, 2809, 2916, 3025, 3136, 3249, 3364, 3481, 3600, 3721, 3844, 3969, 4096, 4225, 4356, 4489, 4624, 4761, 4900, 5041, 5184, 5329, 5476, 5625, 5776, 5929, 6084, 6241, 6400, 6561, 6724, 6889, 7056, 7225, 7396, 7569, 7744, 7921, 8100, 8281, 8464, 8649, 8836, 9025, 9216, 9409, 9604, 9801]) >>> import numpy as np >>> expected = [x**2 for x in range(100)] >>> np.equal(af.tree['powers']['squares'], expected).all() True By default, uncompressed data blocks are memory mapped for efficient access. Memory mapping can be disabled by using the ``copy_arrays`` option of `open` when reading: .. code:: python af = asdf.open('example.asdf', copy_arrays=True) .. _end-read-file-text: For more information and for advanced usage examples, see the `documentation <#documentation>`__. Extending ASDF ~~~~~~~~~~~~~~ Out of the box, the ``asdf`` package automatically serializes and deserializes native Python types. It is possible to extend ``asdf`` by implementing custom tag types that correspond to custom user types. 
More information on extending ASDF can be found in the `official documentation `__. Installation ------------ .. _begin-pip-install-text: Stable releases of the ASDF Python package are registered `at PyPi `__. The latest stable version can be installed using ``pip``: :: $ pip install asdf .. _begin-source-install-text: The latest development version of ASDF is available from the ``master`` branch `on github `__. To clone the project: :: $ git clone https://github.com/spacetelescope/asdf To install: :: $ cd asdf $ git submodule update --init $ pip install . To install in `development mode `__:: $ pip install -e . .. note:: The source repository makes use of a git submodule for referencing the schemas provided by the ASDF standard. While this submodule is automatically initialized when installing the package (including in development mode), it may be necessary for developers to manually update the submodule if changes are made upstream. See the `documentation on git submodules `__ for more information. .. _end-source-install-text: Testing ------- .. _begin-testing-text: To install the test dependencies from a source checkout of the repository: :: $ pip install -e .[tests] To run the unit tests from a source checkout of the repository: :: $ pytest It is also possible to run the test suite from an installed version of the package. In a Python interpreter: .. code:: python import asdf asdf.test() Please note that the `astropy `__ package must be installed to run the tests. It is also possible to run the tests using `tox `__. It is first necessary to install ``tox`` and `tox-conda `__: :: $ pip install tox tox-conda To list all available environments: :: $ tox -va To run a specific environment: :: $ tox -e .. _end-testing-text: Documentation ------------- More detailed documentation on this software package can be found `here `__. More information on the ASDF Standard itself can be found `here `__. If you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, information can be found `here `__. Contributing ------------ We welcome feedback and contributions to the project. Contributions of code, documentation, or general feedback are all appreciated. Please follow the `contributing guidelines `__ to submit an issue or a pull request. We strive to provide a welcoming community to all of our users by abiding to the `Code of Conduct `__. Platform: UNKNOWN Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.3 Description-Content-Type: text/x-rst Provides-Extra: docs Provides-Extra: tests Provides-Extra: all asdf-2.5.1/README.rst0000644000446400020070000002511413605165746016337 0ustar eslavichSTSCI\science00000000000000.. ASDF - Advanced Scientific Data Format ====================================== .. raw:: html

   <!-- STScI logo -->
   <!-- ASDF - Advanced Scientific Data Format -->
   <!-- Badges: Build Status, Documentation Status, Coverage Status,
        license, stsci, astropy -->
   <!-- Links: Overview, Installation, Testing, Documentation, Contributing -->

.. _begin-summary-text The **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat (ASDF) is a next-generation interchange format for scientific data. This package contains the Python implementation of the ASDF Standard. More information on the ASDF Standard itself can be found `here `__. The ASDF format has the following features: * A hierarchical, human-readable metadata format (implemented using `YAML `__) * Numerical arrays are stored as binary data blocks which can be memory mapped. Data blocks can optionally be compressed. * The structure of the data can be automatically validated using schemas (implemented using `JSON Schema `__) * Native Python data types (numerical types, strings, dicts, lists) are serialized automatically * ASDF can be extended to serialize custom data types .. _end-summary-text ASDF is under active development `on github `__. More information on contributing can be found `below <#contributing>`__. Overview -------- This section outlines basic use cases of the ASDF package for creating and reading ASDF files. Creating a file ~~~~~~~~~~~~~~~ .. _begin-create-file-text We're going to store several `numpy` arrays and other data to an ASDF file. We do this by creating a "tree", which is simply a `dict`, and we provide it as input to the constructor of `AsdfFile`: .. code:: python import asdf import numpy as np # Create some data sequence = np.array([x for x in range(100)]) squares = np.array([x**2 for x in range(100)]) random = np.random.random(100) # Store the data in an arbitrarily nested dictionary tree = { 'foo': 42, 'name': 'Monty', 'sequence': sequence, 'powers': { 'squares' : squares }, 'random': random } # Create the ASDF file object from our data tree af = asdf.AsdfFile(tree) # Write the data to a new file af.write_to('example.asdf') If we open the newly created file, we can see some of the key features of ASDF on display: :: #ASDF 1.0.0 #ASDF_STANDARD 1.2.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.0.0} history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: {name: asdf, version: 2.0.0} foo: 42 name: Monty powers: squares: !core/ndarray-1.0.0 source: 1 datatype: int64 byteorder: little shape: [100] random: !core/ndarray-1.0.0 source: 2 datatype: float64 byteorder: little shape: [100] sequence: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [100] ... The metadata in the file mirrors the structure of the tree that was stored. It is hierarchical and human-readable. Notice that metadata has been added to the tree that was not explicitly given by the user. Notice also that the numerical array data is not stored in the metadata tree itself. Instead, it is stored as binary data blocks below the metadata section (not shown here). It is possible to compress the array data when writing the file: .. code:: python af.write_to('compressed.asdf', all_array_compression='zlib') Available compression algorithms are ``'zlib'``, ``'bzp2'``, and ``'lz4'``. .. _end-create-file-text Reading a file ~~~~~~~~~~~~~~ .. _begin-read-file-text To read an existing ASDF file, we simply use the top-level `open` function of the `asdf` package: .. code:: python import asdf af = asdf.open('example.asdf') The `open` function also works as a context handler: .. code:: python with asdf.open('example.asdf') as af: ... 
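For example, values can be pulled out of the tree inside the ``with`` block
and then used after the file has been closed (a minimal sketch reusing the
``example.asdf`` file created above):

.. code:: python

    import asdf

    with asdf.open('example.asdf') as af:
        name = af.tree['name']                   # plain Python value
        total = int(af.tree['sequence'].sum())   # forces the array to load

    # The file handle is closed here, but these values remain usable
    print(name, total)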
To access the data stored in the file, use the top-level `AsdfFile.tree` attribute: .. code:: python >>> import asdf >>> af = asdf.open('example.asdf') >>> af.tree {'asdf_library': {'author': 'Space Telescope Science Institute', 'homepage': 'http://github.com/spacetelescope/asdf', 'name': 'asdf', 'version': '1.3.1'}, 'foo': 42, 'name': 'Monty', 'powers': {'squares': }, 'random': , 'sequence': } The tree is simply a Python `dict`, and nodes are accessed like any other dictionary entry: .. code:: python >>> af.tree['name'] 'Monty' >>> af.tree['powers'] {'squares': } Array data remains unloaded until it is explicitly accessed: .. code:: python >>> af.tree['powers']['squares'] array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100, 121, 144, 169, 196, 225, 256, 289, 324, 361, 400, 441, 484, 529, 576, 625, 676, 729, 784, 841, 900, 961, 1024, 1089, 1156, 1225, 1296, 1369, 1444, 1521, 1600, 1681, 1764, 1849, 1936, 2025, 2116, 2209, 2304, 2401, 2500, 2601, 2704, 2809, 2916, 3025, 3136, 3249, 3364, 3481, 3600, 3721, 3844, 3969, 4096, 4225, 4356, 4489, 4624, 4761, 4900, 5041, 5184, 5329, 5476, 5625, 5776, 5929, 6084, 6241, 6400, 6561, 6724, 6889, 7056, 7225, 7396, 7569, 7744, 7921, 8100, 8281, 8464, 8649, 8836, 9025, 9216, 9409, 9604, 9801]) >>> import numpy as np >>> expected = [x**2 for x in range(100)] >>> np.equal(af.tree['powers']['squares'], expected).all() True By default, uncompressed data blocks are memory mapped for efficient access. Memory mapping can be disabled by using the ``copy_arrays`` option of `open` when reading: .. code:: python af = asdf.open('example.asdf', copy_arrays=True) .. _end-read-file-text For more information and for advanced usage examples, see the `documentation <#documentation>`__. Extending ASDF ~~~~~~~~~~~~~~ Out of the box, the ``asdf`` package automatically serializes and deserializes native Python types. It is possible to extend ``asdf`` by implementing custom tag types that correspond to custom user types. More information on extending ASDF can be found in the `official documentation `__. Installation ------------ .. _begin-pip-install-text Stable releases of the ASDF Python package are registered `at PyPi `__. The latest stable version can be installed using ``pip``: :: $ pip install asdf .. _begin-source-install-text The latest development version of ASDF is available from the ``master`` branch `on github `__. To clone the project: :: $ git clone https://github.com/spacetelescope/asdf To install: :: $ cd asdf $ git submodule update --init $ pip install . To install in `development mode `__:: $ pip install -e . .. note:: The source repository makes use of a git submodule for referencing the schemas provided by the ASDF standard. While this submodule is automatically initialized when installing the package (including in development mode), it may be necessary for developers to manually update the submodule if changes are made upstream. See the `documentation on git submodules `__ for more information. .. _end-source-install-text Testing ------- .. _begin-testing-text To install the test dependencies from a source checkout of the repository: :: $ pip install -e .[tests] To run the unit tests from a source checkout of the repository: :: $ pytest It is also possible to run the test suite from an installed version of the package. In a Python interpreter: .. code:: python import asdf asdf.test() Please note that the `astropy `__ package must be installed to run the tests. It is also possible to run the tests using `tox `__. 
It is first necessary to install ``tox`` and `tox-conda `__: :: $ pip install tox tox-conda To list all available environments: :: $ tox -va To run a specific environment: :: $ tox -e .. _end-testing-text Documentation ------------- More detailed documentation on this software package can be found `here `__. More information on the ASDF Standard itself can be found `here `__. If you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, information can be found `here `__. Contributing ------------ We welcome feedback and contributions to the project. Contributions of code, documentation, or general feedback are all appreciated. Please follow the `contributing guidelines `__ to submit an issue or a pull request. We strive to provide a welcoming community to all of our users by abiding to the `Code of Conduct `__. asdf-2.5.1/asdf/0000755000446400020070000000000013605166132015550 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/__init__.py0000644000446400020070000000242413567314375017676 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ asdf: Python library for reading and writing Advanced Scientific Data Format (ASDF) files """ # Affiliated packages may add whatever they like to this file, but # should keep this content at the top. # ---------------------------------------------------------------------------- from ._internal_init import * # ---------------------------------------------------------------------------- __all__ = [ 'AsdfFile', 'CustomType', 'AsdfExtension', 'Stream', 'open', 'test', 'commands', 'IntegerType', 'ExternalArrayReference' ] try: import yaml as _ except ImportError: raise ImportError("asdf requires pyyaml") try: import jsonschema as _ except ImportError: raise ImportError("asdf requires jsonschema") try: import numpy as _ except ImportError: raise ImportError("asdf requires numpy") from .asdf import AsdfFile, open_asdf from .types import CustomType from .extension import AsdfExtension from .stream import Stream from . import commands from .tags.core import IntegerType from .tags.core.external_reference import ExternalArrayReference from jsonschema import ValidationError open = open_asdf # Avoid redundancy/confusion in the top-level namespace del open_asdf asdf-2.5.1/asdf/_internal_init.py0000644000446400020070000000751513567314375021143 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- __all__ = ['__version__', '__githash__', 'test'] try: from .version import version as __version__ except ImportError: __version__ = '' try: from .version import githash as __githash__ except ImportError: __githash__ = '' # set up the test command def _get_test_runner(): import os from astropy.tests.helper import TestRunner return TestRunner(os.path.dirname(__file__)) def test(package=None, test_path=None, args=None, plugins=None, verbose=False, pastebin=None, remote_data=False, pep8=False, pdb=False, coverage=False, open_files=False, **kwargs): """ Run the tests using `pytest `__. A proper set of arguments is constructed and passed to `pytest.main`_. .. _pytest: http://pytest.org/latest/ .. _pytest.main: http://pytest.org/latest/builtin.html#pytest.main Parameters ---------- package : str, optional The name of a specific package to test, e.g. 'asdf.tags'. If nothing is specified all default tests are run. test_path : str, optional Specify location to test by path. 
May be a single file or directory. Must be specified absolutely or relative to the calling directory. args : str, optional Additional arguments to be passed to pytest.main_ in the ``args`` keyword argument. plugins : list, optional Plugins to be passed to pytest.main_ in the ``plugins`` keyword argument. verbose : bool, optional Convenience option to turn on verbose output from pytest_. Passing True is the same as specifying ``'-v'`` in ``args``. pastebin : {'failed','all',None}, optional Convenience option for turning on pytest_ pastebin output. Set to ``'failed'`` to upload info for failed tests, or ``'all'`` to upload info for all tests. remote_data : bool, optional Controls whether to run tests marked with @remote_data. These tests use online data and are not run by default. Set to True to run these tests. pep8 : bool, optional Turn on PEP8 checking via the `pytest-pep8 plugin `_ and disable normal tests. Same as specifying ``'--pep8 -k pep8'`` in ``args``. pdb : bool, optional Turn on PDB post-mortem analysis for failing tests. Same as specifying ``'--pdb'`` in ``args``. coverage : bool, optional Generate a test coverage report. The result will be placed in the directory htmlcov. open_files : bool, optional Fail when any tests leave files open. Off by default, because this adds extra run time to the test suite. Requires the `psutil `_ package. parallel : int, optional When provided, run the tests in parallel on the specified number of CPUs. If parallel is negative, it will use the all the cores on the machine. Requires the `pytest-xdist `_ plugin installed. kwargs Any additional keywords passed into this function will be passed on to the astropy test runner. This allows use of test-related functionality implemented in later versions of astropy without explicitly updating the package template. """ try: import astropy except ImportError: raise ImportError("Running the tests requires astropy") test_runner = _get_test_runner() return test_runner.run_tests( package=package, test_path=test_path, args=args, plugins=plugins, verbose=verbose, pastebin=pastebin, remote_data=remote_data, pep8=pep8, pdb=pdb, coverage=coverage, open_files=open_files, **kwargs) asdf-2.5.1/asdf/asdf.py0000644000446400020070000014175113605166107017052 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import time import re import copy import datetime import warnings import importlib from pkg_resources import parse_version import numpy as np from jsonschema import ValidationError from . import block from . import constants from . import generic_io from . import reference from . import schema from . import treeutil from . import util from . import version from . import versioning from . import yamlutil from .exceptions import AsdfDeprecationWarning from .extension import AsdfExtensionList, default_extensions from .tags.core import AsdfObject, Software, HistoryEntry, ExtensionMetadata def get_asdf_library_info(): """ Get information about asdf to include in the asdf_library entry in the Tree. """ return Software({ 'name': 'asdf', 'version': version.version, 'homepage': 'http://github.com/spacetelescope/asdf', 'author': 'Space Telescope Science Institute' }) class AsdfFile(versioning.VersionedMixin): """ The main class that represents an ASDF file object. 
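
    Examples
    --------
    A minimal usage sketch (the file name here is illustrative)::

        import asdf
        import numpy as np

        # Wrap a plain dict tree and write it out
        af = asdf.AsdfFile({'data': np.arange(10)})
        af.write_to('example.asdf')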
""" def __init__(self, tree=None, uri=None, extensions=None, version=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, ignore_implicit_conversion=False, copy_arrays=False, lazy_load=True, custom_schema=None, _readonly=False): """ Parameters ---------- tree : dict or AsdfFile, optional The main tree data in the ASDF file. Must conform to the ASDF schema. uri : str, optional The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, if possible and if created from `AsdfFile.open`. extensions : list of AsdfExtension A list of extensions to use when reading and writing ASDF files. See `~asdf.types.AsdfExtension` for more information. version : str, optional The ASDF version to use when writing out. If not provided, it will write out in the latest version supported by asdf. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. Set to `True` by default. ignore_unrecognized_tag : bool, optional When `True`, do not raise warnings for unrecognized tags. Set to `False` by default. ignore_implicit_conversion : bool When `True`, do not raise warnings when types in the tree are implicitly converted into a serializable object. The motivating case for this is currently `namedtuple`, which cannot be serialized as-is. copy_arrays : bool, optional When `False`, when reading files, attempt to memmap underlying data arrays when possible. lazy_load : bool, optional When `True` and the underlying file handle is seekable, data arrays will only be loaded lazily: i.e. when they are accessed for the first time. In this case the underlying file must stay open during the lifetime of the tree. Setting to False causes all data arrays to be loaded up front, which means that they can be accessed even after the underlying file is closed. Note: even if `lazy_load` is `False`, `copy_arrays` is still taken into account. custom_schema : str, optional Path to a custom schema file that will be used for a secondary validation pass. This can be used to ensure that particular ASDF files follow custom conventions beyond those enforced by the standard. """ if custom_schema is not None: self._custom_schema = schema.load_custom_schema(custom_schema) schema.check_schema(self._custom_schema) else: self._custom_schema = None self._extensions = [] self._extension_metadata = {} self._process_extensions(extensions) self._ignore_version_mismatch = ignore_version_mismatch self._ignore_unrecognized_tag = ignore_unrecognized_tag self._ignore_implicit_conversion = ignore_implicit_conversion self._file_format_version = None self._fd = None self._closed = False self._external_asdf_by_uri = {} self._blocks = block.BlockManager( self, copy_arrays=copy_arrays, lazy_load=lazy_load, readonly=_readonly) self._uri = None if tree is None: self.tree = {} elif isinstance(tree, AsdfFile): if self._extensions != tree._extensions: raise ValueError( "Can not copy AsdfFile and change active extensions") self._uri = tree.uri # Set directly to self._tree (bypassing property), since # we can assume the other AsdfFile is already valid. 
self._tree = tree.tree self.run_modifying_hook('copy_to_new_asdf', validate=False) self.find_references() else: self.tree = tree self.find_references() if uri is not None: self._uri = uri self._comments = [] if version is not None: self.version = version def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def _check_extensions(self, tree, strict=False): if 'history' not in tree or not isinstance(tree['history'], dict) or \ 'extensions' not in tree['history']: return for extension in tree['history']['extensions']: filename = "'{}' ".format(self._fname) if self._fname else '' if extension.extension_class not in self._extension_metadata: msg = "File {}was created with extension '{}', which is " \ "not currently installed" if extension.software: msg += " (from package {}-{})".format( extension.software['name'], extension.software['version']) fmt_msg = msg.format(filename, extension.extension_class) if strict: raise RuntimeError(fmt_msg) else: warnings.warn(fmt_msg) elif extension.software: installed = self._extension_metadata[extension.extension_class] # Local extensions may not have a real version if not installed[1]: continue # Compare version in file metadata with installed version if parse_version(installed[1]) < parse_version(extension.software['version']): msg = "File {}was created with extension '{}' from " \ "package {}-{}, but older version {}-{} is installed" fmt_msg = msg.format( filename, extension.extension_class, extension.software['name'], extension.software['version'], installed[0], installed[1]) if strict: raise RuntimeError(fmt_msg) else: warnings.warn(fmt_msg) def _process_extensions(self, extensions): if extensions is None or extensions == []: self._extensions = default_extensions.extension_list self._extension_metadata = default_extensions.package_metadata return if isinstance(extensions, AsdfExtensionList): self._extensions = extensions return if not isinstance(extensions, list): extensions = [extensions] # Process metadata about custom extensions for extension in extensions: ext_name = util.get_class_name(extension) self._extension_metadata[ext_name] = ('', '') extensions = default_extensions.extensions + extensions self._extensions = AsdfExtensionList(extensions) self._extension_metadata.update(default_extensions.package_metadata) def _update_extension_history(self): if 'history' not in self.tree: self.tree['history'] = dict(extensions=[]) # Support clients who are still using the old history format elif isinstance(self.tree['history'], list): histlist = self.tree['history'] self.tree['history'] = dict(entries=histlist, extensions=[]) warnings.warn("The ASDF history format has changed in order to " "support metadata about extensions. 
History entries " "should now be stored under tree['history']['entries'].") elif 'extensions' not in self.tree['history']: self.tree['history']['extensions'] = [] for extension in self.type_index.get_extensions_used(): ext_name = util.get_class_name(extension) ext_meta = ExtensionMetadata(extension_class=ext_name) metadata = self._extension_metadata.get(ext_name) if metadata is not None: ext_meta.software = dict(name=metadata[0], version=metadata[1]) for i, entry in enumerate(self.tree['history']['extensions']): # Update metadata about this extension if it already exists if entry.extension_class == ext_meta.extension_class: self.tree['history']['extensions'][i] = ext_meta break else: self.tree['history']['extensions'].append(ext_meta) @property def file_format_version(self): if self._file_format_version is None: return versioning.AsdfVersion(self.version_map['FILE_FORMAT']) else: return self._file_format_version def close(self): """ Close the file handles associated with the `AsdfFile`. """ if self._fd and not self._closed: # This is ok to always do because GenericFile knows # whether it "owns" the file and should close it. self._fd.close() self._fd = None self._closed = True for external in self._external_asdf_by_uri.values(): external.close() self._external_asdf_by_uri.clear() self._blocks.close() def copy(self): return self.__class__( copy.deepcopy(self._tree), self._uri, self._extensions ) __copy__ = __deepcopy__ = copy @property def uri(self): """ Get the URI associated with the `AsdfFile`. In many cases, it is automatically determined from the file handle used to read or write the file. """ if self._uri is not None: return self._uri if self._fd is not None: return self._fd._uri return None @property def tag_to_schema_resolver(self): warnings.warn( "The 'tag_to_schema_resolver' property is deprecated. Use " "'tag_mapping' instead.", AsdfDeprecationWarning) return self._extensions.tag_mapping @property def tag_mapping(self): return self._extensions.tag_mapping @property def url_mapping(self): return self._extensions.url_mapping @property def resolver(self): return self._extensions.resolver @property def type_index(self): return self._extensions.type_index def resolve_uri(self, uri): """ Resolve a (possibly relative) URI against the URI of this ASDF file. May be overridden by base classes to change how URIs are resolved. This does not apply any `uri_mapping` that was passed to the constructor. Parameters ---------- uri : str An absolute or relative URI to resolve against the URI of this ASDF file. Returns ------- uri : str The resolved URI. """ return generic_io.resolve_uri(self.uri, uri) def open_external(self, uri, do_not_fill_defaults=False): """ Open an external ASDF file, from the given (possibly relative) URI. There is a cache (internal to this ASDF file) that ensures each external ASDF file is loaded only once. Parameters ---------- uri : str An absolute or relative URI to resolve against the URI of this ASDF file. do_not_fill_defaults : bool, optional When `True`, do not fill in missing default values. Returns ------- asdffile : AsdfFile The external ASDF file. """ # For a cache key, we want to ignore the "fragment" part. base_uri = util.get_base_uri(uri) resolved_uri = self.resolve_uri(base_uri) # A uri like "#" should resolve back to ourself. In that case, # just return `self`. 
if resolved_uri == '' or resolved_uri == self.uri: return self asdffile = self._external_asdf_by_uri.get(resolved_uri) if asdffile is None: asdffile = open_asdf( resolved_uri, mode='r', do_not_fill_defaults=do_not_fill_defaults) self._external_asdf_by_uri[resolved_uri] = asdffile return asdffile @property def tree(self): """ Get/set the tree of data in the ASDF file. When set, the tree will be validated against the ASDF schema. """ if self._closed: raise OSError("Cannot access data from closed ASDF file") return self._tree @tree.setter def tree(self, tree): asdf_object = AsdfObject(tree) # Only perform custom validation if the tree is not empty self._validate(asdf_object, custom=bool(tree)) self._tree = asdf_object def keys(self): return self.tree.keys() def __getitem__(self, key): return self.tree[key] def __setitem__(self, key, value): self.tree[key] = value def __contains__(self, item): return item in self.tree @property def comments(self): """ Get the comments after the header, before the tree. """ return self._comments def _validate(self, tree, custom=True, reading=False): tagged_tree = yamlutil.custom_tree_to_tagged_tree( tree, self) schema.validate(tagged_tree, self, reading=reading) # Perform secondary validation pass if requested if custom and self._custom_schema: schema.validate(tagged_tree, self, self._custom_schema, reading=reading) def validate(self): """ Validate the current state of the tree against the ASDF schema. """ self._validate(self._tree) def make_reference(self, path=[]): """ Make a new reference to a part of this file's tree, that can be assigned as a reference to another tree. Parameters ---------- path : list of str and int, optional The parts of the path pointing to an item in this tree. If omitted, points to the root of the tree. Returns ------- reference : reference.Reference A reference object. Examples -------- For the given AsdfFile ``ff``, add an external reference to the data in an external file:: >>> import asdf >>> flat = asdf.open("http://stsci.edu/reference_files/flat.asdf") # doctest: +SKIP >>> ff.tree['flat_field'] = flat.make_reference(['data']) # doctest: +SKIP """ return reference.make_reference(self, path) @property def blocks(self): """ Get the block manager associated with the `AsdfFile`. """ return self._blocks def set_array_storage(self, arr, array_storage): """ Set the block type to use for the given array data. Parameters ---------- arr : numpy.ndarray The array to set. If multiple views of the array are in the tree, only the most recent block type setting will be used, since all views share a single block. array_storage : str Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. """ block = self.blocks[arr] self.blocks.set_array_storage(block, array_storage) def get_array_storage(self, arr): """ Get the block type for the given array data. Parameters ---------- arr : numpy.ndarray """ return self.blocks[arr].array_storage def set_array_compression(self, arr, compression): """ Set the compression to use for the given array data. Parameters ---------- arr : numpy.ndarray The array to set. If multiple views of the array are in the tree, only the most recent compression setting will be used, since all views share a single block. 
        compression : str or None
            Must be one of:

            - ``''`` or `None`: no compression
            - ``zlib``: Use zlib compression
            - ``bzp2``: Use bzip2 compression
            - ``lz4``: Use lz4 compression
            - ``input``: Use the same compression as in the file read.
              If there is no prior file, acts as None.
        """
        self.blocks[arr].output_compression = compression

    def get_array_compression(self, arr):
        """
        Get the compression type for the given array data.

        Parameters
        ----------
        arr : numpy.ndarray

        Returns
        -------
        compression : str or None
        """
        return self.blocks[arr].output_compression

    @classmethod
    def _parse_header_line(cls, line):
        """
        Parses the header line in an ASDF file to obtain the ASDF version.
        """
        parts = line.split()
        if len(parts) != 2 or parts[0] != constants.ASDF_MAGIC:
            raise ValueError("Does not appear to be an ASDF file.")
        try:
            version = versioning.AsdfVersion(parts[1].decode('ascii'))
        except ValueError:
            raise ValueError(
                "Unparseable version in ASDF file: {0}".format(parts[1]))
        return version

    @classmethod
    def _parse_comment_section(cls, content):
        """
        Parses the comment section, between the header line and the
        tree or first block.
        """
        comments = []
        lines = content.splitlines()
        for line in lines:
            if not line.startswith(b'#'):
                raise ValueError("Invalid content between header and tree")
            comments.append(line[1:].strip())
        return comments

    @classmethod
    def _find_asdf_version_in_comments(cls, comments):
        for comment in comments:
            parts = comment.split()
            if len(parts) == 2 and parts[0] == constants.ASDF_STANDARD_COMMENT:
                try:
                    version = versioning.AsdfVersion(parts[1].decode('ascii'))
                except ValueError:
                    pass
                else:
                    return version
        return None

    @classmethod
    def _open_asdf(cls, self, fd, uri=None, mode='r',
                   validate_checksums=False,
                   do_not_fill_defaults=False,
                   _get_yaml_content=False,
                   _force_raw_types=False,
                   strict_extension_check=False,
                   ignore_missing_extensions=False):
        """Attempt to populate AsdfFile data from file-like object"""
        if strict_extension_check and ignore_missing_extensions:
            raise ValueError(
                "'strict_extension_check' and 'ignore_missing_extensions' are "
                "incompatible options")

        self._mode = mode

        fd = generic_io.get_file(fd, mode=self._mode, uri=uri)
        self._fd = fd
        # The filename is currently only used for tracing warning information
        self._fname = self._fd._uri if self._fd._uri else ''
        header_line = fd.read_until(b'\r?\n', 2, "newline", include=True)
        self._file_format_version = cls._parse_header_line(header_line)
        self.version = self._file_format_version

        comment_section = fd.read_until(
            b'(%YAML)|(' + constants.BLOCK_MAGIC + b')', 5,
            "start of content", include=False, exception=False)
        self._comments = cls._parse_comment_section(comment_section)

        version = cls._find_asdf_version_in_comments(self._comments)
        if version is not None:
            self.version = version

        yaml_token = fd.read(4)
        tree = {}
        has_blocks = False
        if yaml_token == b'%YAM':
            reader = fd.reader_until(
                constants.YAML_END_MARKER_REGEX, 7, 'End of YAML marker',
                include=True, initial_content=yaml_token)

            # For testing: just return the raw YAML content
            if _get_yaml_content:
                yaml_content = reader.read()
                fd.close()
                return yaml_content

            # We parse the YAML content into basic data structures
            # now, but we don't do anything special with it until
            # after the blocks have been read
            tree = yamlutil.load_tree(reader, self, self._ignore_version_mismatch)
            has_blocks = fd.seek_until(constants.BLOCK_MAGIC, 4, include=True)
        elif yaml_token == constants.BLOCK_MAGIC:
            has_blocks = True
        elif yaml_token != b'':
            raise IOError("ASDF file appears to contain garbage
after header.") if has_blocks: self._blocks.read_internal_blocks( fd, past_magic=True, validate_checksums=validate_checksums) self._blocks.read_block_index(fd, self) tree = reference.find_references(tree, self) if not do_not_fill_defaults: schema.fill_defaults(tree, self, reading=True) try: self._validate(tree, reading=True) except ValidationError: self.close() raise tree = yamlutil.tagged_tree_to_custom_tree(tree, self, _force_raw_types) if not (ignore_missing_extensions or _force_raw_types): self._check_extensions(tree, strict=strict_extension_check) self._tree = tree self.run_hook('post_read') return self @classmethod def _open_impl(cls, self, fd, uri=None, mode='r', validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, _force_raw_types=False, strict_extension_check=False, ignore_missing_extensions=False): """Attempt to open file-like object as either AsdfFile or AsdfInFits""" if not is_asdf_file(fd): try: # TODO: this feels a bit circular, try to clean up. Also # this introduces another dependency on astropy which may # not be desireable. from . import fits_embed return fits_embed.AsdfInFits._open_impl(fd, uri=uri, validate_checksums=validate_checksums, ignore_version_mismatch=self._ignore_version_mismatch, extensions=self._extensions, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, ignore_unrecognized_tag=self._ignore_unrecognized_tag, _extension_metadata=self._extension_metadata) except ValueError: raise ValueError( "Input object does not appear to be an ASDF file or a FITS with " + "ASDF extension") from None except ImportError: raise ValueError( "Input object does not appear to be an ASDF file. Cannot check " + "if it is a FITS with ASDF extension because 'astropy' is not " + "installed") from None return cls._open_asdf(self, fd, uri=uri, mode=mode, validate_checksums=validate_checksums, do_not_fill_defaults=do_not_fill_defaults, _get_yaml_content=_get_yaml_content, _force_raw_types=_force_raw_types, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions) @classmethod def open(cls, fd, uri=None, mode='r', validate_checksums=False, extensions=None, do_not_fill_defaults=False, ignore_version_mismatch=True, ignore_unrecognized_tag=False, _force_raw_types=False, copy_arrays=False, lazy_load=True, custom_schema=None, strict_extension_check=False, ignore_missing_extensions=False): """ Open an existing ASDF file. .. deprecated:: 2.2 Use `asdf.open` instead. """ warnings.warn( "The method AsdfFile.open has been deprecated and will be removed " "in asdf-3.0. 
Use the top-level asdf.open function instead.", AsdfDeprecationWarning) return open_asdf( fd, uri=uri, mode=mode, validate_checksums=validate_checksums, extensions=extensions, do_not_fill_defaults=do_not_fill_defaults, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, _force_raw_types=_force_raw_types, copy_arrays=copy_arrays, lazy_load=lazy_load, custom_schema=custom_schema, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, _compat=True) def _write_tree(self, tree, fd, pad_blocks): fd.write(constants.ASDF_MAGIC) fd.write(b' ') fd.write(self.version_map['FILE_FORMAT'].encode('ascii')) fd.write(b'\n') fd.write(b'#') fd.write(constants.ASDF_STANDARD_COMMENT) fd.write(b' ') fd.write(self.version_string.encode('ascii')) fd.write(b'\n') if len(tree): yamlutil.dump_tree(tree, fd, self) if pad_blocks: padding = util.calculate_padding( fd.tell(), pad_blocks, fd.block_size) fd.fast_forward(padding) def _pre_write(self, fd, all_array_storage, all_array_compression, auto_inline): if all_array_storage not in (None, 'internal', 'external', 'inline'): raise ValueError( "Invalid value for all_array_storage: '{0}'".format( all_array_storage)) self._all_array_storage = all_array_storage self._all_array_compression = all_array_compression if auto_inline in (True, False): raise ValueError( "Invalid value for auto_inline: '{0}'".format(auto_inline)) if auto_inline is not None: try: self._auto_inline = int(auto_inline) except ValueError: raise ValueError( "Invalid value for auto_inline: '{0}'".format(auto_inline)) else: self._auto_inline = None if len(self._tree): self.run_hook('pre_write') # This is where we'd do some more sophisticated block # reorganization, if necessary self._blocks.finalize(self) self._tree['asdf_library'] = get_asdf_library_info() self._update_extension_history() def _serial_write(self, fd, pad_blocks, include_block_index): self._write_tree(self._tree, fd, pad_blocks) self.blocks.write_internal_blocks_serial(fd, pad_blocks) self.blocks.write_external_blocks(fd.uri, pad_blocks) if include_block_index: self.blocks.write_block_index(fd, self) def _random_write(self, fd, pad_blocks, include_block_index): self._write_tree(self._tree, fd, False) self.blocks.write_internal_blocks_random_access(fd) self.blocks.write_external_blocks(fd.uri, pad_blocks) if include_block_index: self.blocks.write_block_index(fd, self) fd.truncate() def _post_write(self, fd): if len(self._tree): self.run_hook('post_write') # TODO: there has got to be a better way to do this... if hasattr(self, '_all_array_storage'): del self._all_array_storage if hasattr(self, '_all_array_compression'): del self._all_array_compression if hasattr(self, '_auto_inline'): del self._auto_inline def update(self, all_array_storage=None, all_array_compression='input', auto_inline=None, pad_blocks=False, include_block_index=True, version=None): """ Update the file on disk in place. Parameters ---------- all_array_storage : string, optional If provided, override the array storage type of all blocks in the file immediately before writing. Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. all_array_compression : string, optional If provided, set the compression type on all binary blocks in the file. Must be one of: - ``''`` or `None`: No compression. 
            - ``zlib``: Use zlib compression.
            - ``bzp2``: Use bzip2 compression.
            - ``lz4``: Use lz4 compression.
            - ``input``: Use the same compression as in the file read.
              If there is no prior file, acts as None.

        auto_inline : int, optional
            When the number of elements in an array is less than this
            threshold, store the array as inline YAML, rather than a
            binary block. This only works on arrays that do not share
            data with other arrays. Default is 0.

        pad_blocks : float or bool, optional
            Add extra space between blocks to allow for updating of
            the file. If `False` (default), add no padding (always
            return 0). If `True`, add a default amount of padding of
            10%. If a float, it is a factor to multiply content_size by
            to get the new total size.

        include_block_index : bool, optional
            If `False`, don't include a block index at the end of the
            file. (Default: `True`) A block index is never written if
            the file has a streamed block.

        version : str, optional
            The ASDF version to write out. If not provided, it will
            write out in the latest version supported by asdf.
        """
        fd = self._fd

        if fd is None:
            raise ValueError(
                "Can not update, since there is no associated file")

        if not fd.writable():
            raise IOError(
                "Can not update, since associated file is read-only. Make "
                "sure that the AsdfFile was opened with mode='rw' and the "
                "underlying file handle is writable.")

        if version is not None:
            self.version = version

        if all_array_storage == 'external':
            # If the file is fully exploded, there's no benefit to
            # update, so just use write_to()
            self.write_to(fd, all_array_storage=all_array_storage)
            fd.truncate()
            return

        if not fd.seekable():
            raise IOError(
                "Can not update, since associated file is not seekable")

        self.blocks.finish_reading_internal_blocks()

        self._pre_write(fd, all_array_storage, all_array_compression,
                        auto_inline)

        try:
            fd.seek(0)

            if not self.blocks.has_blocks_with_offset():
                # If we don't have any blocks that are being reused, just
                # write out in a serial fashion.
                self._serial_write(fd, pad_blocks, include_block_index)
                fd.truncate()
                return

            # Estimate how big the tree will be on disk by writing the
            # YAML out in memory. Since the block indices aren't yet
            # known, we have to count the number of block references and
            # add enough space to accommodate the largest block number
            # possible there.
            tree_serialized = io.BytesIO()
            self._write_tree(self._tree, tree_serialized, pad_blocks=False)
            array_ref_count = [0]
            from .tags.core.ndarray import NDArrayType

            for node in treeutil.iter_tree(self._tree):
                if (isinstance(node, (np.ndarray, NDArrayType)) and
                        self.blocks[node].array_storage == 'internal'):
                    array_ref_count[0] += 1

            serialized_tree_size = (
                tree_serialized.tell() +
                constants.MAX_BLOCKS_DIGITS * array_ref_count[0])

            if not block.calculate_updated_layout(
                    self.blocks, serialized_tree_size,
                    pad_blocks, fd.block_size):
                # If we don't have any blocks that are being reused, just
                # write out in a serial fashion.
                self._serial_write(fd, pad_blocks, include_block_index)
                fd.truncate()
                return

            fd.seek(0)
            self._random_write(fd, pad_blocks, include_block_index)
            fd.flush()
        finally:
            self._post_write(fd)

    def write_to(self, fd, all_array_storage=None, all_array_compression='input',
                 auto_inline=None, pad_blocks=False, include_block_index=True,
                 version=None):
        """
        Write the ASDF file to the given file-like object.

        `write_to` does not change the underlying file descriptor in
        the `AsdfFile` object, but merely copies the content to a new
        file.

        Parameters
        ----------
        fd : string or file-like object
            May be a string path to a file, or a Python file-like object.
            If a string path, the file is automatically closed after
            writing. If not a string path, it is the caller's
            responsibility to close the object.

        all_array_storage : string, optional
            If provided, override the array storage type of all blocks
            in the file immediately before writing. Must be one of:

            - ``internal``: The default. The array data will be
              stored in a binary block in the same ASDF file.

            - ``external``: Store the data in a binary block in a
              separate ASDF file.

            - ``inline``: Store the data as YAML inline in the tree.

        all_array_compression : string, optional
            If provided, set the compression type on all binary blocks
            in the file. Must be one of:

            - ``''`` or `None`: No compression.
            - ``zlib``: Use zlib compression.
            - ``bzp2``: Use bzip2 compression.
            - ``lz4``: Use lz4 compression.
            - ``input``: Use the same compression as in the file read.
              If there is no prior file, acts as None.

        auto_inline : int, optional
            When the number of elements in an array is less than this
            threshold, store the array as inline YAML, rather than a
            binary block. This only works on arrays that do not share
            data with other arrays. Default is 0.

        pad_blocks : float or bool, optional
            Add extra space between blocks to allow for updating of
            the file. If `False` (default), add no padding (always
            return 0). If `True`, add a default amount of padding of
            10%. If a float, it is a factor to multiply content_size by
            to get the new total size.

        include_block_index : bool, optional
            If `False`, don't include a block index at the end of the
            file. (Default: `True`) A block index is never written if
            the file has a streamed block.

        version : str, optional
            The ASDF version to write out. If not provided, it will
            write out in the latest version supported by asdf.
        """
        if version is not None:
            self.version = version

        with generic_io.get_file(fd, mode='w') as fd:
            # TODO: This is not ideal: we really should pass the URI through
            # explicitly to wherever it is required instead of making it an
            # attribute of the AsdfFile.
            if self._uri is None:
                self._uri = fd.uri
            self._pre_write(fd, all_array_storage, all_array_compression,
                            auto_inline)

            try:
                self._serial_write(fd, pad_blocks, include_block_index)
                fd.flush()
            finally:
                self._post_write(fd)

    def find_references(self):
        """
        Finds all external "JSON References" in the tree and converts
        them to `reference.Reference` objects.
        """
        # Since this is the first place that the tree is processed when
        # creating a new ASDF object, this is where we pass the option to
        # ignore warnings about implicit type conversions.
        # Set directly to self._tree, since it doesn't need to be re-validated.
        self._tree = reference.find_references(
            self._tree, self,
            ignore_implicit_conversion=self._ignore_implicit_conversion)

    def resolve_references(self, do_not_fill_defaults=False):
        """
        Finds all external "JSON References" in the tree, loads the
        external content, and places it directly in the tree. Saving
        an ASDF file after this operation means it will have no
        external references, and will be completely self-contained.
        """
        # Set to the property self.tree so the resulting "complete"
        # tree will be validated.
        self.tree = reference.resolve_references(self._tree, self)

    def run_hook(self, hookname):
        """
        Run a "hook" for each custom type found in the tree.

        Parameters
        ----------
        hookname : str
            The name of the hook. If an `AsdfType` is found with a
            method with this name, it will be called for every
            instance of the corresponding custom type in the tree.
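
        Examples
        --------
        Illustrative only; ``'post_read'`` is a hook name this module
        itself invokes after reading a file::

            >>> ff.run_hook('post_read')  # doctest: +SKIP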
""" type_index = self.type_index if not type_index.has_hook(hookname): return for node in treeutil.iter_tree(self._tree): hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) if hook is not None: hook(node, self) def run_modifying_hook(self, hookname, validate=True): """ Run a "hook" for each custom type found in the tree. The hook is free to return a different object in order to modify the tree. Parameters ---------- hookname : str The name of the hook. If a `AsdfType` is found with a method with this name, it will be called for every instance of the corresponding custom type in the tree. validate : bool When `True` (default) validate the resulting tree. """ type_index = self.type_index if not type_index.has_hook(hookname): return def walker(node): hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) if hook is not None: return hook(node, self) return node tree = treeutil.walk_and_modify(self.tree, walker) if validate: self._validate(tree) self._tree = tree return self._tree def resolve_and_inline(self): """ Resolves all external references and inlines all data. This produces something that, when saved, is a 100% valid YAML file. """ self.blocks.finish_reading_internal_blocks() self.resolve_references() for b in list(self.blocks.blocks): self.blocks.set_array_storage(b, 'inline') def fill_defaults(self): """ Fill in any values that are missing in the tree using default values from the schema. """ tree = yamlutil.custom_tree_to_tagged_tree(self._tree, self) schema.fill_defaults(tree, self) self._tree = yamlutil.tagged_tree_to_custom_tree(tree, self) def remove_defaults(self): """ Remove any values in the tree that are the same as the default values in the schema """ tree = yamlutil.custom_tree_to_tagged_tree(self._tree, self) schema.remove_defaults(tree, self) self._tree = yamlutil.tagged_tree_to_custom_tree(tree, self) def add_history_entry(self, description, software=None): """ Add an entry to the history list. Parameters ---------- description : str A description of the change. software : dict or list of dict A description of the software used. It should not include asdf itself, as that is automatically notated in the `asdf_library` entry. Each dict must have the following keys: - ``name``: The name of the software - ``author``: The author or institution that produced the software - ``homepage``: A URI to the homepage of the software - ``version``: The version of the software """ if isinstance(software, list): software = [Software(x) for x in software] elif software is not None: software = Software(software) time_ = datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())), ) entry = HistoryEntry({ 'description': description, 'time': time_, }) if software is not None: entry['software'] = software if 'history' not in self.tree: self.tree['history'] = dict(entries=[]) self.tree['history']['entries'].append(entry) try: self.validate() except Exception: self.tree['history']['entries'].pop() raise def get_history_entries(self): """ Get a list of history entries from the file object. Returns ------- entries : list A list of history entries. 
""" if 'history' not in self.tree: return [] if isinstance(self.tree['history'], list): return self.tree['history'] if 'entries' in self.tree['history']: return self.tree['history']['entries'] return [] # Inherit docstring from dictionary AsdfFile.keys.__doc__ = dict.keys.__doc__ def _check_and_set_mode(fileobj, asdf_mode): if asdf_mode is not None and asdf_mode not in ['r', 'rw']: msg = "Unrecognized asdf mode '{}'. Must be either 'r' or 'rw'" raise ValueError(msg.format(asdf_mode)) if asdf_mode is None: if isinstance(fileobj, io.IOBase): return 'rw' if fileobj.writable() else 'r' if isinstance(fileobj, generic_io.GenericFile): return fileobj.mode # This is the safest assumption for the default fallback return 'r' return asdf_mode def open_asdf(fd, uri=None, mode=None, validate_checksums=False, extensions=None, do_not_fill_defaults=False, ignore_version_mismatch=True, ignore_unrecognized_tag=False, _force_raw_types=False, copy_arrays=False, lazy_load=True, custom_schema=None, strict_extension_check=False, ignore_missing_extensions=False, _compat=False): """ Open an existing ASDF file. Parameters ---------- fd : string or file-like object May be a string ``file`` or ``http`` URI, or a Python file-like object. uri : string, optional The URI of the file. Only required if the URI can not be automatically determined from `fd`. mode : string, optional The mode to open the file in. Must be ``r`` (default) or ``rw``. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. Requires reading the entire file, so disabled by default. extensions : list of AsdfExtension A list of extensions to use when reading and writing ASDF files. See `~asdf.types.AsdfExtension` for more information. do_not_fill_defaults : bool, optional When `True`, do not fill in missing default values. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. Set to `True` by default. ignore_unrecognized_tag : bool, optional When `True`, do not raise warnings for unrecognized tags. Set to `False` by default. copy_arrays : bool, optional When `False`, when reading files, attempt to memmap underlying data arrays when possible. lazy_load : bool, optional When `True` and the underlying file handle is seekable, data arrays will only be loaded lazily: i.e. when they are accessed for the first time. In this case the underlying file must stay open during the lifetime of the tree. Setting to False causes all data arrays to be loaded up front, which means that they can be accessed even after the underlying file is closed. Note: even if `lazy_load` is `False`, `copy_arrays` is still taken into account. custom_schema : str, optional Path to a custom schema file that will be used for a secondary validation pass. This can be used to ensure that particular ASDF files follow custom conventions beyond those enforced by the standard. strict_extension_check : bool, optional When `True`, if the given ASDF file contains metadata about the extensions used to create it, and if those extensions are not installed, opening the file will fail. When `False`, opening a file under such conditions will cause only a warning. Defaults to `False`. ignore_missing_extensions : bool, optional When `True`, do not raise warnings when a file is read that contains metadata about extensions that are not available. Defaults to `False`. Returns ------- asdffile : AsdfFile The new AsdfFile object. 
""" readonly = False # For now retain backwards compatibility with the old API behavior, # specifically when being called from AsdfFile.open if not _compat: mode = _check_and_set_mode(fd, mode) readonly = (mode == 'r' and not copy_arrays) instance = AsdfFile(extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, copy_arrays=copy_arrays, lazy_load=lazy_load, custom_schema=custom_schema, _readonly=readonly) return AsdfFile._open_impl(instance, fd, uri=uri, mode=mode, validate_checksums=validate_checksums, do_not_fill_defaults=do_not_fill_defaults, _force_raw_types=_force_raw_types, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions) def is_asdf_file(fd): """ Determine if fd is an ASDF file. Reads the first five bytes and looks for the ``#ASDF`` string. Parameters ---------- fd : str, `~asdf.generic_io.GenericFile` """ if isinstance(fd, generic_io.InputStream): # If it's an InputStream let ASDF deal with it. return True to_close = False if isinstance(fd, AsdfFile): return True elif isinstance(fd, generic_io.GenericFile): pass else: try: fd = generic_io.get_file(fd, mode='r', uri=None) if not isinstance(fd, io.IOBase): to_close = True except ValueError: return False asdf_magic = fd.read(5) if fd.seekable(): fd.seek(0) if to_close: fd.close() if asdf_magic == constants.ASDF_MAGIC: return True return False asdf-2.5.1/asdf/asdftypes.py0000644000446400020070000000102213567314375020132 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import warnings from .exceptions import AsdfDeprecationWarning # This is not exhaustive, but represents the public API from .versioning import join_tag_version, split_tag_version from .types import (AsdfType, CustomType, format_tag, ExtensionTypeMeta, _all_asdftypes) warnings.warn( "The module asdf.asdftypes has been deprecated and will be removed in 3.0. " "Use asdf.types instead.", AsdfDeprecationWarning) asdf-2.5.1/asdf/block.py0000644000446400020070000012737613572221520017230 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import copy import hashlib import io import os import re import struct import weakref from collections import namedtuple from urllib import parse as urlparse import numpy as np from numpy.ma.core import masked_array import yaml from . import compression as mcompression from .compat.numpycompat import NUMPY_LT_1_7 from . import constants from . import generic_io from . import stream from . import treeutil from . import util from . import yamlutil class BlockManager: """ Manages the `Block`s associated with a ASDF file. """ def __init__(self, asdffile, copy_arrays=False, lazy_load=True, readonly=False): self._asdffile = weakref.ref(asdffile) self._internal_blocks = [] self._external_blocks = [] self._inline_blocks = [] self._streamed_blocks = [] self._block_type_mapping = { 'internal': self._internal_blocks, 'external': self._external_blocks, 'inline': self._inline_blocks, 'streamed': self._streamed_blocks } self._data_to_block_mapping = {} self._validate_checksums = False self._memmap = not copy_arrays self._lazy_load = lazy_load self._readonly = readonly def __len__(self): """ Return the total number of blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. 
Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ return sum(len(x) for x in self._block_type_mapping.values()) def add(self, block): """ Add an internal block to the manager. """ block_set = self._block_type_mapping.get(block.array_storage, None) if block_set is not None: if block not in block_set: block_set.append(block) else: raise ValueError( "Unknown array storage type {0}".format(block.array_storage)) if block.array_storage == 'streamed' and len(self._streamed_blocks) > 1: raise ValueError("Can not add second streaming block") if block._data is not None: self._data_to_block_mapping[id(block._data)] = block def remove(self, block): """ Remove a block from the manager. """ block_set = self._block_type_mapping.get(block.array_storage, None) if block_set is not None: if block in block_set: block_set.remove(block) if block._data is not None: if id(block._data) in self._data_to_block_mapping: del self._data_to_block_mapping[id(block._data)] else: raise ValueError( "Unknown array storage type {0}".format(block.array_storage)) def set_array_storage(self, block, array_storage): """ Set the array storage type of the given block. Parameters ---------- block : Block instance array_storage : str Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. - ``streamed``: The special streamed inline block that appears at the end of the file. """ if array_storage not in ['internal', 'external', 'streamed', 'inline']: raise ValueError( "array_storage must be one of 'internal', 'external', " "'streamed' or 'inline'") if block.array_storage != array_storage: if block in self.blocks: self.remove(block) block._array_storage = array_storage self.add(block) if array_storage == 'streamed': block.output_compression = None @property def blocks(self): """ An iterator over all blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ for block_set in self._block_type_mapping.values(): for block in block_set: yield block @property def internal_blocks(self): """ An iterator over all internal blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ for block_set in (self._internal_blocks, self._streamed_blocks): for block in block_set: yield block @property def streamed_block(self): """ The streamed block (always the last internal block in a file), or `None` if a streamed block is not present. """ self.finish_reading_internal_blocks() if len(self._streamed_blocks): return self._streamed_blocks[0] @property def external_blocks(self): """ An iterator over all external blocks being managed. """ for block in self._external_blocks: yield block @property def inline_blocks(self): """ An iterator over all inline blocks being managed. """ for block in self._inline_blocks: yield block @property def memmap(self): """ The flag which indicates whether the arrays are memory mapped to the underlying file. """ return self._memmap @property def lazy_load(self): """ The flag which indicates whether the blocks are lazily read. 
""" return self._lazy_load def has_blocks_with_offset(self): """ Returns `True` if any of the internal blocks currently have an offset assigned. """ for block in self.internal_blocks: if block.offset is not None: return True return False def _new_block(self): return Block(memmap=self.memmap, lazy_load=self.lazy_load) def _sort_blocks_by_offset(self): def sorter(x): if x.offset is None: raise ValueError('Block is missing offset') else: return x.offset self._internal_blocks.sort(key=sorter) def _read_next_internal_block(self, fd, past_magic=False): # This assumes the file pointer is at the beginning of the # block, (or beginning + 4 if past_magic is True) block = self._new_block().read( fd, past_magic=past_magic, validate_checksum=self._validate_checksums) if block is not None: self.add(block) return block def read_internal_blocks(self, fd, past_magic=False, validate_checksums=False): """ Read internal blocks present in the file. If the file is seekable, only the first block will be read, and the reading of all others will be lazily deferred until an the loading of an array requests it. Parameters ---------- fd : GenericFile The file to read from. past_magic : bool, optional If `True`, the file position is immediately after the block magic token. If `False` (default), the file position is exactly at the beginning of the block magic token. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. """ self._validate_checksums = validate_checksums while True: block = self._read_next_internal_block(fd, past_magic=past_magic) if block is None: break past_magic = False # If the file handle is seekable, we only read the first # block and defer reading the rest until later. if fd.seekable(): break def finish_reading_internal_blocks(self): """ Read all remaining internal blocks present in the file, if any. This is called before updating a file, since updating requires knowledge of all internal blocks in the file. """ if not self._internal_blocks: return for i, block in enumerate(self._internal_blocks): if isinstance(block, UnloadedBlock): block.load() last_block = self._internal_blocks[-1] # Read all of the remaining blocks in the file, if any if (last_block._fd is not None and last_block._fd.seekable()): last_block._fd.seek(last_block.end_offset) while True: last_block = self._read_next_internal_block( last_block._fd, False) if last_block is None: break def write_internal_blocks_serial(self, fd, pad_blocks=False): """ Write all blocks to disk serially. Parameters ---------- fd : generic_io.GenericFile The file to write internal blocks to. The file position should be after the tree. """ for block in self.internal_blocks: if block.output_compression: block.offset = fd.tell() block.write(fd) else: if block.input_compression: block.update_size() padding = util.calculate_padding( block.size, pad_blocks, fd.block_size) block.allocated = block._size + padding block.offset = fd.tell() block.write(fd) fd.fast_forward(block.allocated - block._size) def write_internal_blocks_random_access(self, fd): """ Write all blocks to disk at their specified offsets. All internal blocks must have an offset assigned at this point. Parameters ---------- fd : generic_io.GenericFile The file to write internal blocks to. The file position should be after the tree. 
""" self._sort_blocks_by_offset() iter = self.internal_blocks last_block = next(iter) # We need to explicitly clear anything between the tree # and the first block, otherwise there may be other block # markers left over which will throw off block indexing. # We don't need to do this between each block. fd.clear(last_block.offset - fd.tell()) for block in iter: last_block.allocated = ((block.offset - last_block.offset) - last_block.header_size) fd.seek(last_block.offset) last_block.write(fd) last_block = block last_block.allocated = last_block.size fd.seek(last_block.offset) last_block.write(fd) fd.truncate(last_block.end_offset) def write_external_blocks(self, uri, pad_blocks=False): """ Write all blocks to disk serially. Parameters ---------- uri : str The base uri of the external blocks """ from . import asdf for i, block in enumerate(self.external_blocks): if uri is None: raise ValueError( "Can't write external blocks, since URI of main file is " "unknown.") subfd = self.get_external_uri(uri, i) asdffile = asdf.AsdfFile() block = copy.copy(block) block._array_storage = 'internal' asdffile.blocks.add(block) block._used = True asdffile.write_to(subfd, pad_blocks=pad_blocks) def write_block_index(self, fd, ctx): """ Write the block index. Parameters ---------- fd : GenericFile The file to write to. The file pointer should be at the end of the file. """ if len(self._internal_blocks) and not len(self._streamed_blocks): fd.write(constants.INDEX_HEADER) fd.write(b'\n') offsets = [x.offset for x in self.internal_blocks] yaml_version = tuple( int(x) for x in ctx.version_map['YAML_VERSION'].split('.')) yaml.dump( offsets, Dumper=yamlutil._yaml_base_dumper, stream=fd, explicit_start=True, explicit_end=True, version=yaml_version, allow_unicode=True, encoding='utf-8') _re_index_content = re.compile( br'^' + constants.INDEX_HEADER + br'\r?\n%YAML.*\.\.\.\r?\n?$') _re_index_misc = re.compile(br'^[\n\r\x20-\x7f]+$') def read_block_index(self, fd, ctx): """ Read the block index. Parameters ---------- fd : GenericFile The file to read from. It must be seekable. """ # This reads the block index by reading backward from the end # of the file. This tries to be as conservative as possible, # since not reading an index isn't a deal breaker -- # everything can still be read from the file, only slower. # Importantly, it must remain "transactionally clean", and not # create any blocks until we're sure the block index makes # sense. if not fd.seekable(): return if not len(self._internal_blocks): return first_block = self._internal_blocks[0] first_block_end = first_block.end_offset fd.seek(0, generic_io.SEEK_END) file_size = block_end = fd.tell() # We want to read on filesystem block boundaries. We use # "block_end - 5" here because we need to read at least 5 # bytes in the first block. 
block_start = ((block_end - 5) // fd.block_size) * fd.block_size buff_size = block_end - block_start content = b'' fd.seek(block_start, generic_io.SEEK_SET) buff = fd.read(buff_size) # Extra '\0' bytes are allowed after the ..., mainly to # workaround poor truncation support on Windows buff = buff.rstrip(b'\0') content = buff # We need an explicit YAML end marker, or there's no # block index for ending in (b'...', b'...\r\n', b'...\n'): if content.endswith(ending): break else: return # Read blocks in reverse order from the end of the file while True: # Look for the index header idx = content.find(constants.INDEX_HEADER) if idx != -1: content = content[idx:] index_start = block_start + idx break else: # If the rest of it starts to look like binary # values, bail... if not self._re_index_misc.match(buff): return if block_start <= first_block_end: return block_end = block_start block_start = max(block_end - fd.block_size, first_block_end) fd.seek(block_start, generic_io.SEEK_SET) buff_size = block_end - block_start buff = fd.read(buff_size) content = buff + content yaml_content = content[content.find(b'\n') + 1:] offsets = yaml.load(yaml_content, Loader=yamlutil._yaml_base_loader) # Make sure the indices look sane if not isinstance(offsets, list) or len(offsets) == 0: return last_offset = 0 for x in offsets: if (not isinstance(x, int) or x > file_size or x < 0 or x <= last_offset + Block._header.size): return last_offset = x # We always read the first block, so we can confirm that the # first entry in the block index matches the first block if offsets[0] != first_block.offset: return if len(offsets) == 1: # If there's only one block in the index, we've already # loaded the first block, so just return: we have nothing # left to do return # One last sanity check: Read the last block in the index and # make sure it makes sense. fd.seek(offsets[-1], generic_io.SEEK_SET) try: block = self._new_block().read(fd) except (ValueError, IOError): return # Now see if the end of the last block leads right into the index if (block.end_offset != index_start): return # It seems we're good to go, so instantiate the UnloadedBlock # objects for offset in offsets[1:-1]: self._internal_blocks.append( UnloadedBlock(fd, offset, memmap=self.memmap, lazy_load=self.lazy_load, readonly=self._readonly)) # We already read the last block in the file -- no need to read it again self._internal_blocks.append(block) # Materialize the internal blocks if we are not lazy if not self.lazy_load: self.finish_reading_internal_blocks() def get_external_filename(self, filename, index): """ Given a main filename and an index number, return a new file name for referencing an external block. """ filename = os.path.splitext(filename)[0] return filename + '{0:04d}.asdf'.format(index) def get_external_uri(self, uri, index): """ Given a main URI and an index number, return a new URI for saving an external block. 
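
        Examples
        --------
        Illustrative only; ``mgr`` stands in for a `BlockManager`
        instance::

            >>> mgr.get_external_uri('file:///data/example.asdf', 2)  # doctest: +SKIP
            'file:///data/example0002.asdf'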
""" if uri is None: uri = '' parts = list(urlparse.urlparse(uri)) path = parts[2] dirname, filename = os.path.split(path) filename = self.get_external_filename(filename, index) path = os.path.join(dirname, filename) parts[2] = path return urlparse.urlunparse(parts) def _find_used_blocks(self, tree, ctx): reserved_blocks = set() for node in treeutil.iter_tree(tree): hook = ctx.type_index.get_hook_for_type( 'reserve_blocks', type(node), ctx.version_string) if hook is not None: for block in hook(node, ctx): reserved_blocks.add(block) for block in list(self.blocks): if (getattr(block, '_used', 0) == 0 and block not in reserved_blocks): self.remove(block) def _handle_global_block_settings(self, ctx, block): all_array_storage = getattr(ctx, '_all_array_storage', None) if all_array_storage: self.set_array_storage(block, all_array_storage) all_array_compression = getattr(ctx, '_all_array_compression', 'input') # Only override block compression algorithm if it wasn't explicitly set # by AsdfFile.set_array_compression. if all_array_compression != 'input': block.output_compression = all_array_compression auto_inline = getattr(ctx, '_auto_inline', None) if auto_inline: if np.product(block.data.shape) < auto_inline: self.set_array_storage(block, 'inline') def finalize(self, ctx): """ At this point, we have a complete set of blocks for the file, with no extras. Here, they are reindexed, and possibly reorganized. """ # TODO: Should this reset the state (what's external and what # isn't) afterword? self._find_used_blocks(ctx.tree, ctx) for block in list(self.blocks): self._handle_global_block_settings(ctx, block) def get_block(self, source): """ Given a "source identifier", return a block. Parameters ---------- source : any If an integer, refers to the index of an internal block. If a string, is a uri to an external block. Returns ------- buffer : buffer """ # If an "int", it is the index of an internal block if isinstance(source, int): if source == -1: if len(self._streamed_blocks): return self._streamed_blocks[0] # If we don't have a streamed block, fall through so # we can read all of the blocks, ultimately arriving # at the last one, which, if all goes well is a # streamed block. # First, look in the blocks we've already read elif source >= 0: if source < len(self._internal_blocks): return self._internal_blocks[source] else: raise ValueError("Invalid source id {0}".format(source)) # If we have a streamed block or we already know we have # no blocks, reading any further isn't going to yield any # new blocks. if len(self._streamed_blocks) or len(self._internal_blocks) == 0: raise ValueError("Block '{0}' not found.".format(source)) # If the desired block hasn't already been read, and the # file is seekable, and we have at least one internal # block, then we can move the file pointer to the end of # the last known internal block, and start looking for # more internal blocks. This is "deferred block loading". 
last_block = self._internal_blocks[-1] if (last_block._fd is not None and last_block._fd.seekable()): last_block._fd.seek(last_block.end_offset) while True: next_block = self._read_next_internal_block( last_block._fd, False) if next_block is None: break if len(self._internal_blocks) - 1 == source: return next_block last_block = next_block if (source == -1 and last_block.array_storage == 'streamed'): return last_block raise ValueError("Block '{0}' not found.".format(source)) elif isinstance(source, str): asdffile = self._asdffile().open_external( source, do_not_fill_defaults=True) block = asdffile.blocks._internal_blocks[0] self.set_array_storage(block, 'external') # Handle the case of inline data elif isinstance(source, list): block = Block(data=np.array(source), array_storage='inline') else: raise TypeError("Unknown source '{0}'".format(source)) return block def get_source(self, block): """ Get a source identifier for a given block. Parameters ---------- block : Block Returns ------- source_id : str May be an integer for an internal block, or a URI for an external block. """ for i, internal_block in enumerate(self.internal_blocks): if block == internal_block: if internal_block.array_storage == 'streamed': return -1 return i for i, external_block in enumerate(self.external_blocks): if block == external_block: if self._asdffile().uri is None: raise ValueError( "Can't write external blocks, since URI of main file is " "unknown.") parts = list(urlparse.urlparse(self._asdffile().uri)) path = parts[2] filename = os.path.basename(path) return self.get_external_filename(filename, i) raise ValueError("block not found.") def _should_inline(self, array): if not np.issubdtype(array.dtype, np.number): return False if isinstance(array, masked_array): return False # Make sure none of the values are too large to store as literals if (array[~np.isnan(array)] > 2**52).any(): return False return array.size <= self._inline_threshold_size def find_or_create_block_for_array(self, arr, ctx): """ For a given array, looks for an existing block containing its underlying data. If not found, adds a new block to the block list. Returns the index in the block list to the array. Parameters ---------- arr : numpy.ndarray Returns ------- block : Block """ from .tags.core import ndarray if (isinstance(arr, ndarray.NDArrayType) and arr.block is not None): if arr.block in self.blocks: return arr.block else: arr._block = None base = util.get_array_base(arr) block = self._data_to_block_mapping.get(id(base)) if block is not None: return block block = Block(base) self.add(block) self._handle_global_block_settings(ctx, block) return block def get_streamed_block(self): """ Get the streamed block, which is always the last one. A streamed block, on writing, does not manage data of its own, but the user is expected to stream it to disk directly. """ block = self.streamed_block if block is None: block = Block(array_storage='streamed') self.add(block) return block def add_inline(self, array): """ Add an inline block for ``array`` to the block set. """ block = Block(array, array_storage='inline') self.add(block) return block def __getitem__(self, arr): return self.find_or_create_block_for_array(arr, object()) def close(self): for block in self.blocks: block.close() class Block: """ Represents a single block in a ASDF file. This is an implementation detail and should not be instantiated directly. Instead, should only be created through the `BlockManager`. 
""" _header = util.BinaryStruct([ ('flags', 'I'), ('compression', '4s'), ('allocated_size', 'Q'), ('used_size', 'Q'), ('data_size', 'Q'), ('checksum', '16s') ]) def __init__(self, data=None, uri=None, array_storage='internal', memmap=True, lazy_load=True): if isinstance(data, np.ndarray) and not data.flags.c_contiguous: if data.flags.f_contiguous: self._data = np.asfortranarray(data) else: self._data = np.ascontiguousarray(data) else: self._data = data self._uri = uri self._array_storage = array_storage self._fd = None self._offset = None self._input_compression = None self._output_compression = 'input' self._checksum = None self._should_memmap = memmap self._memmapped = False self._lazy_load = lazy_load self._readonly = False self.update_size() self._allocated = self._size def __repr__(self): return ''.format( self._array_storage[:3], self._offset, self._allocated, self._size) def __len__(self): return self._size @property def offset(self): return self._offset @offset.setter def offset(self, offset): self._offset = offset @property def allocated(self): return self._allocated @allocated.setter def allocated(self, allocated): self._allocated = allocated @property def header_size(self): return self._header.size + constants.BLOCK_HEADER_BOILERPLATE_SIZE @property def data_offset(self): return self._offset + self.header_size @property def size(self): return self._size + self.header_size @property def end_offset(self): """ The offset of the end of the allocated space for the block, and where the next block should begin. """ return self.offset + self.header_size + self.allocated def override_byteorder(self, byteorder): return byteorder @property def array_storage(self): return self._array_storage @property def input_compression(self): """ The compression codec used to read the block. """ return self._input_compression @input_compression.setter def input_compression(self, compression): self._input_compression = mcompression.validate(compression) @property def output_compression(self): """ The compression codec used to write the block. :return: """ if self._output_compression == 'input': return self._input_compression return self._output_compression @output_compression.setter def output_compression(self, compression): self._output_compression = mcompression.validate(compression) @property def checksum(self): return self._checksum @property def readonly(self): return self._readonly def _set_checksum(self, checksum): if checksum == b'\0' * 16: self._checksum = None else: self._checksum = checksum def _calculate_checksum(self, data): m = hashlib.new('md5') m.update(self.data.flatten()) return m.digest() def validate_checksum(self): """ Validate the content of the block against the current checksum. Returns ------- valid : bool `True` if the content is valid against the current checksum or there is no current checksum. Otherwise, `False`. """ if self._checksum: checksum = self._calculate_checksum(self.data) if checksum != self._checksum: return False return True def update_checksum(self): """ Update the checksum based on the current data contents. """ self._checksum = self._calculate_checksum(self.data) def update_size(self): """ Recalculate the on-disk size of the block. This causes any compression steps to run. It should only be called when updating the file in-place, otherwise the work is redundant. 
""" if self._data is not None: self._data_size = self._data.data.nbytes if not self.output_compression: self._size = self._data_size else: self._size = mcompression.get_compressed_size( self._data, self.output_compression) else: self._data_size = self._size = 0 def read(self, fd, past_magic=False, validate_checksum=False): """ Read a Block from the given Python file-like object. If the file is seekable and lazy_load is True, the reading or memmapping of the actual data is postponed until an array requests it. If the file is a stream or lazy_load is False, the data will be read into memory immediately. Parameters ---------- fd : GenericFile past_magic : bool, optional If `True`, the file position is immediately after the block magic token. If `False` (default), the file position is exactly at the beginning of the block magic token. validate_checksum : bool, optional If `True`, validate the data against the checksum, and raise a `ValueError` if the data doesn't match. """ offset = None if fd.seekable(): offset = fd.tell() if not past_magic: buff = fd.read(len(constants.BLOCK_MAGIC)) if len(buff) < 4: return None if buff not in (constants.BLOCK_MAGIC, constants.INDEX_HEADER[:len(buff)]): raise ValueError( "Bad magic number in block. " "This may indicate an internal inconsistency about the " "sizes of the blocks in the file.") if buff == constants.INDEX_HEADER[:len(buff)]: return None elif offset is not None: offset -= 4 buff = fd.read(2) header_size, = struct.unpack(b'>H', buff) if header_size < self._header.size: raise ValueError( "Header size must be >= {0}".format(self._header.size)) buff = fd.read(header_size) header = self._header.unpack(buff) # This is used by the documentation system, but nowhere else. self._flags = header['flags'] self.input_compression = header['compression'] self._set_checksum(header['checksum']) if (self.input_compression is None and header['used_size'] != header['data_size']): raise ValueError( "used_size and data_size must be equal when no compression is used.") if (header['flags'] & constants.BLOCK_FLAG_STREAMED and self.input_compression is not None): raise ValueError( "Compression set on a streamed block.") if fd.seekable(): # If the file is seekable, we can delay reading the actual # data until later. self._fd = fd self._offset = offset self._header_size = header_size if header['flags'] & constants.BLOCK_FLAG_STREAMED: # Support streaming blocks self._array_storage = 'streamed' if self._lazy_load: fd.fast_forward(-1) self._data_size = self._size = self._allocated = \ (fd.tell() - self.data_offset) + 1 else: self._data = fd.read_into_array(-1) self._data_size = self._size = self._allocated = len(self._data) else: self._allocated = header['allocated_size'] self._size = header['used_size'] self._data_size = header['data_size'] if self._lazy_load: fd.fast_forward(self._allocated) else: curpos = fd.tell() self._memmap_data() fd.seek(curpos) if not self._memmapped: self._data = self._read_data(fd, self._size, self._data_size) fd.fast_forward(self._allocated - self._size) else: fd.fast_forward(self._allocated) else: # If the file is a stream, we need to get the data now. 
if header['flags'] & constants.BLOCK_FLAG_STREAMED: # Support streaming blocks self._array_storage = 'streamed' self._data = fd.read_into_array(-1) self._data_size = self._size = self._allocated = len(self._data) else: self._allocated = header['allocated_size'] self._size = header['used_size'] self._data_size = header['data_size'] self._data = self._read_data(fd, self._size, self._data_size) fd.fast_forward(self._allocated - self._size) fd.close() if validate_checksum and not self.validate_checksum(): raise ValueError( "Block at {0} does not match given checksum".format( self._offset)) return self def _read_data(self, fd, used_size, data_size): """ Read the block data from a file. """ if not self.input_compression: return fd.read_into_array(used_size) else: return mcompression.decompress( fd, used_size, data_size, self.input_compression) def _memmap_data(self): """ Memory map the block data from the file. """ memmap = self._fd.can_memmap() and not self.input_compression if self._should_memmap and memmap: self._data = self._fd.memmap_array(self.data_offset, self._size) self._memmapped = True def write(self, fd): """ Write an internal block to the given Python file-like object. """ self._header_size = self._header.size flags = 0 data_size = used_size = allocated_size = 0 if self._array_storage == 'streamed': flags |= constants.BLOCK_FLAG_STREAMED elif self._data is not None: self.update_checksum() data_size = self._data.nbytes if not fd.seekable() and self.output_compression: buff = io.BytesIO() mcompression.compress(buff, self._data, self.output_compression) self.allocated = self._size = buff.tell() allocated_size = self.allocated used_size = self._size self.input_compression = self.output_compression assert allocated_size >= used_size if self.checksum is not None: checksum = self.checksum else: checksum = b'\0' * 16 fd.write(constants.BLOCK_MAGIC) fd.write(struct.pack(b'>H', self._header_size)) fd.write(self._header.pack( flags=flags, compression=mcompression.to_compression_header( self.output_compression), allocated_size=allocated_size, used_size=used_size, data_size=data_size, checksum=checksum)) if self._data is not None: if self.output_compression: if not fd.seekable(): fd.write(buff.getvalue()) else: # If the file is seekable, we write the # compressed data directly to it, then go back # and write the resulting size in the block # header. start = fd.tell() mcompression.compress( fd, self._data, self.output_compression) end = fd.tell() self.allocated = self._size = end - start fd.seek(self.offset + 6) self._header.update( fd, allocated_size=self.allocated, used_size=self._size) fd.seek(end) else: assert used_size == data_size fd.write_array(self._data) @property def data(self): """ Get the data for the block, as a numpy array. """ if self._data is None: if self._fd.is_closed(): raise IOError( "ASDF file has already been closed. " "Can not get the data.") # Be nice and reset the file position after we're done curpos = self._fd.tell() try: self._memmap_data() if not self._memmapped: self._fd.seek(self.data_offset) self._data = self._read_data( self._fd, self._size, self._data_size) finally: self._fd.seek(curpos) return self._data def close(self): if self._memmapped and self._data is not None: if NUMPY_LT_1_7: # pragma: no cover try: self._data.flush() except ValueError: pass else: self._data.flush() if self._data._mmap is not None: self._data._mmap.close() self._data = None class UnloadedBlock: """ Represents an indexed, but not yet loaded, internal block. 
All that is known about it is its offset. It converts itself to a full-fledged block whenever the underlying data or more detail is requested. """ def __init__(self, fd, offset, memmap=True, lazy_load=True, readonly=False): self._fd = fd self._offset = offset self._data = None self._uri = None self._array_storage = 'internal' self._input_compression = None self._output_compression = 'input' self._checksum = None self._should_memmap = memmap self._memmapped = False self._lazy_load = lazy_load self._readonly = readonly def __len__(self): self.load() return len(self) def close(self): pass @property def array_storage(self): return 'internal' @property def offset(self): return self._offset def __getattr__(self, attr): self.load() return getattr(self, attr) def load(self): self._fd.seek(self._offset, generic_io.SEEK_SET) self.__class__ = Block self.read(self._fd) def calculate_updated_layout(blocks, tree_size, pad_blocks, block_size): """ Calculates a block layout that will try to use as many blocks as possible in their original locations, though at this point the algorithm is fairly naive. The result will be stored in the offsets of the blocks. Parameters ---------- blocks : Blocks instance tree_size : int The amount of space to reserve for the tree at the beginning. Returns ------- Returns `False` if no good layout can be found and one is best off rewriting the file serially, otherwise, returns `True`. """ def unfix_block(i): # If this algorithm gets more sophisticated we could carefully # move memmapped blocks around without clobbering other ones. # TODO: Copy to a tmpfile on disk and memmap it from there. entry = fixed[i] copy = entry.block.data.copy() entry.block.close() entry.block._data = copy del fixed[i] free.append(entry.block) def fix_block(block, offset): block.offset = offset fixed.append(Entry(block.offset, block.offset + block.size, block)) fixed.sort() Entry = namedtuple("Entry", ['start', 'end', 'block']) fixed = [] free = [] for block in blocks._internal_blocks: if block.offset is not None: block.update_size() fixed.append( Entry(block.offset, block.offset + block.size, block)) else: free.append(block) if not len(fixed): return False fixed.sort() # Make enough room at the beginning for the tree, by popping off # blocks at the beginning while len(fixed) and fixed[0].start < tree_size: unfix_block(0) if not len(fixed): return False # This algorithm is pretty basic at this point -- it just looks # for the first open spot big enough for the free block to fit. 
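    # (First-fit: walk the gaps between already-fixed blocks in offset
    # order, place the free block in the first gap that can hold it, and
    # otherwise append it after the last fixed block, with padding.)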
    while len(free):
        block = free.pop()
        last_end = tree_size
        for entry in fixed:
            if entry.start - last_end >= block.size:
                fix_block(block, last_end)
                break
            last_end = entry.end
        else:
            padding = util.calculate_padding(
                entry.block.size, pad_blocks, block_size)
            fix_block(block, last_end + padding)

    if blocks.streamed_block is not None:
        padding = util.calculate_padding(
            fixed[-1].block.size, pad_blocks, block_size)
        blocks.streamed_block.offset = fixed[-1].end + padding

    blocks._sort_blocks_by_offset()

    return True
asdf-2.5.1/asdf/commands/0000755000446400020070000000000013605166132017351 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/commands/__init__.py0000644000446400020070000000061013567314375021472 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import importlib

from .exploded import *
from .to_yaml import *
from .defragment import *
from .diff import *
from .tags import *
from .extension import *

# Extracting ASDF-in-FITS files requires Astropy
if importlib.util.find_spec('astropy'):
    from .extract import *
    from .remove_hdu import *
asdf-2.5.1/asdf/commands/defragment.py0000644000446400020070000000402313567314375022051 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

"""
Defragment command.
"""

import os

import asdf
from .main import Command
from .. import AsdfFile


__all__ = ['defragment']


class Defragment(Command):
    @classmethod
    def setup_arguments(cls, subparsers):
        parser = subparsers.add_parser(
            str("defragment"), help="Defragment an ASDF file.",
            description="""Removes any unused blocks and unused space.""")

        parser.add_argument(
            'filename', nargs=1, help="""The ASDF file to collect.""")
        parser.add_argument(
            "--output", "-o", type=str, nargs="?",
            help="""The name of the output file.""")
        parser.add_argument(
            "--resolve-references", "-r", action="store_true",
            help="""Resolve all references and store them directly in
            the output file.""")
        parser.add_argument(
            "--compress", "-c", type=str, nargs="?",
            choices=['zlib', 'bzp2', 'lz4'],
            help="""Compress blocks using one of "zlib", "bzp2" or "lz4".""")

        parser.set_defaults(func=cls.run)

        return parser

    @classmethod
    def run(cls, args):
        return defragment(args.filename[0], args.output,
                          args.resolve_references, args.compress)


def defragment(input, output=None, resolve_references=False, compress=None):
    """
    Defragment a given ASDF file.

    Parameters
    ----------
    input : str or file-like object
        The input file.

    output : str or file-like object
        The output file.

    resolve_references : bool, optional
        If `True` resolve all external references before saving.

    compress : str, optional
        Compression to use.
    """
    with asdf.open(input) as ff:
        ff2 = AsdfFile(ff)
        if resolve_references:
            ff2.resolve_references()
        ff2.write_to(
            output,
            all_array_storage='internal',
            all_array_compression=compress)
asdf-2.5.1/asdf/commands/diff.py0000644000446400020070000002264713567314375020651 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

"""
Implementation of command for displaying differences between two ASDF files.
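
Example (illustrative command-line invocation; any two ASDF files work):

    asdftool diff old.asdf new.asdf
    asdftool diff --minimal old.asdf new.asdf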
""" import os import sys from numpy import array_equal try: # Provides cross-platform color support import colorama colorama.init() RED = colorama.Fore.RED GREEN = colorama.Fore.GREEN RESET = colorama.Style.RESET_ALL except ImportError: from sys import platform # These platforms should support ansi color codes if platform.startswith('linux') or platform.startswith('darwin'): RED = '\x1b[31m' GREEN = '\x1b[32m' RESET = '\x1b[0m' else: RED = '' GREEN = '' RESET = '' import asdf from .main import Command from .. import AsdfFile from .. import treeutil from ..tagged import Tagged from ..util import human_list from ..tags.core.ndarray import NDArrayType __all__ = ['diff'] RESET_NEWLINE = RESET + '\n' NDARRAY_TAG = 'core/ndarray' LIST_MARKER = '-' THIS_MARKER = GREEN + "> " THAT_MARKER = RED + "< " class Diff(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("diff"), help="Report differences between two ASDF files", description="""Reports differences between two ASDF files""") parser.add_argument( 'filenames', metavar='asdf_file', nargs=2, help="The ASDF files to compare.") parser.add_argument( '-m', '--minimal', action='store_true', help="Show minimal differences between the two files") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return diff(args.filenames, args.minimal) class ArrayNode: """This class is used to represent unique dummy nodes in the diff tree. In general these dummy nodes will be list elements that we want to keep track of but not necessarily display. This allows the diff output to be cleaner.""" def __init__(self, name): self.name = name def __hash__(self): return hash(self.name) class PrintTree: """This class is used to remember the nodes in the tree that have already been displayed in the diff output. 
""" def __init__(self): self.__tree = dict(visited=False, children=dict()) def get_print_list(self, node_list): at_end = False print_list = [] current = self.__tree for node in ['tree'] + node_list: if at_end: print_list.append(node) elif not node in current['children']: print_list.append(node) at_end = True elif not current['children'][node]['visited']: print_list.append(node) else: print_list.append(None) if not at_end: current = current['children'][node] return print_list def __setitem__(self, node_list, visit): assert isinstance(node_list, list) current = self.__tree for node in ['tree'] + node_list: if not node in current['children']: current['children'][node] = dict(visited=True, children=dict()) current = current['children'][node] class DiffContext: """Class that contains context data of the diff to be computed""" def __init__(self, asdf0, asdf1, iostream, minimal=False): self.asdf0 = asdf0 self.asdf1 = asdf1 self.iostream = iostream self.minimal = minimal self.print_tree = PrintTree() def print_tree_context(diff_ctx, node_list, other, use_marker, last_was_list): """Print context information indicating location in ASDF tree.""" prefix = "" marker = THAT_MARKER if other else THIS_MARKER for node in diff_ctx.print_tree.get_print_list(node_list): if node is not None: node = LIST_MARKER if isinstance(node, ArrayNode) else node + ":" # All of this logic is just to make the display of arrays prettier if use_marker: line_prefix = " " if last_was_list else marker + prefix[2:] line_suffix = "" if node == LIST_MARKER else RESET_NEWLINE else: line_prefix = prefix line_suffix = RESET_NEWLINE diff_ctx.iostream.write(line_prefix + node + line_suffix) last_was_list = node == LIST_MARKER prefix += " " diff_ctx.print_tree[node_list] = True return last_was_list def print_in_tree(diff_ctx, node_list, thing, other, use_marker=False, last_was_list=False, ignore_lwl=False): """Recursively print tree context and diff information about object.""" last_was_list = print_tree_context( diff_ctx, node_list, other, use_marker, last_was_list) # If tree element is list, recursively print list contents if isinstance(thing, list): for i, subthing in enumerate(thing): key = ArrayNode("{}_{}".format(node_list[-1], i)) last_was_list = print_in_tree( diff_ctx, node_list+[key], subthing, other, use_marker=True, last_was_list=last_was_list, ignore_lwl=ignore_lwl) # If tree element is dictionary, recursively print dictionary contents elif isinstance(thing, dict): for key in sorted(thing.keys()): last_was_list = print_in_tree( diff_ctx, node_list+[key], thing[key], other, use_marker=True, last_was_list=last_was_list, ignore_lwl=ignore_lwl) # Print difference between leaf objects (no need to recurse further) else: use_marker = not last_was_list or ignore_lwl marker = THAT_MARKER if other else THIS_MARKER prefix = marker + " " * len(node_list) if use_marker else " " diff_ctx.iostream.write(prefix + str(thing) + RESET_NEWLINE) last_was_list = False return last_was_list def compare_objects(diff_ctx, obj0, obj1, keys=[]): """Displays diff of two objects if they are not equal""" if obj0 != obj1: print_in_tree(diff_ctx, keys, obj0, False, ignore_lwl=True) print_in_tree(diff_ctx, keys, obj1, True, ignore_lwl=True) def print_dict_diff(diff_ctx, tree, node_list, keys, other): """Recursively traverses dictionary object and displays differences""" for key in keys: if diff_ctx.minimal: nodes = node_list key = key else: nodes = node_list+[key] key = tree[key] use_marker = not diff_ctx.minimal print_in_tree(diff_ctx, nodes, key, other, 
                      use_marker=use_marker)


def compare_ndarrays(diff_ctx, array0, array1, keys):
    """Compares two ndarray objects"""
    ignore_keys = set(['source', 'data'])
    compare_dicts(diff_ctx, array0, array1, keys, ignore_keys)

    differences = []
    for field in ['shape', 'datatype']:
        if array0[field] != array1[field]:
            differences.append(field)

    array0 = NDArrayType.from_tree(array0, diff_ctx.asdf0)
    array1 = NDArrayType.from_tree(array1, diff_ctx.asdf1)
    if not array_equal(array0, array1):
        differences.append('contents')

    if differences:
        prefix = "  " * (len(keys) + 1)
        msg = "ndarrays differ by {}".format(human_list(differences))
        diff_ctx.iostream.write(prefix + RED + msg + RESET_NEWLINE)


def both_are_ndarrays(tree0, tree1):
    """Returns True if both inputs correspond to ndarrays, False otherwise"""
    if not (isinstance(tree0, Tagged) and isinstance(tree1, Tagged)):
        return False
    if not (NDARRAY_TAG in tree0._tag and NDARRAY_TAG in tree1._tag):
        return False
    return True


def compare_dicts(diff_ctx, dict0, dict1, keys, ignores=set()):
    """Recursively compares two dictionary objects"""
    keys0 = set(dict0.keys()) - ignores
    keys1 = set(dict1.keys()) - ignores
    # Recurse into subtree elements that are shared by both trees
    for key in sorted(keys0 & keys1):
        obj0 = dict0[key]
        obj1 = dict1[key]
        compare_trees(diff_ctx, obj0, obj1, keys=keys+[key])
    # Display subtree elements existing only in this tree
    print_dict_diff(diff_ctx, dict0, keys, sorted(keys0-keys1), False)
    # Display subtree elements existing only in that tree
    print_dict_diff(diff_ctx, dict1, keys, sorted(keys1-keys0), True)


def compare_trees(diff_ctx, tree0, tree1, keys=[]):
    """Recursively traverses two ASDF trees and compares them"""
    if both_are_ndarrays(tree0, tree1):
        compare_ndarrays(diff_ctx, tree0, tree1, keys)
    elif isinstance(tree0, dict) and isinstance(tree1, dict):
        compare_dicts(diff_ctx, tree0, tree1, keys)
    elif isinstance(tree0, list) and isinstance(tree1, list):
        for i, (obj0, obj1) in enumerate(zip(tree0, tree1)):
            key = ArrayNode("item_{}".format(i))
            compare_trees(diff_ctx, obj0, obj1, keys+[key])
    else:
        compare_objects(diff_ctx, tree0, tree1, keys)


def diff(filenames, minimal, iostream=sys.stdout):
    """Top-level implementation of diff algorithm"""
    try:
        with asdf.open(filenames[0], _force_raw_types=True) as asdf0:
            with asdf.open(filenames[1], _force_raw_types=True) as asdf1:
                diff_ctx = DiffContext(asdf0, asdf1, iostream,
                                       minimal=minimal)
                compare_trees(diff_ctx, asdf0.tree, asdf1.tree)
    except ValueError as error:
        raise RuntimeError(str(error))
asdf-2.5.1/asdf/commands/exploded.py0000644000446400020070000000652213567314375021547 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

"""
Contains commands for dealing with exploded and imploded forms.
"""

import os

import asdf
from .main import Command
from .. import AsdfFile


__all__ = ['implode', 'explode']


class Implode(Command):
    @classmethod
    def setup_arguments(cls, subparsers):
        parser = subparsers.add_parser(
            str("implode"), help="Implode an ASDF file.",
            description="""Combine an ASDF file, where the data may be
            stored in multiple ASDF files, into a single ASDF file.""")

        parser.add_argument(
            'filename', nargs=1, help="""The ASDF file to implode.""")
        parser.add_argument(
            "--output", "-o", type=str, nargs="?",
            help="""The name of the output file.
            If not provided, it will be the name of the input file with
            "_all" appended.""")
        parser.add_argument(
            "--resolve-references", "-r", action="store_true",
            help="""Resolve all references and store them directly in
            the output file.""")

        parser.set_defaults(func=cls.run)

        return parser

    @classmethod
    def run(cls, args):
        return implode(args.filename[0], args.output,
                       args.resolve_references)


def implode(input, output=None, resolve_references=False):
    """
    Implode a given ASDF file, which may reference external data, back
    into a single ASDF file.

    Parameters
    ----------
    input : str or file-like object
        The input file.

    output : str or file-like object
        The output file.

    resolve_references : bool, optional
        If `True` resolve all external references before saving.
    """
    if output is None:
        base, ext = os.path.splitext(input)
        output = base + '_all' + '.asdf'

    with asdf.open(input) as ff:
        ff2 = AsdfFile(ff)
        if resolve_references:
            ff2.resolve_references()
        ff2.write_to(output, all_array_storage='internal')


class Explode(Command):
    @classmethod
    def setup_arguments(cls, subparsers):
        parser = subparsers.add_parser(
            str("explode"), help="Explode an ASDF file.",
            description="""From a single ASDF file, create a set of
            ASDF files where each data block is stored in a separate
            file.""")

        parser.add_argument(
            'filename', nargs=1, help="""The ASDF file to explode.""")
        parser.add_argument(
            "--output", "-o", type=str, nargs="?",
            help="""The name of the output file.  If not provided, it
            will be the name of the input file with "_exploded" appended.""")

        parser.set_defaults(func=cls.run)

        return parser

    @classmethod
    def run(cls, args):
        return explode(args.filename[0], args.output)


def explode(input, output=None):
    """
    Explode a given ASDF file so each data block is in a separate file.

    Parameters
    ----------
    input : str or file-like object
        The input file.

    output : str or file-like object
        The output file.
    """
    if output is None:
        base, ext = os.path.splitext(input)
        output = base + '_exploded' + '.asdf'

    with asdf.open(input) as ff:
        ff.write_to(output, all_array_storage='external')
asdf-2.5.1/asdf/commands/extension.py0000644000446400020070000000461313567314375021756 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

"""
Implementation of command for reporting information about installed
extensions.
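
Example (illustrative invocations; asdftool is the console script
installed with this package):

    asdftool extensions
    asdftool extensions --summary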
""" import sys from pkg_resources import iter_entry_points from .main import Command class QueryExtension(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( "extensions", help="Show information about installed extensions", description="""Reports information about installed ASDF extensions""") parser.add_argument( "-s", "--summary", action="store_true", help="Display only the installed extensions themselves") parser.add_argument( "-t", "--tags-only", action="store_true", help="Display tags from installed extensions, but no other information") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): if args.summary and args.tags_only: sys.stderr.write( "ERROR: Options -s/--summary and -t/--tags-only are not compatible\n") return 1 return find_extensions(args.summary, args.tags_only) def _format_entry_point(ep): extension_class = "{}.{}".format(ep.module_name, ep.attrs[0]) return "Extension Name: '{}' (from {}) Class: {}".format( ep.name, ep.dist, extension_class) def _format_type_name(typ): return "{}.{}".format(typ.__module__, typ.__name__) def _tag_comparator(a, b): return _format_type_name(a) < _format_type_name(b) def _print_extension_details(ext, tags_only): for typ in sorted(ext.types, key=lambda x: _format_type_name(x)): if typ.name is not None: print("- " + _format_type_name(typ)) if not tags_only: print(" implements: {}".format(typ.make_yaml_tag(typ.name))) if typ.types: print(" serializes:") for name in typ.types: print(" - {}".format(_format_type_name(name))) def find_extensions(summary, tags_only): for ep in iter_entry_points(group='asdf_extensions'): print(_format_entry_point(ep)) if not summary: _print_extension_details(ep.load()(), tags_only) print() asdf-2.5.1/asdf/commands/extract.py0000644000446400020070000000327213567314375021414 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Implementation of command for converting ASDF-in-FITS to standalone ASDF file. 
""" import sys import asdf from asdf import AsdfFile from asdf.fits_embed import AsdfInFits from .main import Command __all__ = ['extract_file'] class AsdfExtractor(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser(str("extract"), help="Extract ASDF extensions in ASDF-in-FITS files into pure ASDF files", description="Extracts ASDF extensions into pure ASDF files.") parser.add_argument( 'infile', action='store', type=str, help="Name of ASDF-in-FITS file containing extension to be extracted") parser.add_argument( 'outfile', action='store', type=str, help="Name of new pure ASDF file containing extracted extension") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return extract_file(args.infile, args.outfile) def extract_file(input_file, output_file): """Function for performing extraction from ASDF-in-FITS to pure ASDF.""" try: with asdf.open(input_file) as ih: if not isinstance(ih, AsdfInFits): msg = "Given input file '{}' is not ASDF-in-FITS" raise RuntimeError(msg.format(input_file)) with asdf.AsdfFile(ih.tree) as oh: oh.write_to(output_file) except (IOError, ValueError) as error: raise RuntimeError(str(error)) asdf-2.5.1/asdf/commands/main.py0000644000446400020070000000401013567314375020655 0ustar eslavichSTSCI\science00000000000000# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst import argparse import logging import sys from .. import util # This list is ordered in order of average workflow command_order = [ 'Explode', 'Implode' ] class Command: @classmethod def setup_arguments(cls, subparsers): raise NotImplementedError() @classmethod def run(cls, args): raise NotImplementedError() def make_argparser(): """ Most of the real work is handled by the subcommands in the commands subpackage. """ def help(args): parser.print_help() return 0 parser = argparse.ArgumentParser( "asdftool", description="Commandline utilities for managing ASDF files.") parser.add_argument( "--verbose", "-v", action="store_true", help="Increase verbosity") subparsers = parser.add_subparsers( title='subcommands', description='valid subcommands') help_parser = subparsers.add_parser( str("help"), help="Display usage information") help_parser.set_defaults(func=help) commands = dict((x.__name__, x) for x in util.iter_subclasses(Command)) for command in command_order: commands[str(command)].setup_arguments(subparsers) del commands[command] for name, command in sorted(commands.items()): command.setup_arguments(subparsers) return parser, subparsers def main_from_args(args): parser, subparsers = make_argparser() args = parser.parse_args(args) # Only needed for Python 3, apparently, but can't hurt if not hasattr(args, 'func'): parser.print_help() return 2 try: result = args.func(args) except RuntimeError as e: logging.error(str(e)) return 1 except IOError as e: logging.error(str(e)) return e.errno if result is None: result = 0 return result def main(args=None): if args is None: args = sys.argv[1:] sys.exit(main_from_args(args)) asdf-2.5.1/asdf/commands/remove_hdu.py0000644000446400020070000000267113567314375022101 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Implementation of command for removing ASDF HDU from ASDF-in-FITS file. 
""" import sys from astropy.io import fits from .main import Command __all__ = ['remove_hdu'] class FitsExtractor(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser(str("remove-hdu"), help="Remove ASDF extension from ASDF-in-FITS file", description="Removes ASDF extensions from ASDF-in-FITS files.") parser.add_argument('infile', action='store', type=str, help="Name of ASDF-in-FITS file containing extension to be removed") parser.add_argument('outfile', action='store', type=str, help="Name of new FITS output file") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return remove_hdu(args.infile, args.outfile) def remove_hdu(input_file, output_file): """Function for removing ASDF HDU from ASDF-in-FITS files""" try: with fits.open(input_file) as hdulist: hdulist.readall() asdf_hdu = hdulist['ASDF'] hdulist.remove(asdf_hdu) hdulist.writeto(output_file) except (ValueError, KeyError) as error: raise RuntimeError(str(error)) asdf-2.5.1/asdf/commands/tags.py0000644000446400020070000000260413567314375020676 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Implementation of command for displaying available tags in asdf """ import sys from .main import Command from .. import AsdfFile __all__ = ['list_tags'] class TagLister(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("tags"), help="List currently available tags", description="""Lists currently available tags.""") parser.add_argument( '-d', '--display-classes', action='store_true', help="""Display associated class names in addition to tags""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return list_tags(display_classes=args.display_classes) def _qualified_name(_class): return "{}.{}".format(_class.__module__, _class.__name__) def list_tags(display_classes=False, iostream=sys.stdout): """Function to list tags""" af = AsdfFile() type_by_tag = af._extensions._type_index._type_by_tag tags = sorted(type_by_tag.keys()) for tag in tags: string = str(tag) if display_classes: string += ": " + _qualified_name(type_by_tag[tag]) iostream.write(string + '\n') asdf-2.5.1/asdf/commands/tests/0000755000446400020070000000000013605166132020513 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/commands/tests/__init__.py0000644000446400020070000000013013567314375022631 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- asdf-2.5.1/asdf/commands/tests/data/0000755000446400020070000000000013605166132021424 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/commands/tests/data/__init__.py0000644000446400020070000000010013567314375023537 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst asdf-2.5.1/asdf/commands/tests/data/block0.asdf0000644000446400020070000023512513567314375023460 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! 
%TAG ! tag:stsci.edu:asdf/
--- !core/asdf-1.0.0
asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute,
  homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev870}
foobar: !core/ndarray-1.0.0
  source: 0
  datatype: uint64
  byteorder: little
  bizbaz: green
  shape: [9000]
...
[binary ASDF block data omitted -- not representable as text]
#ASDF BLOCK INDEX
%YAML 1.1
--- [359]
...
asdf-2.5.1/asdf/commands/tests/data/block1.asdf0000644000446400020070000023512513567314375023461 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0
#ASDF_STANDARD 1.1.0
%YAML 1.1
%TAG ! tag:stsci.edu:asdf/
--- !core/asdf-1.0.0
asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute,
  homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev870}
foobar: !core/ndarray-1.0.0
  bizbaz: red
  source: 0
  datatype: float64
  byteorder: little
  shape: [10000]
...
[binary ASDF block data omitted -- not representable as text]
Mt?X??sO92O?ȃ9V?`-t)?2b} ?A? zS?pt?0i?@];j-?"F(? U?8¦?8m?n]b? ?]b:ۄ?Ys ?\_??~WÉ?[\h ?\M??*8(?tuV?|[Z?]P^rC?Џ? H(?Fv:?Oq?L? 贛;?7C?ZrX?`9q?,X~U?8)?8$$h~?XRO?yK? ?7?xb;Zj;?tn:?jT5j?s')? ?;?P;^?eѩ? $@?F\?0?3z?e n?;j?%7?KN!?|u%?_.*?%­e?4S?4xQQ?a=J?U?(in? hXz?P{7?Dpc?lT]?\?Js?AMD3?Z1}?e?T# E?4Lb?h? e?P_N4?"d?4 .?fH?g=?ɪ?%!Ϫ?0 -?\{?P7}5?cǀ?M(jc.?Lᜎ?xaU??B ?R>W?ޜ! ?N?ZW?Xq?d$o3?YW:k?;fՁ;?솥w?c_?P2 ? ?Z#Cy?ְGH-?x~? le&? [?"z?=jL?.(?i+v?h.u?4k ?6i?j-HG?b??1qg??N?nM8w?ܟ1?89 ?f}?]*?xMU[?Z_w?ZX+Z?O?#?$,a?;Da?*l?H'$ש?:hoʭ?/8"=?w? ?2|?^LE?q ?D:W|?lJOd?}? Q?`??\(N?0Q??w?%v?x1(?@?@/?xK?>?1dsN?,ż ?#ASDF BLOCK INDEX %YAML 1.1 --- [359] ... asdf-2.5.1/asdf/commands/tests/data/blocks.diff0000644000446400020070000000042413567314375023546 0ustar eslavichSTSCI\science00000000000000tree: foobar: bizbaz: > green < red datatype: > uint64 < float64 shape: - > 9000 < 10000 ndarrays differ by shape, datatype and contents asdf-2.5.1/asdf/commands/tests/data/frames.diff0000644000446400020070000000233613567314375023552 0ustar eslavichSTSCI\science00000000000000tree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 > frames: > - reference_frame: > galcen_coord: > dec: > unit: > deg > value: > -28.936175 > ra: > unit: > deg > value: > 266.4051 > wrap_angle: > unit: > deg > value: > 360.0 > galcen_v_sun: > - unit: > km s-1 > value: > 11.1 > - unit: > km s-1 > value: > 232.24 > - unit: > km s-1 > value: > 7.25 < galcen_dec: < unit: < rad < value: < 1.0 < galcen_ra: < unit: < deg < value: < 45.0 asdf-2.5.1/asdf/commands/tests/data/frames0.asdf0000644000446400020070000001013713567314375023635 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev858} frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: ICRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4_noeterms} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: galactic} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: galcen_coord: !wcs/icrs_coord-1.1.0 dec: {value: -28.936175} ra: value: 266.4051 wrap_angle: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 360.0} galcen_distance: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 5.0} galcen_v_sun: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 11.1} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 232.24} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 7.25} roll: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 
deg, value: 3.0} type: galactocentric z_sun: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pc, value: 3.0} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 3.0856775814671916e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 9.257032744401574e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 6.1713551629343834e+19} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: GCRS unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: equinox: !time/time-1.1.0 J2000.000 obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 3.0856775814671916e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 9.257032744401574e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 6.1713551629343834e+19} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: precessed_geocentric unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] ... asdf-2.5.1/asdf/commands/tests/data/frames1.asdf0000644000446400020070000000732113567314375023637 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev846} frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat, blurg] name: CelestialFrame reference_frame: {type: ICRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4_noeterms} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: galactic} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: galcen_dec: !unit/quantity-1.1.0 unit: rad value: 1.0 galcen_distance: !unit/quantity-1.1.0 unit: m value: 5.0 galcen_ra: !unit/quantity-1.1.0 unit: deg value: 45.0 roll: !unit/quantity-1.1.0 unit: deg value: 3.0 type: galactocentric z_sun: !unit/quantity-1.1.0 unit: pc value: 3.0 unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: obsgeoloc: - !unit/quantity-1.1.0 unit: m value: 3.0856775814671916e+16 - !unit/quantity-1.1.0 unit: m value: 9.257032744401574e+16 - !unit/quantity-1.1.0 unit: m value: 6.1713551629343834e+19 obsgeovel: - !unit/quantity-1.1.0 unit: m s-1 value: 2.0 - !unit/quantity-1.1.0 unit: m s-1 value: 1.0 - !unit/quantity-1.1.0 unit: m s-1 value: 8.0 obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: GCRS unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: equinox: !time/time-1.1.0 J2000.000 obsgeoloc: - !unit/quantity-1.1.0 unit: m value: 3.0856775814671916e+16 - !unit/quantity-1.1.0 unit: m value: 9.257032744401574e+16 - !unit/quantity-1.1.0 unit: m value: 6.1713551629343834e+19 obsgeovel: - !unit/quantity-1.1.0 unit: m s-1 value: 2.0 - !unit/quantity-1.1.0 unit: m s-1 value: 1.0 - !unit/quantity-1.1.0 unit: m s-1 value: 8.0 obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: precessed_geocentric unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] ... 
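The two frames files above are the inputs that produce the frames.diff and frames_minimal.diff fixtures in this directory. As a minimal sketch (file paths are illustrative), the same comparison can be reproduced directly with the diff helper exercised by the tests that follow:

    import io
    from asdf.commands import diff

    out = io.StringIO()
    # minimal=True restricts the output to the differing nodes,
    # as in frames_minimal.diff above.
    diff(['frames0.asdf', 'frames1.asdf'], minimal=True, iostream=out)
    print(out.getvalue())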
asdf-2.5.1/asdf/commands/tests/data/frames_minimal.diff0000644000446400020070000000041513567314375025254 0ustar eslavichSTSCI\science00000000000000tree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 frames: - reference_frame: > galcen_coord > galcen_v_sun < galcen_dec < galcen_ra asdf-2.5.1/asdf/commands/tests/test_defragment.py0000644000446400020070000000266113567314375024260 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import sys import numpy as np import pytest import asdf from ... import AsdfFile from .. import main from ...tests.helpers import get_file_sizes, assert_tree_match def _test_defragment(tmpdir, codec): x = np.arange(0, 1000, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(100, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') out_path = os.path.join(str(tmpdir), 'original.defragment.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args( ['defragment', path, '-o', out_path, '-c', codec]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original.defragment.asdf' in files assert files['original.defragment.asdf'] < files['original.asdf'] with asdf.open(os.path.join(str(tmpdir), 'original.defragment.asdf')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 2 def test_defragment_zlib(tmpdir): _test_defragment(tmpdir, 'zlib') def test_defragment_bzp2(tmpdir): _test_defragment(tmpdir, 'bzp2') def test_defragment_lz4(tmpdir): pytest.importorskip('lz4') _test_defragment(tmpdir, 'lz4') asdf-2.5.1/asdf/commands/tests/test_diff.py0000644000446400020070000000322413567314375023050 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import io from functools import partial import numpy as np import pytest from ... import AsdfFile from ...tests import helpers from .. import main, diff from . 
import data as test_data get_test_data_path = partial(helpers.get_test_data_path, module=test_data) def _assert_diffs_equal(filenames, result_file, minimal=False): iostream = io.StringIO() file_paths = [get_test_data_path(name) for name in filenames] diff(file_paths, minimal=minimal, iostream=iostream) iostream.seek(0) result_path = get_test_data_path(result_file) with open(result_path, 'r') as handle: assert handle.read() == iostream.read() def test_diff(): filenames = ['frames0.asdf', 'frames1.asdf'] result_file = 'frames.diff' _assert_diffs_equal(filenames, result_file, minimal=False) def test_diff_minimal(): filenames = ['frames0.asdf', 'frames1.asdf'] result_file = 'frames_minimal.diff' _assert_diffs_equal(filenames, result_file, minimal=True) def test_diff_block(): filenames = ['block0.asdf', 'block1.asdf'] result_file = 'blocks.diff' _assert_diffs_equal(filenames, result_file, minimal=False) def test_file_not_found(): # Try to open files that exist but are not valid asdf filenames = ['frames.diff', 'blocks.diff'] with pytest.raises(RuntimeError): diff([get_test_data_path(name) for name in filenames], False) def test_diff_command(): filenames = ['frames0.asdf', 'frames1.asdf'] paths = [get_test_data_path(name) for name in filenames] assert main.main_from_args(['diff'] + paths) == 0 asdf-2.5.1/asdf/commands/tests/test_exploded.py0000644000446400020070000000322513605165746023744 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import numpy as np import asdf from asdf import AsdfFile from asdf.commands import main from ...tests.helpers import get_file_sizes, assert_tree_match def test_explode_then_implode(tmpdir): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') ff = AsdfFile(tree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. ff.write_to(path, all_array_storage='internal') assert len(ff.blocks) == 2 result = main.main_from_args(['explode', path]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original_exploded.asdf' in files assert 'original_exploded0000.asdf' in files assert 'original_exploded0001.asdf' in files assert 'original_exploded0002.asdf' not in files assert files['original.asdf'] > files['original_exploded.asdf'] path = os.path.join(str(tmpdir), 'original_exploded.asdf') result = main.main_from_args(['implode', path]) assert result == 0 with asdf.open(str(tmpdir.join('original_exploded_all.asdf'))) as af: assert_tree_match(af.tree, tree) assert len(af.blocks) == 2 def test_file_not_found(tmpdir): path = os.path.join(str(tmpdir), 'original.asdf') assert main.main_from_args(['explode', path]) == 2 asdf-2.5.1/asdf/commands/tests/test_extract.py0000644000446400020070000000206113567314375023610 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import numpy as np import pytest astropy = pytest.importorskip('astropy') from astropy.io.fits import HDUList, ImageHDU import asdf from asdf.fits_embed import AsdfInFits from asdf.tests.helpers import assert_tree_match from .. 
import extract def test_extract(tmpdir): hdulist = HDUList() image = ImageHDU(np.random.random((25, 25))) hdulist.append(image) tree = { 'some_words': 'These are some words', 'nested': { 'a': 100, 'b': 42 }, 'list': [x for x in range(10)], 'image': image.data } asdf_in_fits = str(tmpdir.join('asdf.fits')) with AsdfInFits(hdulist, tree) as aif: aif.write_to(asdf_in_fits) pure_asdf = str(tmpdir.join('extract.asdf')) extract.extract_file(asdf_in_fits, pure_asdf) assert os.path.exists(pure_asdf) with asdf.open(pure_asdf) as af: assert not isinstance(af, AsdfInFits) assert_tree_match(tree, af.tree) asdf-2.5.1/asdf/commands/tests/test_main.py0000644000446400020070000000077113567314375023070 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import pytest from .. import main def test_help(): # Just a smoke test, really main.main_from_args(['help']) def test_invalid_command(): with pytest.raises(SystemExit) as e: main.main([]) assert e.value.code == 2 with pytest.raises(SystemExit) as e: main.main(['foo']) if isinstance(e.value, int): assert e.value == 2 else: assert e.value.code == 2 asdf-2.5.1/asdf/commands/tests/test_remove_hdu.py0000644000446400020070000000177013567314375024301 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import numpy as np import pytest astropy = pytest.importorskip('astropy') from astropy.io import fits from asdf.fits_embed import AsdfInFits from .. import remove_hdu def test_remove_hdu(tmpdir): hdulist = fits.HDUList() image = fits.ImageHDU(np.random.random((25, 25))) hdulist.append(image) tree = { 'some_words': 'These are some words', 'nested': { 'a': 100, 'b': 42 }, 'list': [x for x in range(10)], 'image': image.data } asdf_in_fits = str(tmpdir.join('asdf.fits')) with AsdfInFits(hdulist, tree) as aif: aif.write_to(asdf_in_fits) with fits.open(asdf_in_fits) as hdul: assert 'ASDF' in hdul new_fits = str(tmpdir.join('remove.fits')) remove_hdu(asdf_in_fits, new_fits) assert os.path.exists(new_fits) with fits.open(new_fits) as hdul: assert 'ASDF' not in hdul asdf-2.5.1/asdf/commands/tests/test_tags.py0000644000446400020070000000210613567314375023074 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io from ... import AsdfFile from .. 
import list_tags def _get_tags(display_classes): iostream = io.StringIO() list_tags(display_classes=display_classes, iostream=iostream) iostream.seek(0) return [line.strip() for line in iostream.readlines()] def _class_to_string(_class): return "{}.{}".format(_class.__module__, _class.__name__) def test_list_schemas(): obs_tags = _get_tags(False) af = AsdfFile() exp_tags = sorted(af._extensions._type_index._type_by_tag.keys()) for exp, obs in zip(exp_tags, obs_tags): assert exp == obs def test_list_schemas_and_tags(): tag_lines = _get_tags(True) af = AsdfFile() type_by_tag = af._extensions._type_index._type_by_tag exp_tags = sorted(type_by_tag.keys()) for exp_tag, line in zip(exp_tags, tag_lines): tag_name, tag_class = line.split(": ") assert tag_name == exp_tag exp_class = _class_to_string(type_by_tag[exp_tag]) assert tag_class == exp_class asdf-2.5.1/asdf/commands/tests/test_to_yaml.py0000644000446400020070000000170713567314375023604 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import numpy as np import asdf from ... import AsdfFile from .. import main from ...tests.helpers import get_file_sizes, assert_tree_match def test_to_yaml(tmpdir): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args(['to_yaml', path]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original.yaml' in files with asdf.open(os.path.join(str(tmpdir), 'original.yaml')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 0 asdf-2.5.1/asdf/commands/to_yaml.py0000644000446400020070000000375113567314375021400 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Contains commands for dealing with exploded and imploded forms. """ import os import asdf from .main import Command from .. import AsdfFile __all__ = ['to_yaml'] class ToYaml(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("to_yaml"), help="Convert an ASDF file to pure YAML.", description="""Convert all data to inline YAML so the ASDF file contains no binary blocks.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to convert to YAML.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file. If not provided, it will be the name of the input file with a '.yaml' extension.""") parser.add_argument( "--resolve-references", "-r", action="store_true", help="""Resolve all references and store them directly in the output file.""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return to_yaml(args.filename[0], args.output, args.resolve_references) def to_yaml(input, output=None, resolve_references=False): """ Convert an ASDF file to a single pure-YAML ASDF file, with all binary data stored inline in the YAML tree. Parameters ---------- input : str or file-like object The input file. output : str or file-like object The output file. resolve_references : bool, optional If `True` resolve all external references before saving.
""" if output is None: base, ext = os.path.splitext(input) output = base + '.yaml' with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to(output, all_array_storage='inline')
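A minimal usage sketch of the command implemented above, mirroring the invocation used by test_to_yaml.py ('data.asdf' is an illustrative filename):

    from asdf.commands import main

    # Equivalent to running ``to_yaml`` from the command line; since no
    # --output is given, this writes 'data.yaml' next to the input file.
    result = main.main_from_args(['to_yaml', 'data.asdf'])
    assert result == 0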
""" if output is None: base, ext = os.path.splitext(input) output = base + '.yaml' with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to(output, all_array_storage='inline') asdf-2.5.1/asdf/compat/0000755000446400020070000000000013605166132017033 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/compat/__init__.py0000644000446400020070000000013013567314375021151 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- asdf-2.5.1/asdf/compat/jsonschemacompat.py0000644000446400020070000000017013605165746022753 0ustar eslavichSTSCI\science00000000000000from ..util import minversion __all__ = ['JSONSCHEMA_LT_3'] JSONSCHEMA_LT_3 = not minversion('jsonschema', '3.0.0') asdf-2.5.1/asdf/compat/numpycompat.py0000644000446400020070000000015513567314375021775 0ustar eslavichSTSCI\science00000000000000from ..util import minversion __all__ = ['NUMPY_LT_1_7'] NUMPY_LT_1_7 = not minversion('numpy', '1.7.0') asdf-2.5.1/asdf/compression.py0000644000446400020070000001675513567314375020514 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import struct import numpy as np DEFAULT_BLOCK_SIZE = 1 << 22 #: Decompressed block size in bytes, 4MiB def validate(compression): """ Validate the compression string. Parameters ---------- compression : str, bytes or None Returns ------- compression : str or None In canonical form. Raises ------ ValueError """ if not compression or compression == b'\0\0\0\0': return None if isinstance(compression, bytes): compression = compression.decode('ascii') compression = compression.strip('\0') if compression not in ('zlib', 'bzp2', 'lz4', 'input'): raise ValueError( "Supported compression types are: 'zlib', 'bzp2', 'lz4', or 'input'") return compression class Lz4Compressor: def __init__(self, block_api): self._api = block_api def compress(self, data): output = self._api.compress(data, mode='high_compression') header = struct.pack('!I', len(output)) return header + output class Lz4Decompressor: def __init__(self, block_api): self._api = block_api self._size = 0 self._pos = 0 self._buffer = b'' def decompress(self, data): if not self._size: data = self._buffer + data if len(data) < 4: self._buffer += data return b'' self._size = struct.unpack('!I', data[:4])[0] data = data[4:] self._buffer = bytearray(self._size) if self._pos + len(data) < self._size: self._buffer[self._pos:self._pos + len(data)] = data self._pos += len(data) return b'' else: offset = self._size - self._pos self._buffer[self._pos:] = data[:offset] data = data[offset:] self._size = 0 self._pos = 0 output = self._api.decompress(self._buffer) self._buffer = b'' return output + self.decompress(data) def _get_decoder(compression): if compression == 'zlib': try: import zlib except ImportError: raise ImportError( "Your Python does not have the zlib library, " "therefore the compressed block in this ASDF file " "can not be decompressed.") return zlib.decompressobj() elif compression == 'bzp2': try: import bz2 except ImportError: raise ImportError( "Your Python does not have the bz2 library, " "therefore the compressed block in this ASDF file " "can not be decompressed.") return bz2.BZ2Decompressor() elif compression == 'lz4': try: import lz4.block except ImportError: raise ImportError( "lz4 library in not installed in your Python environment, " "therefore the compressed block in this ASDF file " "can not be 
decompressed.") return Lz4Decompressor(lz4.block) else: raise ValueError( "Unknown compression type: '{0}'".format(compression)) def _get_encoder(compression): if compression == 'zlib': try: import zlib except ImportError: raise ImportError( "Your Python does not have the zlib library, " "therefore the block in this ASDF file " "can not be compressed.") return zlib.compressobj() elif compression == 'bzp2': try: import bz2 except ImportError: raise ImportError( "Your Python does not have the bz2 library, " "therefore the block in this ASDF file " "can not be compressed.") return bz2.BZ2Compressor() elif compression == 'lz4': try: import lz4.block except ImportError: raise ImportError( "lz4 library in not installed in your Python environment, " "therefore the block in this ASDF file " "can not be compressed.") return Lz4Compressor(lz4.block) else: raise ValueError( "Unknown compression type: '{0}'".format(compression)) def to_compression_header(compression): """ Converts a compression string to the four byte field in a block header. """ if not compression: return b'' if isinstance(compression, str): return compression.encode('ascii') return compression def decompress(fd, used_size, data_size, compression): """ Decompress binary data in a file Parameters ---------- fd : generic_io.GenericIO object The file to read the compressed data from. used_size : int The size of the compressed data data_size : int The size of the uncompressed data compression : str The compression type used. Returns ------- array : numpy.array A flat uint8 containing the decompressed data. """ buffer = np.empty((data_size,), np.uint8) compression = validate(compression) decoder = _get_decoder(compression) i = 0 for block in fd.read_blocks(used_size): decoded = decoder.decompress(block) if i + len(decoded) > data_size: raise ValueError("Decompressed data too long") buffer.data[i:i+len(decoded)] = decoded i += len(decoded) if hasattr(decoder, 'flush'): decoded = decoder.flush() if i + len(decoded) > data_size: raise ValueError("Decompressed data too long") elif i + len(decoded) < data_size: raise ValueError("Decompressed data too short") buffer[i:i+len(decoded)] = decoded return buffer def compress(fd, data, compression, block_size=DEFAULT_BLOCK_SIZE): """ Compress array data and write to a file. Parameters ---------- fd : generic_io.GenericIO object The file to write to. data : buffer The buffer of uncompressed data. compression : str The type of compression to use. block_size : int, optional Input data will be split into blocks of this size (in bytes) before compression. """ compression = validate(compression) encoder = _get_encoder(compression) # We can have numpy arrays here. While compress() will work with them, # it is impossible to split them into fixed size blocks without converting # them to bytes. if isinstance(data, np.ndarray): data = data.tobytes() for i in range(0, len(data), block_size): fd.write(encoder.compress(data[i:i+block_size])) if hasattr(encoder, "flush"): fd.write(encoder.flush()) def get_compressed_size(data, compression, block_size=DEFAULT_BLOCK_SIZE): """ Returns the number of bytes required when the given data is compressed. Parameters ---------- data : buffer compression : str The type of compression to use. block_size : int, optional Input data will be split into blocks of this size (in bytes) before the compression. 
Returns ------- bytes : int """ compression = validate(compression) encoder = _get_encoder(compression) l = 0 for i in range(0, len(data), block_size): l += len(encoder.compress(data[i:i+block_size])) if hasattr(encoder, "flush"): l += len(encoder.flush()) return l asdf-2.5.1/asdf/conftest.py0000644000446400020070000000302713567314375017764 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- # this contains imports of plugins that configure py.test for asdf tests. # by importing them here in conftest.py they are discoverable by py.test # no matter how it is invoked within the source tree. import os import pytest from .extern.RangeHTTPServer import RangeHTTPRequestHandler # This is to figure out the affiliated package version, rather than # using Astropy's from . import version from .tests.httpserver import HTTPServer, RangeHTTPServer @pytest.fixture() def httpserver(request): """ The returned ``httpserver`` provides a threaded HTTP server instance. It serves content from a temporary directory (available as the attribute tmpdir) at randomly assigned URL (available as the attribute url). * ``tmpdir`` - path to the tmpdir that it's serving from (str) * ``url`` - the base url for the server """ server = HTTPServer() request.addfinalizer(server.finalize) return server @pytest.fixture() def rhttpserver(request): """ The returned ``httpserver`` provides a threaded HTTP server instance. It serves content from a temporary directory (available as the attribute tmpdir) at randomly assigned URL (available as the attribute url). The server supports HTTP Range headers. * ``tmpdir`` - path to the tmpdir that it's serving from (str) * ``url`` - the base url for the server """ server = RangeHTTPServer() request.addfinalizer(server.finalize) return server asdf-2.5.1/asdf/constants.py0000644000446400020070000000114313567314375020150 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import numpy as np ASDF_MAGIC = b'#ASDF' BLOCK_MAGIC = b'\xd3BLK' BLOCK_HEADER_BOILERPLATE_SIZE = 6 ASDF_STANDARD_COMMENT = b'ASDF_STANDARD' INDEX_HEADER = b'#ASDF BLOCK INDEX' # The maximum number of blocks supported MAX_BLOCKS = 2 ** 16 MAX_BLOCKS_DIGITS = int(np.ceil(np.log10(MAX_BLOCKS) + 1)) YAML_TAG_PREFIX = 'tag:yaml.org,2002:' YAML_END_MARKER_REGEX = br'\r?\n\.\.\.((\r?\n)|$)' STSCI_SCHEMA_URI_BASE = 'http://stsci.edu/schemas/' STSCI_SCHEMA_TAG_BASE = 'tag:stsci.edu:asdf' BLOCK_FLAG_STREAMED = 0x1 asdf-2.5.1/asdf/exceptions.py0000644000446400020070000000070213605166107020304 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- class AsdfWarning(Warning): """ The base warning class from which all ASDF warnings should inherit. """ class AsdfDeprecationWarning(AsdfWarning): """ A warning class to indicate a deprecated feature. """ class AsdfConversionWarning(AsdfWarning): """ Warning class used for failures to convert data into custom types. """ 
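The warning types above are ordinary Python warnings, so downstream code can manage them with the standard ``warnings`` machinery. A minimal sketch (the filter policy shown is illustrative, not part of the library):

    import warnings
    from asdf.exceptions import AsdfDeprecationWarning, AsdfWarning

    # Escalate ASDF warnings to errors, but let deprecation notices
    # through as normal warnings (the more specific filter wins because
    # simplefilter() prepends to the filter list).
    warnings.simplefilter('error', AsdfWarning)
    warnings.simplefilter('default', AsdfDeprecationWarning)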
asdf-2.5.1/asdf/extension.py0000644000446400020070000002076113605165746020152 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import abc import warnings from pkg_resources import iter_entry_points import six import importlib from . import types from . 
import resolver from .util import get_class_name from .type_index import AsdfTypeIndex from .version import version as asdf_version from .exceptions import AsdfDeprecationWarning __all__ = ['AsdfExtension', 'AsdfExtensionList'] ASDF_TEST_BUILD_ENV = 'ASDF_TEST_BUILD' @six.add_metaclass(abc.ABCMeta) class AsdfExtension: """ Abstract base class defining an extension to ASDF. """ @classmethod def __subclasshook__(cls, C): if cls is AsdfExtension: return (hasattr(C, 'types') and hasattr(C, 'tag_mapping') and hasattr(C, 'url_mapping')) return NotImplemented @abc.abstractproperty def types(self): """ A list of `asdf.CustomType` subclasses that describe how to store custom objects to and from ASDF. """ pass @abc.abstractproperty def tag_mapping(self): """ A list of 2-tuples or callables mapping YAML tag prefixes to JSON Schema URL prefixes. For each entry: - If a 2-tuple, the first part of the tuple is a YAML tag prefix to match. The second part is a string, where the following are available as Python formatting tokens: - ``{tag}``: the complete YAML tag. - ``{tag_suffix}``: the part of the YAML tag after the matched prefix. - ``{tag_prefix}``: the matched YAML tag prefix. - If a callable, it is passed the entire YAML tag and must return the entire JSON schema URL if it matches, otherwise, return `None`. Note that while JSON Schema URLs uniquely define a JSON Schema, they do not have to actually exist on an HTTP server and be fetchable (much like XML namespaces). For example, to match all YAML tags with the ``tag:nowhere.org:custom/`` prefix to the ``http://nowhere.org/schemas/custom/`` URL prefix:: return [('tag:nowhere.org:custom/', 'http://nowhere.org/schemas/custom/{tag_suffix}')] """ pass @abc.abstractproperty def url_mapping(self): """ A list of 2-tuples or callables mapping JSON Schema URLs to other URLs. This is useful if the JSON Schemas are not actually fetchable at their corresponding URLs but are on the local filesystem, or, to save bandwidth, we have a copy of fetchable schemas on the local filesystem. If neither is desirable, it may simply be the empty list. For each entry: - If a 2-tuple, the first part is a URL prefix to match. The second part is a string, where the following are available as Python formatting tokens: - ``{url}``: The entire JSON schema URL - ``{url_prefix}``: The matched URL prefix - ``{url_suffix}``: The part of the URL after the prefix. - If a callable, it is passed the entire JSON Schema URL and must return a resolvable URL pointing to the schema content. If it doesn't match, it should return `None`. For example, to map a remote HTTP URL prefix to files installed as data alongside a Python module:: return [('http://nowhere.org/schemas/custom/1.0.0/', asdf.util.filepath_to_url( os.path.join(SCHEMA_PATH, 'stsci.edu')) + '/{url_suffix}.yaml' )] """ pass
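# A minimal sketch of an implementation of the interface defined above
# (illustrative only; not part of this module). Because of __subclasshook__,
# any class providing these three properties is accepted as an AsdfExtension
# without subclassing it. The tag and URL prefixes reused here are the
# placeholder values from the docstrings above:
#
#     class MyExtension:
#         @property
#         def types(self):
#             # asdf.CustomType subclasses describing custom objects
#             return []
#
#         @property
#         def tag_mapping(self):
#             return [('tag:nowhere.org:custom/',
#                      'http://nowhere.org/schemas/custom/{tag_suffix}')]
#
#         @property
#         def url_mapping(self):
#             # May be empty if the schema URLs are directly fetchable
#             return []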
""" def __init__(self, extensions): tag_mapping = [] url_mapping = [] validators = {} self._type_index = AsdfTypeIndex() for extension in extensions: if not isinstance(extension, AsdfExtension): raise TypeError( "Extension must implement asdf.types.AsdfExtension " "interface") tag_mapping.extend(extension.tag_mapping) url_mapping.extend(extension.url_mapping) for typ in extension.types: self._type_index.add_type(typ, extension) validators.update(typ.validators) for sibling in typ.versioned_siblings: self._type_index.add_type(sibling, extension) validators.update(sibling.validators) self._tag_mapping = resolver.Resolver(tag_mapping, 'tag') self._url_mapping = resolver.Resolver(url_mapping, 'url') self._resolver = resolver.ResolverChain(self._tag_mapping, self._url_mapping) self._validators = validators @property def tag_to_schema_resolver(self): """Deprecated. Use `tag_mapping` instead""" warnings.warn( "The 'tag_to_schema_resolver' property is deprecated. Use " "'tag_mapping' instead.", AsdfDeprecationWarning) return self._tag_mapping @property def tag_mapping(self): return self._tag_mapping @property def url_mapping(self): return self._url_mapping @property def resolver(self): return self._resolver @property def type_index(self): return self._type_index @property def validators(self): return self._validators class BuiltinExtension: """ This is the "extension" to ASDF that includes all the built-in tags. Even though it's not really an extension and it's always available, it's built in the same way as an extension. """ @property def types(self): return types._all_asdftypes @property def tag_mapping(self): return resolver.DEFAULT_TAG_TO_URL_MAPPING @property def url_mapping(self): return resolver.DEFAULT_URL_MAPPING class _DefaultExtensions: def __init__(self): self._extensions = [] self._extension_list = None self._package_metadata = {} def _load_installed_extensions(self, group='asdf_extensions'): for entry_point in iter_entry_points(group=group): with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always', category=AsdfDeprecationWarning) ext = entry_point.load() if not issubclass(ext, AsdfExtension): warnings.warn("Found entry point {}, from {} but it is not a " "subclass of AsdfExtension, as expected. It is " "being ignored.".format(ext, entry_point.dist)) continue dist = entry_point.dist name = get_class_name(ext, instance=False) self._package_metadata[name] = (dist.project_name, dist.version) self._extensions.append(ext()) for warning in w: warnings.warn('{} (from {})'.format(warning.message, name), AsdfDeprecationWarning) @property def extensions(self): # This helps avoid a circular dependency with external packages if not self._extensions: # If this environment variable is defined, load the default # extension. This allows the package to be tested without being # installed (e.g. for builds on Debian). 
if os.environ.get(ASDF_TEST_BUILD_ENV): # Fake the extension metadata name = get_class_name(BuiltinExtension, instance=False) self._package_metadata[name] = ('asdf', asdf_version) self._extensions.append(BuiltinExtension()) self._load_installed_extensions() return self._extensions @property def extension_list(self): if self._extension_list is None: self._extension_list = AsdfExtensionList(self.extensions) return self._extension_list @property def package_metadata(self): return self._package_metadata def reset(self): """This will be used primarily for testing purposes.""" self._extensions = [] self._extension_list = None self._package_metadata = {} @property def resolver(self): return self.extension_list.resolver default_extensions = _DefaultExtensions() def get_default_resolver(): """ Get the resolver that includes mappings from all installed extensions. """ return default_extensions.resolver asdf-2.5.1/asdf/extern/0000755000446400020070000000000013605166132017055 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/extern/RangeHTTPServer.py0000755000446400020070000001566713567314375022407 0ustar eslavichSTSCI\science00000000000000#!/usr/bin/env python #Portions Copyright (C) 2009,2010 Xyne #Portions Copyright (C) 2011 Sean Goller # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # (version 2) as published by the Free Software Foundation. # # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Range HTTP Server. This module builds on BaseHTTPServer by implementing the standard GET and HEAD requests in a fairly straightforward manner, and includes support for the Range header. """ __version__ = "0.1" __all__ = ["RangeHTTPRequestHandler"] import os import shutil import posixpath import http.server import urllib.parse class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler): # pragma: no cover """Simple HTTP request handler with GET and HEAD commands. This serves files from the current directory and any of its subdirectories. The MIME type for files is determined by calling the .guess_type() method. The GET and HEAD requests are identical except that the HEAD request omits the actual contents of the file. """ server_version = "RangeHTTP/" + __version__ def do_GET(self): """Serve a GET request.""" f, start_range, end_range = self.send_head() if f: f.seek(start_range, 0) chunk = 0x1000 total = 0 while chunk > 0: if start_range + chunk > end_range: chunk = end_range - start_range try: self.wfile.write(f.read(chunk)) except: break total += chunk start_range += chunk f.close() def do_HEAD(self): """Serve a HEAD request.""" f, start_range, end_range = self.send_head() if f: f.close() def send_head(self): """Common code for GET and HEAD commands. This sends the response code and MIME headers. Return value is either a file object (which has to be copied to the outputfile by the caller unless the command was HEAD, and must be closed by the caller under all circumstances), or None, in which case the caller has nothing further to do. 
""" path = self.translate_path(self.path) f = None if os.path.isdir(path): if not self.path.endswith('/'): # redirect browser - doing basically what apache does self.send_response(301) self.send_header("Location", self.path + "/") self.end_headers() return (None, 0, 0) for index in "index.html", "index.htm": index = os.path.join(path, index) if os.path.exists(index): path = index break else: return self.list_directory(path) ctype = self.guess_type(path) try: # Always read in binary mode. Opening files in text mode may cause # newline translations, making the actual size of the content # transmitted *less* than the content-length! f = open(path, 'rb') except IOError: self.send_error(404, "File not found") return (None, 0, 0) if "Range" in self.headers: self.send_response(206) else: self.send_response(200) self.send_header("Content-type", ctype) fs = os.fstat(f.fileno()) size = int(fs[6]) start_range = 0 end_range = size self.send_header("Accept-Ranges", "bytes") if "Range" in self.headers: s, e = self.headers['range'][6:].split('-', 1) sl = len(s) el = len(e) if sl > 0: start_range = int(s) if el > 0: end_range = int(e) + 1 elif el > 0: ei = int(e) if ei < size: start_range = size - ei self.send_header( "Content-Range", 'bytes ' + str(start_range) + '-' + str(end_range - 1) + '/' + str(size)) self.send_header("Content-Length", end_range - start_range) self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) self.end_headers() return (f, start_range, end_range) def translate_path(self, path): """Translate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) """ # abandon query parameters path = path.split('?', 1)[0] path = path.split('#', 1)[0] path = posixpath.normpath(urllib.parse.unquote(path)) words = path.split('/') words = filter(None, words) path = os.getcwd() for word in words: drive, word = os.path.splitdrive(word) head, word = os.path.split(word) if word in (os.curdir, os.pardir): continue path = os.path.join(path, word) return path def copyfile(self, source, outputfile): """Copy all data between two file objects. The SOURCE argument is a file object open for reading (or anything with a read() method) and the DESTINATION argument is a file object open for writing (or anything with a write() method). The only reason for overriding this would be to change the block size or perhaps to replace newlines by CRLF -- note however that this the default server uses this to copy binary data as well. """ shutil.copyfileobj(source, outputfile) def guess_type(self, path): """Guess the type of a file. Argument is a PATH (a filename). Return value is a string of the form type/subtype, usable for a MIME Content-type header. The default implementation looks the file's extension up in the table self.extensions_map, using application/octet-stream as a default; however it would be permissible (if slow) to look inside the data to make a better guess. 
""" base, ext = posixpath.splitext(path) if ext in self.extensions_map: return self.extensions_map[ext] ext = ext.lower() if ext in self.extensions_map: return self.extensions_map[ext] else: return self.extensions_map[''] extensions_map = {'': 'unknown'} asdf-2.5.1/asdf/extern/__init__.py0000644000446400020070000000011013567314375021171 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ """ asdf-2.5.1/asdf/extern/atomicfile.py0000644000446400020070000001012613567314375021556 0ustar eslavichSTSCI\science00000000000000import six import os import tempfile import sys import errno if os.name == 'nt': # pragma: no cover import random import time _rename = lambda src, dst: False _rename_atomic = lambda src, dst: False import ctypes _MOVEFILE_REPLACE_EXISTING = 0x1 _MOVEFILE_WRITE_THROUGH = 0x8 _MoveFileEx = ctypes.windll.kernel32.MoveFileExW def _rename(src, dst): if not isinstance(src, str): src = str(src, sys.getfilesystemencoding()) if not isinstance(dst, str): dst = str(dst, sys.getfilesystemencoding()) if _rename_atomic(src, dst): return True retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH) if not rv: time.sleep(0.001) retry += 1 return rv # new in Vista and Windows Server 2008 _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW _CloseHandle = ctypes.windll.kernel32.CloseHandle def _rename_atomic(src, dst): ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Atomic rename') if ta == -1: return False try: retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileTransacted(src, dst, None, None, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH, ta) if rv: rv = _CommitTransaction(ta) break else: time.sleep(0.001) retry += 1 return rv finally: _CloseHandle(ta) def atomic_rename(src, dst): # Try atomic or pseudo-atomic rename if _rename(src, dst): return # Fall back to "move away and replace" try: os.rename(src, dst) except OSError as e: if e.errno != errno.EEXIST: raise old = "%s-%08x" % (dst, random.randint(0, sys.maxsize)) os.rename(dst, old) os.rename(src, dst) try: os.unlink(old) except Exception: pass else: atomic_rename = os.rename class _AtomicWFile(object): """Helper class for :func:`atomic_open`.""" def __init__(self, f, tmp_filename, filename): self._f = f self._tmp_filename = tmp_filename self._filename = filename def __getattr__(self, attr): return getattr(self._f, attr) def __enter__(self): return self @property def name(self): return self._filename def close(self): if self._f.closed: return self._f.close() atomic_rename(self._tmp_filename, self._filename) def __exit__(self, exc_type, exc_value, tb): if exc_type is None: self.close() else: self._f.close() try: os.remove(self._tmp_filename) except OSError: pass def __repr__(self): return '<%s %s%r, mode %r>' % ( self.__class__.__name__, self._f.closed and 'closed ' or '', self._filename, self._f.mode ) def atomic_open(filename, mode='w'): """Works like a regular `open()` but writes updates into a temporary file instead of the given file and moves it over when the file is closed. 
asdf-2.5.1/asdf/extern/decorators.py0000644000446400020070000000255313567314375021604 0ustar eslavichSTSCI\science00000000000000# -*- coding: utf-8 -*- # This code was taken from sunpy, which is licensed under a 3-clause BSD style # license - see licenses/SUNPY_LICENSE.rst """Sundry function and class decorators.""" __all__ = ['add_common_docstring'] class add_common_docstring(object): """ A function decorator that will append and/or prepend an addendum to the docstring of the target function. Parameters ---------- append : `str`, optional A string to append to the end of the functions docstring. prepend : `str`, optional A string to prepend to the start of the functions docstring. **kwargs : `dict`, optional A dictionary to format append and prepend strings. """ def __init__(self, append=None, prepend=None, **kwargs): if kwargs: append = append.format(**kwargs) prepend = prepend.format(**kwargs) self.append = append self.prepend = prepend def __call__(self, func): func.__doc__ = func.__doc__ if func.__doc__ else '' self.append = self.append if self.append else '' self.prepend = self.prepend if self.prepend else '' if self.append and isinstance(func.__doc__, str): func.__doc__ += self.append if self.prepend and isinstance(func.__doc__, str): func.__doc__ = self.prepend + func.__doc__ return func asdf-2.5.1/asdf/fits_embed.py0000644000446400020070000002632513567314375020242 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Utilities for embedded ASDF files in FITS. """ import io import re import numpy as np from . import asdf from . import block from . import util from . 
import generic_io try: from astropy.io import fits from astropy.io.fits import Column, BinTableHDU except ImportError: raise ImportError("AsdfInFits requires astropy") ASDF_EXTENSION_NAME = 'ASDF' FITS_SOURCE_PREFIX = 'fits:' __all__ = ['AsdfInFits'] class _FitsBlock: def __init__(self, hdu): self._hdu = hdu def __repr__(self): return '<_FitsBlock {0},{1}>'.format(self._hdu.name, self._hdu.ver) def __len__(self): return self._hdu.data.nbytes @property def data(self): return self._hdu.data @property def readonly(self): return False @property def array_storage(self): return 'fits' def override_byteorder(self, byteorder): return 'big' class _EmbeddedBlockManager(block.BlockManager): def __init__(self, hdulist, asdffile): self._hdulist = hdulist super(_EmbeddedBlockManager, self).__init__(asdffile) def get_block(self, source): if (isinstance(source, str) and source.startswith(FITS_SOURCE_PREFIX)): parts = re.match( # All printable ASCII characters are allowed in EXTNAME '((?P<name>[ -~]+),)?(?P<ver>[0-9]+)', source[len(FITS_SOURCE_PREFIX):]) if parts is not None: ver = int(parts.group('ver')) if parts.group('name'): pair = (parts.group('name'), ver) else: pair = ver return _FitsBlock(self._hdulist[pair]) else: raise ValueError("Can not parse source '{0}'".format(source)) return super(_EmbeddedBlockManager, self).get_block(source) def get_source(self, block): if isinstance(block, _FitsBlock): for i, hdu in enumerate(self._hdulist): if hdu is block._hdu: if hdu.name == '': return '{0}{1}'.format( FITS_SOURCE_PREFIX, i) else: return '{0}{1},{2}'.format( FITS_SOURCE_PREFIX, hdu.name, hdu.ver) raise ValueError("FITS block seems to have been removed") return super(_EmbeddedBlockManager, self).get_source(block) def find_or_create_block_for_array(self, arr, ctx): from .tags.core import ndarray if not isinstance(arr, ndarray.NDArrayType): base = util.get_array_base(arr) for hdu in self._hdulist: if hdu.data is None: continue if base is util.get_array_base(hdu.data): return _FitsBlock(hdu) return super( _EmbeddedBlockManager, self).find_or_create_block_for_array(arr, ctx) class AsdfInFits(asdf.AsdfFile): """ Embed ASDF tree content in a FITS file. The YAML rendering of the tree is stored in a special FITS extension with the EXTNAME of ``ASDF``. Arrays in the ASDF tree may refer to binary data in other FITS extensions by setting source to a string with the prefix ``fits:`` followed by an ``EXTNAME``, ``EXTVER`` pair, e.g. ``fits:SCI,0``. 
Examples -------- Create a FITS file with ASDF structure, based on an existing FITS file:: from astropy.io import fits hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ')) tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist['DQ'].data, 'wcs': 'WCS info' } } } ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to('test.fits') # doctest: +SKIP """ def __init__(self, hdulist=None, tree=None, **kwargs): if hdulist is None: hdulist = fits.HDUList() super(AsdfInFits, self).__init__(tree=tree, **kwargs) self._blocks = _EmbeddedBlockManager(hdulist, self) self._hdulist = hdulist self._close_hdulist = False def __exit__(self, type, value, traceback): super(AsdfInFits, self).__exit__(type, value, traceback) if self._close_hdulist: self._hdulist.close() self._tree = {} def close(self): super(AsdfInFits, self).close() if self._close_hdulist: self._hdulist.close() self._tree = {} @classmethod def open(cls, fd, uri=None, validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, strict_extension_check=False, ignore_missing_extensions=False): """Creates a new AsdfInFits object based on given input data Parameters ---------- fd : FITS HDUList instance, URI string, or file-like object May be an already opened instance of a FITS HDUList instance, string ``file`` or ``http`` URI, or a Python file-like object. uri : str, optional The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, if possible and if created from `asdf.open`. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. Requires reading the entire file, so disabled by default. extensions : list of AsdfExtension, optional A list of extensions to the ASDF to support when reading and writing ASDF files. See `asdf.types.AsdfExtension` for more information. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. strict_extension_check : bool, optional When `True`, if the given ASDF file contains metadata about the extensions used to create it, and if those extensions are not installed, opening the file will fail. When `False`, opening a file under such conditions will cause only a warning. Defaults to `False`. ignore_missing_extensions : bool, optional When `True`, do not raise warnings when a file is read that contains metadata about extensions that are not available. Defaults to `False`. """ return cls._open_impl(fd, uri=uri, validate_checksums=validate_checksums, extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions) @classmethod def _open_impl(cls, fd, uri=None, validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, strict_extension_check=False, _extension_metadata=None, ignore_missing_extensions=False): close_hdulist = False if isinstance(fd, fits.hdu.hdulist.HDUList): hdulist = fd else: uri = generic_io.get_uri(fd) try: hdulist = fits.open(fd) # Since we created this HDUList object, we need to be # responsible for cleaning up upon close() or __exit__ close_hdulist = True except IOError: msg = "Failed to parse given file '{}'. Is it FITS?" 
raise ValueError(msg.format(uri)) self = cls(hdulist, uri=uri, extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag) if _extension_metadata is not None: self._extension_metadata = _extension_metadata self._close_hdulist = close_hdulist try: asdf_extension = hdulist[ASDF_EXTENSION_NAME] except (KeyError, IndexError, AttributeError): # This means there is no ASDF extension return self buff = io.BytesIO(asdf_extension.data) try: return cls._open_asdf(self, buff, uri=uri, mode='r', validate_checksums=validate_checksums, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions) except RuntimeError: self.close() raise def _create_hdu(self, buff, use_image_hdu): # Allow writing to old-style ImageHDU for backwards compatibility if use_image_hdu: array = np.frombuffer(buff.getvalue(), np.uint8) return fits.ImageHDU(array, name=ASDF_EXTENSION_NAME) else: data = np.array(buff.getbuffer(), dtype=np.uint8)[None, :] fmt = '{}B'.format(len(data[0])) column = fits.Column(array=data, format=fmt, name='ASDF_METADATA') return fits.BinTableHDU.from_columns([column], name=ASDF_EXTENSION_NAME) def _update_asdf_extension(self, all_array_storage=None, all_array_compression=None, auto_inline=None, pad_blocks=False, use_image_hdu=False): if self.blocks.streamed_block is not None: raise ValueError( "Can not save streamed data to ASDF-in-FITS file.") buff = io.BytesIO() super(AsdfInFits, self).write_to( buff, all_array_storage=all_array_storage, all_array_compression=all_array_compression, auto_inline=auto_inline, pad_blocks=pad_blocks, include_block_index=False) if ASDF_EXTENSION_NAME in self._hdulist: del self._hdulist[ASDF_EXTENSION_NAME] self._hdulist.append(self._create_hdu(buff, use_image_hdu)) def write_to(self, filename, all_array_storage=None, all_array_compression=None, auto_inline=None, pad_blocks=False, use_image_hdu=False, *args, **kwargs): self._update_asdf_extension( all_array_storage=all_array_storage, all_array_compression=all_array_compression, auto_inline=auto_inline, pad_blocks=pad_blocks, use_image_hdu=use_image_hdu) self._hdulist.writeto(filename, *args, **kwargs) def update(self, all_array_storage=None, all_array_compression=None, auto_inline=None, pad_blocks=False): raise NotImplementedError( "In-place update is not currently implemented for ASDF-in-FITS") self._update_asdf_extension( all_array_storage=all_array_storage, all_array_compression=all_array_compression, auto_inline=auto_inline, pad_blocks=pad_blocks) asdf-2.5.1/asdf/generic_io.py0000644000446400020070000011650413571550534020241 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ This provides abstractions around a number of different file and stream types available to Python so that they are always used in the most efficient way. The classes in this module should not be instantiated directly, but instead, one should use the factory function `get_file`. """ import io import os import re import sys import math import pathlib import tempfile import platform from distutils.version import LooseVersion from os import SEEK_SET, SEEK_CUR, SEEK_END import http.client from urllib import parse as urlparse from urllib.request import url2pathname import six import numpy as np from . 
import util from .extern import atomicfile __all__ = ['get_file', 'get_uri', 'resolve_uri', 'relative_uri'] _local_file_schemes = ['', 'file'] if sys.platform.startswith('win'): # pragma: no cover import string _local_file_schemes.extend(string.ascii_letters) def _check_bytes(fd, mode): """ Checks whether a given file-like object is opened in binary mode. """ # On Python 3, doing fd.read(0) on an HTTPResponse object causes # it to not be able to read any further, so we do this different # kind of check, which, unfortunately, is not as robust. if isinstance(fd, io.IOBase): if isinstance(fd, io.TextIOBase): return False return True if 'r' in mode: x = fd.read(0) if not isinstance(x, bytes): return False elif 'w' in mode: try: fd.write(b'') except TypeError: return False return True if (sys.platform == 'darwin' and LooseVersion(platform.mac_ver()[0]) < LooseVersion('10.9')): # pragma: no cover def _array_fromfile(fd, size): chunk_size = 1024 ** 3 if size < chunk_size: return np.fromfile(fd, dtype=np.uint8, count=size) else: array = np.empty(size, dtype=np.uint8) for beg in range(0, size, chunk_size): end = min(size, beg + chunk_size) array[beg:end] = np.fromfile(fd, dtype=np.uint8, count=end - beg) return array else: def _array_fromfile(fd, size): return np.fromfile(fd, dtype=np.uint8, count=size) _array_fromfile.__doc__ = """ Load a binary array from a real file object. Parameters ---------- fd : real file object size : integer Number of bytes to read. """ def _array_tofile_chunked(write, array, chunksize): # pragma: no cover array = array.view(np.uint8).flatten() for i in range(0, array.nbytes, chunksize): write(array[i:i + chunksize].data) def _array_tofile_simple(fd, write, array): return write(array.data) if sys.platform == 'darwin': # pragma: no cover def _array_tofile(fd, write, array): # This value is currently set as a workaround for a known bug in Python # on OSX. Individual writes must be less than 2GB, which necessitates # the chunk size here if we want it to remain a power of 2. # See https://bugs.python.org/issue24658. OSX_WRITE_LIMIT = 2 ** 30 if fd is None or array.nbytes >= OSX_WRITE_LIMIT and array.nbytes % 4096 == 0: return _array_tofile_chunked(write, array, OSX_WRITE_LIMIT) return _array_tofile_simple(fd, write, array) elif sys.platform.startswith('win'): # pragma: no cover def _array_tofile(fd, write, array): WIN_WRITE_LIMIT = 2 ** 30 return _array_tofile_chunked(write, array, WIN_WRITE_LIMIT) else: _array_tofile = _array_tofile_simple _array_tofile.__doc__ = """ Write an array to a file. Parameters ---------- fd : real file object If fd is provided, must be a real system file as supported by numpy.tofile. May be None, in which case all writing will be done through the `write` method. write : callable A callable that writes bytes to the file. array : Numpy array Must be an underlying data array, not a view. """ def resolve_uri(base, uri): """ Resolve a URI against a base URI. """ if base is None: base = '' resolved = urlparse.urljoin(base, uri) parsed = urlparse.urlparse(resolved) if parsed.path != '' and not parsed.path.startswith('/'): raise ValueError( "Resolved to relative URL") return resolved def relative_uri(source, target): """ Make a relative URI from source to target. 
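
    Examples
    --------
    A couple of illustrative cases (results shown for a POSIX system)::

        >>> relative_uri('file:///data/a.asdf', 'file:///data/sub/b.asdf')
        'sub/b.asdf'
        >>> relative_uri('file:///data/a.asdf', 'file:///data/a.asdf')
        ''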
""" su = urlparse.urlparse(source) tu = urlparse.urlparse(target) extra = list(tu[3:]) relative = None if tu[0] == '' and tu[1] == '': if tu[2] == su[2]: relative = '' elif not tu[2].startswith('/'): relative = tu[2] elif su[0:2] != tu[0:2]: return target if relative is None: if tu[2] == su[2]: relative = '' else: relative = os.path.relpath(tu[2], os.path.dirname(su[2])) if relative == '.': relative = '' relative = urlparse.urlunparse(["", "", relative] + extra) return relative class _TruncatedReader: """ Reads until a given delimiter is found. Only works with RandomAccessFile and InputStream, though as this is a private class, this is not explicitly enforced. """ def __init__(self, fd, delimiter, readahead_bytes, delimiter_name=None, include=False, initial_content=b'', exception=True): self._fd = fd self._delimiter = delimiter self._readahead_bytes = readahead_bytes if delimiter_name is None: delimiter_name = delimiter self._delimiter_name = delimiter_name self._include = include self._initial_content = initial_content self._trailing_content = b'' self._exception = exception self._past_end = False def read(self, nbytes=None): if self._past_end: content = self._trailing_content[:nbytes] if nbytes is None: self._trailing_content = b'' else: self._trailing_content = self._trailing_content[nbytes:] return content if nbytes is None: content = self._fd._peek() elif nbytes <= len(self._initial_content): content = self._initial_content[:nbytes] self._initial_content = self._initial_content[nbytes:] return content else: content = self._fd._peek(nbytes - len(self._initial_content) + self._readahead_bytes) if content == b'': if self._exception: raise ValueError("{0} not found".format(self._delimiter_name)) self._past_end = True return content index = re.search(self._delimiter, content) if index is not None: if self._include: index = index.end() else: index = index.start() content = content[:index] self._past_end = True elif nbytes is None and self._exception: # Read the whole file and didn't find the delimiter raise ValueError("{0} not found".format(self._delimiter_name)) else: if nbytes: content = content[:nbytes - len(self._initial_content)] self._fd.fast_forward(len(content)) if self._initial_content: content = self._initial_content + content self._initial_content = b'' if self._past_end and nbytes: self._trailing_content = content[nbytes:] content = content[:nbytes] return content @six.add_metaclass(util.InheritDocstrings) class GenericFile: """ Base class for an abstraction layer around a number of different file-like types. Each of its subclasses handles a particular kind of file in the most efficient way possible. This class should not be instantiated directly, but instead the factory function `get_file` should be used to get the correct subclass for the given file-like object. """ def __init__(self, fd, mode, close=False, uri=None): """ Parameters ---------- fd : file-like object The particular kind of file-like object must match the subclass of `GenericFile` being instantiated. mode : str Must be ``"r"`` (read), ``"w"`` (write), or ``"rw"`` (read/write). close : bool, optional When ``True``, close the given `fd` in the ``__exit__`` method, i.e. at the end of the with block. Should be set to ``True`` when this object "owns" the file object. Default: ``False``. uri : str, optional The file path or URI used to open the file. This is used to resolve relative URIs when the file refers to external sources. 
""" if not _check_bytes(fd, mode): raise ValueError( "File-like object must be opened in binary mode.") self._fd = fd self._mode = mode self._close = close self._blksize = io.DEFAULT_BUFFER_SIZE self._size = None self._uri = uri def __enter__(self): return self def __exit__(self, type, value, traceback): if self._close: if hasattr(self._fd, '__exit__'): self._fd.__exit__(type, value, traceback) else: self._fd.close() @property def block_size(self): return self._blksize @property def mode(self): """ The mode of the file. Will be ``'r'``, ``'w'`` or ``'rw'``. """ return self._mode @property def uri(self): """ The base uri of the file. """ return self._uri def read(self, size=-1): """ Read at most size bytes from the file (less if the read hits EOF before obtaining size bytes). If the size argument is negative or omitted, read all data until EOF is reached. The bytes are returned as a `bytes` object. An empty `bytes` object is returned when EOF is encountered immediately. Only available if `readable` returns `True`. """ # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' return self._fd.read(size) def read_block(self): """ Read a "block" from the file. For real filesystem files, the block is the size of a native filesystem block. """ return self.read(self._blksize) def read_blocks(self, size): """ Read ``size`` bytes of data from the file, one block at a time. The result is a generator where each value is a bytes object. """ i = 0 for i in range(0, size - self._blksize, self._blksize): yield self.read(self._blksize) if i < size: yield self.read(size - i) def write(self, content): self._fd.write(content) write.__doc__ = """ Write a string to the file. There is no return value. Due to buffering, the string may not actually show up in the file until the flush() or close() method is called. Only available if `writable` returns `True`. """ def write_array(self, array): _array_tofile(None, self.write, array.ravel(order='A')) def seek(self, offset, whence=0): """ Set the file's current position. Only available if `seekable` returns `True`. Parameters ---------- offset : integer Offset, in bytes. whence : integer, optional The `whence` argument is optional and defaults to SEEK_SET or 0 (absolute file positioning); other values are SEEK_CUR or 1 (seek relative to the current position) and SEEK_END or 2 (seek relative to the file’s end). """ result = self._fd.seek(offset, whence) self.tell() return result def tell(self): """ Return the file's current position, in bytes. Only available in `seekable` returns `True`. """ return self._fd.tell() def flush(self): """ Flush the internal buffer. """ self._fd.flush() def close(self): """ Close the file. The underlying file-object will only be closed if ``close=True`` was passed to the constructor. """ if self._close: self._fd.close() def truncate(self, size=None): """ Truncate the file to the given size. """ raise NotImplementedError() def writable(self): """ Returns `True` if the file can be written to. """ return 'w' in self.mode def readable(self): """ Returns `True` if the file can be read from. """ return 'r' in self.mode def seekable(self): """ Returns `True` if the file supports random access (`seek` and `tell`). """ return False def can_memmap(self): """ Returns `True` if the file supports memmapping. """ return False def is_closed(self): """ Returns `True` if the underlying file object is closed. 
""" return self._fd.closed def read_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Reads until a match for a given regular expression is found. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Returns ------- content : bytes The content from the current position in the file, up to the delimiter. Includes the delimiter if `include` is ``True``. Raises ------ ValueError : If the delimiter is not found before the end of the file. """ buff = io.BytesIO() reader = self.reader_until( delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) while True: content = reader.read(self.block_size) buff.write(content) if len(content) < self.block_size: break return buff.getvalue() def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Returns a readable file-like object that treats the given delimiter as the end-of-file. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Raises ------ ValueError : If the delimiter is not found before the end of the file. """ raise NotImplementedError() def seek_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Seeks in the file until a match for a given regular expression is found. This is similar to ``read_until``, except the intervening content is not retained. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Returns ------- content : bytes The content from the current position in the file, up to the delimiter. Includes the delimiter if `include` is ``True``. Raises ------ ValueError : If the delimiter is not found before the end of the file. 
""" reader = self.reader_until( delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) while True: try: content = reader.read(self.block_size) except ValueError: return False if content == b'': return True def fast_forward(self, size): """ Move the file position forward by `size`. """ raise NotImplementedError() def clear(self, nbytes): """ Write nbytes of zeros. """ blank_data = b'\0' * self.block_size for i in range(0, nbytes, self.block_size): length = min(nbytes - i, self.block_size) self.write(blank_data[:length]) def memmap_array(self, offset, size): """ Memmap a chunk of the file into a `np.core.memmap` object. Parameters ---------- offset : integer The offset, in bytes, in the file. size : integer The size of the data to memmap. Returns ------- array : np.core.memmap """ raise NotImplementedError() def read_into_array(self, size): """ Read a chunk of the file into a uint8 array. Parameters ---------- size : integer The size of the data. Returns ------- array : np.core.memmap """ buff = self.read(size) return np.frombuffer(buff, np.uint8, size, 0) class GenericWrapper: """ A wrapper around a `GenericFile` object so that closing only happens in the very outer layer. """ def __init__(self, fd): self._fd = fd def __enter__(self): return self def __exit__(self, type, value, traceback): pass def __getattr__(self, attr): return getattr(self._fd, attr) class RandomAccessFile(GenericFile): """ The base class of file types that support random access. """ def seekable(self): return True def _peek(self, size=-1): cursor = self.tell() content = self.read(size) self.seek(cursor, SEEK_SET) return content def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): return _TruncatedReader( self, delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) def fast_forward(self, size): if size < 0: self.seek(0, SEEK_END) self.seek(size, SEEK_CUR) if sys.platform.startswith('win'): # pragma: no cover def truncate(self, size=None): # ftruncate doesn't work on an open file in Windows. The # best we can do is clear the extra bytes or add extra # bytes to the end. if size is None: size = self.tell() self.seek(0, SEEK_END) file_size = self.tell() if size < file_size: self.seek(size, SEEK_SET) nbytes = file_size - size elif size > file_size: nbytes = size - file_size else: nbytes = 0 block = b'\0' * self.block_size while nbytes > 0: self.write(block[:min(nbytes, self.block_size)]) nbytes -= self.block_size self.seek(size, SEEK_SET) else: def truncate(self, size=None): if size is None: self._fd.truncate() else: self._fd.truncate(size) self.seek(size, SEEK_SET) class RealFile(RandomAccessFile): """ Handles "real" files on a filesystem. 
""" def __init__(self, fd, mode, close=False, uri=None): super(RealFile, self).__init__(fd, mode, close=close, uri=uri) stat = os.fstat(fd.fileno()) if sys.platform.startswith('win'): # pragma: no cover # There appears to be reliable way to get block size on Windows, # so just choose a reasonable default self._blksize = io.DEFAULT_BUFFER_SIZE else: self._blksize = stat.st_blksize self._size = stat.st_size if (uri is None and isinstance(fd.name, str)): self._uri = util.filepath_to_url(os.path.abspath(fd.name)) def write_array(self, arr): if isinstance(arr, np.memmap) and getattr(arr, 'fd', None) is self: arr.flush() self.fast_forward(len(arr.data)) else: _array_tofile(self._fd, self._fd.write, arr.ravel(order='A')) def can_memmap(self): return True def memmap_array(self, offset, size): if 'w' in self._mode: mode = 'r+' else: mode = 'r' mmap = np.memmap( self._fd, mode=mode, offset=offset, shape=size) mmap.fd = self return mmap def read_into_array(self, size): return _array_fromfile(self._fd, size) class MemoryIO(RandomAccessFile): """ Handles random-access memory buffers, mainly `io.BytesIO` and `StringIO.StringIO`. """ def __init__(self, fd, mode, uri=None): super(MemoryIO, self).__init__(fd, mode, uri=uri) tell = fd.tell() fd.seek(0, 2) self._size = fd.tell() fd.seek(tell, 0) def read_into_array(self, size): buf = self._fd.getvalue() offset = self._fd.tell() result = np.frombuffer(buf, np.uint8, size, offset) # Copy the buffer so the original memory can be released. result = result.copy() self.seek(size, SEEK_CUR) return result class InputStream(GenericFile): """ Handles an input stream, such as stdin. """ def __init__(self, fd, mode='r', close=False, uri=None): super(InputStream, self).__init__(fd, mode, close=close, uri=uri) self._fd = fd self._buffer = b'' def _peek(self, size=-1): if size < 0: self._buffer += self._fd.read() else: len_buffer = len(self._buffer) if len_buffer < size: self._buffer += self._fd.read(size - len_buffer) return self._buffer def read(self, size=-1): # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' len_buffer = len(self._buffer) if len_buffer == 0: return self._fd.read(size) elif size < 0: self._buffer += self._fd.read() buffer = self._buffer self._buffer = b'' return buffer elif len_buffer < size: if len_buffer < size: self._buffer += self._fd.read(size - len(self._buffer)) buffer = self._buffer self._buffer = b'' return buffer else: buffer = self._buffer[:size] self._buffer = self._buffer[size:] return buffer def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): return _TruncatedReader( self, delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) def fast_forward(self, size): if size >= 0 and len(self.read(size)) != size: raise IOError("Read past end of file") def read_into_array(self, size): try: # See if Numpy can handle this as a real file first... return np.fromfile(self._fd, np.uint8, size) except (IOError, AttributeError): # Else, fall back to reading into memory and then # returning the Numpy array. data = self.read(size) # We need to copy the array, so it is writable result = np.frombuffer(data, np.uint8, size) # When creating an array from a buffer, it is read-only. # If we need a read/write array, we have to copy it. 
if 'w' in self._mode: result = result.copy() return result class OutputStream(GenericFile): """ Handles an output stream, such as stdout. """ def __init__(self, fd, close=False, uri=None): super(OutputStream, self).__init__(fd, 'w', close=close, uri=uri) self._fd = fd def fast_forward(self, size): if size < 0: return self.clear(size) class HTTPConnection(RandomAccessFile): """ Uses a persistent HTTP connection to request specific ranges of the file and obtain its structure without transferring it in its entirety. It creates a temporary file on the local filesystem and copies blocks into it as needed. The `_blocks` array is a bitfield that keeps track of which blocks we have. """ # TODO: Handle HTTPS connection def __init__(self, connection, size, path, uri, first_chunk): self._mode = 'r' self._blksize = io.DEFAULT_BUFFER_SIZE # The underlying HTTPConnection object doesn't track closed # status, so we do that here. self._closed = False self._fd = connection self._path = path self._uri = uri # A bitmap of the blocks that we've already read and cached # locally self._blocks = np.zeros( int(math.ceil(size / self._blksize / 8)), np.uint8) local_file = tempfile.TemporaryFile() self._local = RealFile(local_file, 'rw', close=True) self._local.truncate(size) self._local.seek(0) self._local.write(first_chunk) self._local.seek(0) self._blocks[0] = 1 # The size of the entire file self._size = size self._nreads = 0 # Some methods just short-circuit to the local copy self.seek = self._local.seek self.tell = self._local.tell def __exit__(self, type, value, traceback): if not self._closed: self._local.close() if hasattr(self._fd, '__exit__'): self._fd.__exit__(type, value, traceback) else: self._fd.close() self._closed = True def close(self): if not self._closed: self._local.close() self._fd.close() self._closed = True def is_closed(self): return self._closed def _get_range(self, start, end): """ Ensure the range of bytes has been copied to the local cache. """ if start >= self._size: return end = min(end, self._size) blocks = self._blocks block_size = self.block_size def has_block(x): return blocks[x >> 3] & (1 << (x & 0x7)) def mark_block(x): blocks[x >> 3] |= (1 << (x & 0x7)) block_start = start // block_size block_end = end // block_size + 1 pos = self._local.tell() try: # Between block_start and block_end, some blocks may be # already loaded. We want to load all of the missing # blocks in as few requests as possible. 
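            # Worked example: with 8192-byte blocks (io.DEFAULT_BUFFER_SIZE
            # on CPython), if blocks 3-5 are all missing, the scan below
            # issues a single request "Range: bytes=24576-49151"
            # (3 * 8192 through 6 * 8192 - 1) rather than three separate ones.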
a = block_start while a < block_end: # Skip over whole groups of blocks at a time while a < block_end and blocks[a >> 3] == 0xff: a = ((a >> 3) + 1) << 3 while a < block_end and has_block(a): a += 1 if a >= block_end: break b = a + 1 # Skip over whole groups of blocks at a time while b < block_end and blocks[b >> 3] == 0x0: b = ((b >> 3) + 1) << 3 while b < block_end and not has_block(b): b += 1 if b > block_end: b = block_end if a * block_size >= self._size: return headers = { 'Range': 'bytes={0}-{1}'.format( a * block_size, (b * block_size) - 1)} self._fd.request('GET', self._path, headers=headers) response = self._fd.getresponse() if response.status != 206: raise IOError("HTTP failed: {0} {1}".format( response.status, response.reason)) # Now copy over to the temporary file, block-by-block self._local.seek(a * block_size, os.SEEK_SET) for i in range(a, b): chunk = response.read(block_size) self._local.write(chunk) mark_block(i) response.close() self._nreads += 1 a = b finally: self._local.seek(pos, os.SEEK_SET) def read(self, size=-1): if self._closed: raise IOError("read from closed connection") pos = self._local.tell() # Adjust size so it doesn't go beyond the end of the file if size < 0 or pos + size > self._size: size = self._size - pos # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' self._get_range(pos, pos + size) return self._local.read(size) def read_into_array(self, size): if self._closed: raise IOError("read from closed connection") pos = self._local.tell() if pos + size > self._size: raise IOError("Read past end of file.") self._get_range(pos, pos + size) return self._local.memmap_array(pos, size) def _make_http_connection(init, mode, uri=None): """ Creates a HTTPConnection instance if the HTTP server supports Range requests, otherwise falls back to a generic InputStream. """ parsed = urlparse.urlparse(init) connection = http.client.HTTPConnection(parsed.netloc) connection.connect() block_size = io.DEFAULT_BUFFER_SIZE # We request a range of the whole file ("0-") to check if the # server understands that header entry, and also to get the # size of the entire file headers = {'Range': 'bytes=0-'} connection.request('GET', parsed.path, headers=headers) response = connection.getresponse() if response.status // 100 != 2: raise IOError("HTTP failed: {0} {1}".format( response.status, response.reason)) # Status 206 means a range was returned. If it's anything else # that indicates the server probably doesn't support Range # headers. if (response.status != 206 or response.getheader('accept-ranges', None) != 'bytes' or response.getheader('content-range', None) is None or response.getheader('content-length', None) is None): # Fall back to a regular input stream, but we don't # need to open a new connection. 
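        # Reassigning ``close`` below ties the connection's lifetime to the
        # stream: closing the InputStream (created with close=True) also
        # closes the underlying socket.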
response.close = connection.close return InputStream(response, mode, uri=uri or init, close=True) # Since we'll be requesting chunks, we can't read at all with the # current request (because we can't abort it), so just close and # start over size = int(response.getheader('content-length')) first_chunk = response.read(block_size) response.close() return HTTPConnection(connection, size, parsed.path, uri or init, first_chunk) def get_uri(file_obj): """ Returns the uri of the given file object Parameters ---------- uri : object """ if isinstance(file_obj, six.string_types): return file_obj if isinstance(file_obj, GenericFile): return file_obj.uri # A catch-all for types from Python's io module that have names return getattr(file_obj, 'name', '') def get_file(init, mode='r', uri=None, close=False): """ Returns a `GenericFile` instance suitable for wrapping the given object `init`. If passed an already open file-like object, it must be opened for reading/writing in binary mode. It is the caller's responsibility to close it. Parameters ---------- init : object `init` may be: - A `bytes` or `unicode` file path or ``file:`` or ``http:`` url. - A Python 2 `file` object. - An `io.IOBase` object (the default file object on Python 3). - A ducktyped object that looks like a file object. If `mode` is ``"r"``, it must have a ``read`` method. If `mode` is ``"w"``, it must have a ``write`` method. If `mode` is ``"rw"`` it must have the ``read``, ``write``, ``tell`` and ``seek`` methods. - A `GenericFile` instance, in which case it is wrapped in a `GenericWrapper` instance, so that the file is closed when only when the final layer is unwrapped. mode : str Must be one of ``"r"``, ``"w"`` or ``"rw"``. uri : str Sets the base URI of the file object. This will be used to resolve any relative URIs contained in the file. This is redundant if `init` is a `bytes` or `unicode` object (since it will be the uri), and it may be determined automatically if `init` refers to a regular filesystem file. It is not required if URI resolution is not used in the file. close : bool If ``True``, closes the underlying file handle when this object is closed. Defaults to ``False``. Returns ------- fd : GenericFile Raises ------ ValueError, TypeError, IOError """ if mode not in ('r', 'w', 'rw'): raise ValueError("mode must be 'r', 'w' or 'rw'") if init in (sys.__stdout__, sys.__stdin__, sys.__stderr__): init = os.fdopen(init.fileno(), init.mode + 'b') if isinstance(init, (GenericFile, GenericWrapper)): if mode not in init.mode: raise ValueError( "File is opened as '{0}', but '{1}' was requested".format( init.mode, mode)) return GenericWrapper(init) elif isinstance(init, (str, pathlib.Path)): parsed = urlparse.urlparse(str(init)) if parsed.scheme in ['http', 'https']: if 'w' in mode: raise ValueError( "HTTP connections can not be opened for writing") return _make_http_connection(init, mode, uri=uri) elif parsed.scheme in _local_file_schemes: if mode == 'rw': realmode = 'r+b' else: realmode = mode + 'b' # Windows paths are not URIs, and so they should not be parsed as # such. Otherwise, the drive component of the path can get lost. # This is not an ideal solution, but we can't use pathlib here # because it doesn't handle URIs properly. 
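            # (For example, urlparse('c:/data/file.asdf') reports scheme='c',
            # so a drive letter would otherwise be mistaken for a URL scheme.)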
            if sys.platform.startswith('win') and parsed.scheme in string.ascii_letters:
                realpath = str(init)
            else:
                realpath = url2pathname(parsed.path)
            if mode == 'w':
                fd = atomicfile.atomic_open(realpath, realmode)
            else:
                fd = open(realpath, realmode)
            fd = fd.__enter__()
            return RealFile(fd, mode, close=True, uri=uri)

    elif isinstance(init, io.BytesIO):
        return MemoryIO(init, mode, uri=uri)

    elif isinstance(init, io.StringIO):
        raise TypeError(
            "io.StringIO objects are not supported. Use io.BytesIO instead.")

    elif isinstance(init, io.IOBase):
        if (('r' in mode and not init.readable()) or
                ('w' in mode and not init.writable())):
            raise ValueError(
                "File is opened as '{0}', but '{1}' was requested".format(
                    init.mode, mode))

        if init.seekable():
            if isinstance(init, (io.BufferedReader,
                                 io.BufferedWriter,
                                 io.BufferedRandom)):
                init2 = init.raw
            else:
                init2 = init
            if isinstance(init2, io.RawIOBase):
                result = RealFile(init2, mode, uri=uri, close=close)
            else:
                result = MemoryIO(init2, mode, uri=uri)
            result._secondary_fd = init
            return result
        else:
            if mode == 'w':
                return OutputStream(init, uri=uri, close=close)
            elif mode == 'r':
                return InputStream(init, mode, uri=uri, close=close)
            else:
                raise ValueError(
                    "File '{0}' could not be opened in 'rw' mode".format(init))

    elif mode == 'w' and (
            hasattr(init, 'write') and
            hasattr(init, 'seek') and
            hasattr(init, 'tell')):
        return MemoryIO(init, mode, uri=uri)

    elif mode == 'r' and (
            hasattr(init, 'read') and
            hasattr(init, 'seek') and
            hasattr(init, 'tell')):
        return MemoryIO(init, mode, uri=uri)

    elif mode == 'rw' and (
            hasattr(init, 'read') and
            hasattr(init, 'write') and
            hasattr(init, 'seek') and
            hasattr(init, 'tell')):
        return MemoryIO(init, mode, uri=uri)

    elif mode == 'w' and hasattr(init, 'write'):
        return OutputStream(init, uri=uri, close=close)

    elif mode == 'r' and hasattr(init, 'read'):
        return InputStream(init, mode, uri=uri, close=close)

    raise ValueError("Can't handle '{0}' as a file for mode '{1}'".format(
        init, mode))
asdf-2.5.1/asdf/reference.py0000644000446400020070000001260513605165746020076 0ustar  eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

"""
Manages external references in the YAML tree using the JSON Reference
and JSON Pointer standards.
"""

from collections.abc import Sequence
import weakref

import numpy as np

from urllib import parse as urlparse

from .types import AsdfType
from . import generic_io
from . import treeutil
from . import util


__all__ = [
    'resolve_fragment', 'Reference', 'find_references', 'resolve_references',
    'make_reference']


def resolve_fragment(tree, pointer):
    """
    Resolve a JSON Pointer within the tree.
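
    Examples
    --------
    >>> resolve_fragment({'a': {'b': [10, 20]}}, '/a/b/1')
    20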
""" pointer = pointer.lstrip(u"/") parts = urlparse.unquote(pointer).split(u"/") if pointer else [] for part in parts: part = part.replace(u"~1", u"/").replace(u"~0", u"~") if isinstance(tree, Sequence): # Array indexes should be turned into integers try: part = int(part) except ValueError: pass try: tree = tree[part] except (TypeError, LookupError): raise ValueError( "Unresolvable reference: '{0}'".format(pointer)) return tree class Reference(AsdfType): yaml_tag = 'tag:yaml.org,2002:map' def __init__(self, uri, base_uri=None, asdffile=None, target=None): self._uri = uri if asdffile is not None: self._asdffile = weakref.ref(asdffile) self._base_uri = base_uri self._target = target def _get_target(self, do_not_fill_defaults=False): if self._target is None: base_uri = self._base_uri if base_uri is None: base_uri = self._asdffile().uri uri = generic_io.resolve_uri(base_uri, self._uri) asdffile = self._asdffile().open_external( uri, do_not_fill_defaults=do_not_fill_defaults) parts = urlparse.urlparse(self._uri) fragment = parts.fragment self._target = resolve_fragment(asdffile.tree, fragment) return self._target def __repr__(self): # repr alone should not force loading of the reference if self._target is None: return "".format( self._uri) else: return "".format(repr(self._target)) def __str__(self): # str alone should not force loading of the reference if self._target is None: return "".format( self._uri) else: return str(self._target) def __len__(self): return len(self._get_target()) def __getattr__(self, attr): if attr == '_tag': return None try: return getattr(self._get_target(), attr) except Exception: raise AttributeError("No attribute '{0}'".format(attr)) def __getitem__(self, item): return self._get_target()[item] def __setitem__(self, item, val): self._get_target()[item] = val def __array__(self): return np.asarray(self._get_target()) def __call__(self, do_not_fill_defaults=False): return self._get_target(do_not_fill_defaults=do_not_fill_defaults) def __contains__(self, item): return item in self._get_target() @classmethod def to_tree(self, data, ctx): if ctx.uri is not None: uri = generic_io.relative_uri(ctx.uri, data._uri) else: uri = data._uri return {'$ref': uri} @classmethod def validate(self, data): pass def find_references(tree, ctx, ignore_implicit_conversion=False): """ Find all of the JSON references in the tree, and convert them into `Reference` objects. """ def do_find(tree, json_id): if isinstance(tree, dict) and '$ref' in tree: return Reference(tree['$ref'], json_id, asdffile=ctx) return tree return treeutil.walk_and_modify( tree, do_find, ignore_implicit_conversion=ignore_implicit_conversion) def resolve_references(tree, ctx, do_not_fill_defaults=False): """ Resolve all of the references in the tree, by loading the external data and inserting it directly into the tree. """ def do_resolve(tree): if isinstance(tree, Reference): return tree(do_not_fill_defaults=do_not_fill_defaults) return tree tree = find_references(tree, ctx) return treeutil.walk_and_modify(tree, do_resolve) def make_reference(asdffile, path): """ Make a reference to a subtree of the given ASDF file. Parameters ---------- asdffile : AsdfFile path : list of str and int, optional The parts of the path pointing to an item in this tree. If omitted, points to the root of the tree. Returns ------- reference : reference.Reference A reference object. 
""" path_str = '/'.join( x.replace(u"~", u"~0").replace(u"/", u"~1") for x in path) target = resolve_fragment(asdffile.tree, path_str) if asdffile.uri is None: raise ValueError( "Can not make a reference to a AsdfFile without an associated URI.") base_uri = util.get_base_uri(asdffile.uri) uri = base_uri + '#' + path_str return Reference(uri, target=target) asdf-2.5.1/asdf/resolver.py0000644000446400020070000001332213605165746017776 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import sys import os.path import warnings from . import constants from . import util from .exceptions import AsdfDeprecationWarning def find_schema_path(): dirname = os.path.dirname(__file__) # This means we are working within a development build if os.path.exists(os.path.join(dirname, '..', 'asdf-standard')): return os.path.join(dirname, '..', 'asdf-standard', 'schemas') # Otherwise, we return the installed location return os.path.join(dirname, 'schemas') class Resolver: """ A class that can be used to map strings with a particular prefix to another. """ def __init__(self, mappings, prefix): """ Parameters ---------- mappings : list of tuple or callable A list of mappings to try, in order. For each entry: - If a callable, must take a string and return a remapped string. Should return `None` if the mapping does not apply to the input. - If a tuple, the first item is a string prefix to match. The second item specifies how to create the new result in Python string formatting syntax. The following formatting tokens are available, where ``X`` relates to the ``prefix`` argument: - ``{X}``: The entire string passed in. - ``{X_prefix}``: The prefix of the string that was matched. - ``{X_suffix}``: The part of the string following the prefix. prefix : str The prefix to use for the Python formatting token names. """ self._mappings = self._validate_mappings(mappings) self._prefix = prefix def add_mapping(self, mappings, prefix=''): # Deprecating this because Resolver is used as part of a dictionary key # and so shouldn't be mutable. 
assert prefix == self._prefix warnings.warn("The 'add_mapping' method is deprecated.", AsdfDeprecationWarning) self._mappings = self._mappings + self._validate_mappings(mappings) def _perform_mapping(self, mapping, input): if callable(mapping): output = mapping(input) if output is not None: return (sys.maxsize, mapping(input)) else: return None else: if input.startswith(mapping[0]): format_tokens = { self._prefix: input, self._prefix + "_prefix": mapping[0], self._prefix + "_suffix": input[len(mapping[0]):] } return len(mapping[0]), mapping[1].format(**format_tokens) else: return None def _validate_mappings(self, mappings): normalized = [] for mapping in mappings: if callable(mapping): normalized.append(mapping) elif (isinstance(mapping, (list, tuple)) and len(mapping) == 2 and isinstance(mapping[0], str) and isinstance(mapping[1], str)): normalized.append(tuple(mapping)) else: raise ValueError("Invalid mapping '{0}'".format(mapping)) return tuple(normalized) def __call__(self, input): candidates = [(0, input)] for mapping in self._mappings: output = self._perform_mapping(mapping, input) if output is not None: candidates.append(output) candidates.sort() return candidates[-1][1] def __hash__(self): return hash(self._mappings) def __eq__(self, other): if not isinstance(other, Resolver): return NotImplemented return self._mappings == other._mappings class ResolverChain: """ A chain of Resolvers, each of which is called with the previous Resolver's output to produce the final transformed string. """ def __init__(self, *resolvers): """ Parameters ---------- *resolvers : list of Resolver Resolvers to include in the chain. """ self._resolvers = tuple(resolvers) def __call__(self, input): for resolver in self._resolvers: input = resolver(input) return input def __hash__(self): return hash(self._resolvers) def __eq__(self, other): if not isinstance(other, ResolverChain): return NotImplemented return self._resolvers == other._resolvers DEFAULT_URL_MAPPING = [ (constants.STSCI_SCHEMA_URI_BASE, util.filepath_to_url( os.path.join(find_schema_path(), 'stsci.edu')) + '/{url_suffix}.yaml')] DEFAULT_TAG_TO_URL_MAPPING = [ (constants.STSCI_SCHEMA_TAG_BASE, 'http://stsci.edu/schemas/asdf{tag_suffix}') ] def default_url_mapping(uri): warnings.warn("'default_url_mapping' is deprecated.", AsdfDeprecationWarning) return default_url_mapping._resolver(uri) default_url_mapping._resolver = Resolver(DEFAULT_URL_MAPPING, 'url') def default_tag_to_url_mapping(uri): warnings.warn("'default_tag_to_url_mapping' is deprecated.", AsdfDeprecationWarning) return default_tag_to_url_mapping._resolver(uri) default_tag_to_url_mapping._resolver = Resolver(DEFAULT_TAG_TO_URL_MAPPING, 'tag') def default_resolver(uri): warnings.warn( "The 'default_resolver(...)' function is deprecated. 
Use " "'asdf.extension.get_default_resolver()(...)' instead.", AsdfDeprecationWarning) return default_resolver._resolver(uri) default_resolver._resolver = ResolverChain(default_tag_to_url_mapping._resolver, default_url_mapping._resolver) asdf-2.5.1/asdf/schema.py0000644000446400020070000005077313605165746017410 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import json import datetime import warnings import copy from numbers import Integral from functools import lru_cache from collections import OrderedDict from collections.abc import Mapping from urllib import parse as urlparse from jsonschema import validators as mvalidators from jsonschema.exceptions import ValidationError import yaml from . import constants from . import generic_io from . import reference from . import treeutil from . import util from .extension import default_extensions from .compat.jsonschemacompat import JSONSCHEMA_LT_3 from . import extension from .exceptions import AsdfDeprecationWarning YAML_SCHEMA_METASCHEMA_ID = 'http://stsci.edu/schemas/yaml-schema/draft-01' if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_loader = yaml.SafeLoader __all__ = ['validate', 'fill_defaults', 'remove_defaults', 'check_schema'] def default_ext_resolver(uri): """ Resolver that uses tag/url mappings from all installed extensions """ # Deprecating this because it doesn't play nicely with the caching on # load_schema(...). warnings.warn( "The 'default_ext_resolver(...)' function is deprecated. Use " "'asdf.extension.get_default_resolver()(...)' instead.", AsdfDeprecationWarning) return extension.get_default_resolver()(uri) PYTHON_TYPE_TO_YAML_TAG = { None: 'null', str: 'str', bytes: 'str', bool: 'bool', int: 'int', float: 'float', list: 'seq', dict: 'map', set: 'set', OrderedDict: 'omap' } # Prepend full YAML tag prefix for k, v in PYTHON_TYPE_TO_YAML_TAG.items(): PYTHON_TYPE_TO_YAML_TAG[k] = constants.YAML_TAG_PREFIX + v def _type_to_tag(type_): for base in type_.mro(): if base in PYTHON_TYPE_TO_YAML_TAG: return PYTHON_TYPE_TO_YAML_TAG[base] return None def validate_tag(validator, tagname, instance, schema): if hasattr(instance, '_tag'): instance_tag = instance._tag else: # Try tags for known Python builtins instance_tag = _type_to_tag(type(instance)) if instance_tag is not None and instance_tag != tagname: yield ValidationError( "mismatched tags, wanted '{0}', got '{1}'".format( tagname, instance_tag)) def validate_propertyOrder(validator, order, instance, schema): """ Stores a value on the `tagged.TaggedDict` instance so that properties can be written out in the preferred order. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not validator.is_type(instance, 'object'): return if not order: # propertyOrder may be an empty list return instance.property_order = order def validate_flowStyle(validator, flow_style, instance, schema): """ Sets a flag on the `tagged.TaggedList` or `tagged.TaggedDict` object so that the YAML generator knows which style to use to write the element. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. 
""" if not (validator.is_type(instance, 'object') or validator.is_type(instance, 'array')): return instance.flow_style = flow_style def validate_style(validator, style, instance, schema): """ Sets a flag on the `tagged.TaggedString` object so that the YAML generator knows which style to use to write the string. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not validator.is_type(instance, 'string'): return instance.style = style def validate_type(validator, types, instance, schema): """ PyYAML returns strings that look like dates as datetime objects. However, as far as JSON is concerned, this is type==string and format==date-time. That detects for that case and doesn't raise an error, otherwise falling back to the default type checker. """ if (isinstance(instance, datetime.datetime) and schema.get('format') == 'date-time' and 'string' in types): return return mvalidators.Draft4Validator.VALIDATORS['type']( validator, types, instance, schema) YAML_VALIDATORS = util.HashableDict( mvalidators.Draft4Validator.VALIDATORS.copy()) YAML_VALIDATORS.update({ 'tag': validate_tag, 'propertyOrder': validate_propertyOrder, 'flowStyle': validate_flowStyle, 'style': validate_style, 'type': validate_type }) def validate_fill_default(validator, properties, instance, schema): if not validator.is_type(instance, 'object'): return for property, subschema in properties.items(): if "default" in subschema: instance.setdefault(property, subschema["default"]) for err in mvalidators.Draft4Validator.VALIDATORS['properties']( validator, properties, instance, schema): yield err FILL_DEFAULTS = util.HashableDict() for key in ('allOf', 'anyOf', 'oneOf', 'items'): FILL_DEFAULTS[key] = mvalidators.Draft4Validator.VALIDATORS[key] FILL_DEFAULTS['properties'] = validate_fill_default def validate_remove_default(validator, properties, instance, schema): if not validator.is_type(instance, 'object'): return for property, subschema in properties.items(): if subschema.get("default", None) is not None: if instance.get(property, None) == subschema["default"]: del instance[property] for err in mvalidators.Draft4Validator.VALIDATORS['properties']( validator, properties, instance, schema): yield err REMOVE_DEFAULTS = util.HashableDict() for key in ('allOf', 'anyOf', 'oneOf', 'items'): REMOVE_DEFAULTS[key] = mvalidators.Draft4Validator.VALIDATORS[key] REMOVE_DEFAULTS['properties'] = validate_remove_default @lru_cache() def _create_validator(validators=YAML_VALIDATORS): meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID, extension.get_default_resolver()) if JSONSCHEMA_LT_3: base_cls = mvalidators.create(meta_schema=meta_schema, validators=validators) else: type_checker = mvalidators.Draft4Validator.TYPE_CHECKER.redefine( 'array', lambda checker, instance: isinstance(instance, list) or isinstance(instance, tuple) ) id_of = mvalidators.Draft4Validator.ID_OF base_cls = mvalidators.create( meta_schema=meta_schema, validators=validators, type_checker=type_checker, id_of=id_of ) class ASDFValidator(base_cls): if JSONSCHEMA_LT_3: DEFAULT_TYPES = base_cls.DEFAULT_TYPES.copy() DEFAULT_TYPES['array'] = (list, tuple) def iter_errors(self, instance, _schema=None, _seen=set()): # We can't validate anything that looks like an external reference, # since we don't have the actual content, so we just have to defer # it for now. If the user cares about complete validation, they # can call `AsdfFile.resolve_references`. 
if id(instance) in _seen: return if _schema is None: schema = self.schema else: schema = _schema if ((isinstance(instance, dict) and '$ref' in instance) or isinstance(instance, reference.Reference)): return if _schema is None: tag = getattr(instance, '_tag', None) if tag is not None: schema_path = self.ctx.resolver(tag) if schema_path != tag: try: s = load_schema(schema_path, self.ctx.resolver) except FileNotFoundError: msg = "Unable to locate schema file for '{}': '{}'" warnings.warn(msg.format(tag, schema_path)) s = {} if s: with self.resolver.in_scope(schema_path): for x in super(ASDFValidator, self).iter_errors(instance, s): yield x if isinstance(instance, dict): new_seen = _seen | set([id(instance)]) for val in instance.values(): for x in self.iter_errors(val, _seen=new_seen): yield x elif isinstance(instance, list): new_seen = _seen | set([id(instance)]) for val in instance: for x in self.iter_errors(val, _seen=new_seen): yield x else: for x in super(ASDFValidator, self).iter_errors(instance, _schema=schema): yield x return ASDFValidator # We want to load mappings in schema as ordered dicts class OrderedLoader(_yaml_base_loader): pass def construct_mapping(loader, node): loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) OrderedLoader.add_constructor( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping) @lru_cache() def _load_schema(url): with generic_io.get_file(url) as fd: if isinstance(url, str) and url.endswith('json'): json_data = fd.read().decode('utf-8') result = json.loads(json_data, object_pairs_hook=OrderedDict) else: result = yaml.load(fd, Loader=OrderedLoader) return result, fd.uri def _make_schema_loader(resolver): def load_schema(url): url = resolver(str(url)) return _load_schema(url) return load_schema def _make_resolver(url_mapping): handlers = {} schema_loader = _make_schema_loader(url_mapping) def get_schema(url): return schema_loader(url)[0] for x in ['http', 'https', 'file', 'tag']: handlers[x] = get_schema # We set cache_remote=False here because we do the caching of # remote schemas here in `load_schema`, so we don't need # jsonschema to do it on our behalf. Setting it to `True` # counterintuitively makes things slower. return mvalidators.RefResolver( '', {}, cache_remote=False, handlers=handlers) def _load_draft4_metaschema(): from jsonschema import _utils return _utils.load_schema('draft4') # This is a list of schema that we have locally on disk but require # special methods to obtain HARDCODED_SCHEMA = { 'http://json-schema.org/draft-04/schema': _load_draft4_metaschema } @lru_cache() def load_custom_schema(url): # Avoid circular import from .tags.core import AsdfObject custom = load_schema(url, resolve_local_refs=True) core = load_schema(AsdfObject.yaml_tag) def update(d, u): for k, v in u.items(): # Respect the property ordering of the core schema if k == 'propertyOrder' and k in d: d[k] = u[k] + d[k] elif isinstance(v, Mapping): d[k] = update(d.get(k, {}), v) else: d[k] = v return d return update(custom, core) def load_schema(url, resolver=None, resolve_references=False, resolve_local_refs=False): """ Load a schema from the given URL. Parameters ---------- url : str The path to the schema resolver : callable, optional A callback function used to map URIs to other URIs. The callable must take a string and return a string or `None`. This is useful, for example, when a remote resource has a mirror on the local filesystem that you wish to use. resolve_references : bool, optional If `True`, resolve all `$ref` references. 
resolve_local_refs : bool, optional If `True`, resolve all `$ref` references that refer to other objects within the same schema. This will automatically be handled when passing `resolve_references=True`, but it may be desirable in some cases to control local reference resolution separately. """ if resolver is None: # We can't just set this as the default in load_schema's definition # because invoking get_default_resolver at import time leads to a circular import. resolver = extension.get_default_resolver() # We want to cache the work that went into constructing the schema, but returning # the same object is treacherous, because users who mutate the result will not # expect that they're changing the schema everywhere. return copy.deepcopy( _load_schema_cached(url, resolver, resolve_references, resolve_local_refs) ) @lru_cache() def _load_schema_cached(url, resolver, resolve_references, resolve_local_refs): loader = _make_schema_loader(resolver) if url in HARDCODED_SCHEMA: schema = HARDCODED_SCHEMA[url]() else: schema, url = loader(url) # Resolve local references if resolve_local_refs: def resolve_local(node, json_id): if isinstance(node, dict) and '$ref' in node: ref_url = resolver(node['$ref']) if ref_url.startswith('#'): parts = urlparse.urlparse(ref_url) subschema_fragment = reference.resolve_fragment( schema, parts.fragment) return subschema_fragment return node schema = treeutil.walk_and_modify(schema, resolve_local) if resolve_references: def resolve_refs(node, json_id): if json_id is None: json_id = url if isinstance(node, dict) and '$ref' in node: suburl = generic_io.resolve_uri(json_id, resolver(node['$ref'])) parts = urlparse.urlparse(suburl) fragment = parts.fragment if len(fragment): suburl_path = suburl[:-(len(fragment) + 1)] else: suburl_path = suburl suburl_path = resolver(suburl_path) if suburl_path == url: subschema = schema else: subschema = load_schema(suburl_path, resolver, True) subschema_fragment = reference.resolve_fragment( subschema, fragment) return subschema_fragment return node schema = treeutil.walk_and_modify(schema, resolve_refs) return schema def get_validator(schema={}, ctx=None, validators=None, url_mapping=None, *args, **kwargs): """ Get a JSON schema validator object for the given schema. The additional *args and **kwargs are passed along to `jsonschema.validate`. Parameters ---------- schema : schema, optional Explicit schema to use. If not provided, the schema to use is determined by the tag on instance (or subinstance). ctx : AsdfFile context Used to resolve tags and urls validators : dict, optional A dictionary mapping properties to validators to use (instead of the built-in ones and ones provided by extension types). url_mapping : resolver.Resolver, optional A resolver to convert remote URLs into local ones. Returns ------- validator : jsonschema.Validator """ if ctx is None: from .asdf import AsdfFile ctx = AsdfFile() if validators is None: validators = util.HashableDict(YAML_VALIDATORS.copy()) validators.update(ctx._extensions.validators) kwargs['resolver'] = _make_resolver(url_mapping) # We don't just call validators.validate() directly here, because # that validates the schema itself, wasting a lot of time (at the # time of this writing, it was half of the runtime of the unit # test suite!!!). Instead, we assume that the schemas are valid # through the running of the unit tests, not at run time. 
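    # A minimal usage sketch of this module's public entry points
    # (illustrative only; the schema URI below is one that ships with asdf):
    #
    #     import asdf.schema
    #
    #     schema = asdf.schema.load_schema(
    #         'http://stsci.edu/schemas/asdf/core/ndarray-1.0.0')
    #     validator = asdf.schema.get_validator(schema)
    #     validator.validate(instance, _schema=schema)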
cls = _create_validator(validators=validators) validator = cls(schema, *args, **kwargs) validator.ctx = ctx return validator def validate_large_literals(instance, reading=False): """ Validate that the tree has no large numeric literals. """ # We can count on 52 bits of precision for instance in treeutil.iter_tree(instance): if not isinstance(instance, Integral): continue if instance <= ((1 << 51) - 1) and instance >= -((1 << 51) - 2): continue if not reading: raise ValidationError( "Integer value {0} is too large to safely represent as a " "literal in ASDF".format(instance)) warnings.warn( "Invalid integer literal value {0} detected while reading file. " "The value has been read safely, but the file should be " "fixed.".format(instance) ) def validate(instance, ctx=None, schema={}, validators=None, reading=False, *args, **kwargs): """ Validate the given instance (which must be a tagged tree) against the appropriate schema. The schema itself is located using the tag on the instance. The additional *args and **kwargs are passed along to `jsonschema.validate`. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls schema : schema, optional Explicit schema to use. If not provided, the schema to use is determined by the tag on instance (or subinstance). validators : dict, optional A dictionary mapping properties to validators to use (instead of the built-in ones and ones provided by extension types). reading: bool, optional Indicates whether validation is being performed when the file is being read. This is useful to allow for different validation behavior when reading vs writing files. """ if ctx is None: from .asdf import AsdfFile ctx = AsdfFile() validator = get_validator(schema, ctx, validators, ctx.resolver, *args, **kwargs) validator.validate(instance, _schema=(schema or None)) validate_large_literals(instance, reading=reading) def fill_defaults(instance, ctx, reading=False): """ For any default values in the schema, add them to the tree if they don't exist. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls reading: bool, optional Indicates whether the ASDF file is being read (in contrast to being written). """ validate(instance, ctx, validators=FILL_DEFAULTS, reading=reading) def remove_defaults(instance, ctx): """ For any values in the tree that are the same as the default values specified in the schema, remove them from the tree. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls """ validate(instance, ctx, validators=REMOVE_DEFAULTS) def check_schema(schema): """ Check a given schema to make sure it is valid YAML schema. """ # We also want to validate the "default" values in the schema # against the schema itself. jsonschema as a library doesn't do # this on its own. 
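    # For example, a schema fragment like the following (hypothetical) would
    # be rejected by this check, because the default value does not satisfy
    # the declared type:
    #
    #     properties:
    #       foo:
    #         type: string
    #         default: 42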
def validate_default(validator, default, instance, schema): if not validator.is_type(instance, 'object'): return if 'default' in instance: with instance_validator.resolver.in_scope(scope): for err in instance_validator.iter_errors( instance['default'], instance): yield err VALIDATORS = util.HashableDict( mvalidators.Draft4Validator.VALIDATORS.copy()) VALIDATORS.update({ 'default': validate_default }) meta_schema_id = schema.get('$schema', YAML_SCHEMA_METASCHEMA_ID) meta_schema = load_schema(meta_schema_id, extension.get_default_resolver()) resolver = _make_resolver(extension.get_default_resolver()) cls = mvalidators.create(meta_schema=meta_schema, validators=VALIDATORS) validator = cls(meta_schema, resolver=resolver) instance_validator = mvalidators.Draft4Validator(schema, resolver=resolver) scope = schema.get('id', '') validator.validate(schema, _schema=meta_schema) asdf-2.5.1/asdf/stream.py0000644000446400020070000000352713567314375017437 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from .tags.core import ndarray class Stream(ndarray.NDArrayType): """ Used to put a streamed array into the tree. Examples -------- Save a double-precision array with 1024 columns, one row at a time:: >>> from asdf import AsdfFile, Stream >>> import numpy as np >>> ff = AsdfFile() >>> ff.tree['streamed'] = Stream([1024], np.float64) >>> with open('test.asdf', 'wb') as fd: ... ff.write_to(fd) ... for i in range(200): ... nbytes = fd.write( ... np.array([i] * 1024, np.float64).tostring()) """ name = None types = [] def __init__(self, shape, dtype, strides=None): self._shape = shape self._datatype, self._byteorder = ndarray.numpy_dtype_to_asdf_datatype(dtype) self._strides = strides self._array = None def _make_array(self): self._array = None @classmethod def reserve_blocks(cls, data, ctx): if isinstance(data, Stream): yield ctx.blocks.get_streamed_block() @classmethod def from_tree(cls, data, ctx): return ndarray.NDArrayType.from_tree(data, ctx) @classmethod def to_tree(cls, data, ctx): ctx.blocks.get_streamed_block() result = {} result['source'] = -1 result['shape'] = ['*'] + data._shape result['datatype'] = data._datatype result['byteorder'] = data._byteorder if data._strides is not None: result['strides'] = data._strides return result def __repr__(self): return "Stream({}, {}, strides={})".format( self._shape, self._datatype, self._strides) def __str__(self): return str(self.__repr__()) asdf-2.5.1/asdf/tagged.py0000644000446400020070000000741013567314375017372 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ This file manages a transient representation of the tree made up of simple Python data types (lists, dicts, scalars) wrapped inside of `Tagged` subclasses, which add a ``tag`` attribute to hold the associated YAML tag. Below "basic data types" refers to the basic built-in data types defined in the core YAML specification. "Custom data types" are specialized tags that are added by ASDF or third-parties that are not in the YAML specification. When YAML is loaded from disk, we want to first validate it using JSON schema, which only understands basic Python data types, not the ``Nodes`` that ``pyyaml`` uses as its intermediate representation. However, basic Python data types do not preserve the tag information from the YAML file that we need later to convert elements to custom data types. 
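For example, loading ``!core/software-1.0.0 {name: foo}`` into a plain dict
would hand the validator ``{'name': 'foo'}``, with no record of the tag.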
Therefore, the approach here is to wrap those basic types inside of `Tagged`
objects long enough to run through the jsonschema validator, and then
convert to custom data types, throwing away the tag annotations in the
process.

Upon writing, the custom data types are first converted to basic Python data
types wrapped in `Tagged` objects. The tags assigned to the ``Tagged``
objects are then used to write tags to the YAML file.

All of this is an implementation detail of our custom YAML loader and dumper
(``yamlutil.AsdfLoader`` and ``yamlutil.AsdfDumper``) and is not intended to
be exposed to the end user.
"""

from collections import UserDict, UserList, UserString


__all__ = ['tag_object', 'get_tag']


class Tagged:
    """
    Base class of classes that wrap a given object and store a tag with it.
    """
    pass


class TaggedDict(Tagged, UserDict, dict):
    """
    A Python dict with a tag attached.
    """
    flow_style = None
    property_order = None

    def __init__(self, data=None, tag=None):
        if data is None:
            data = {}
        self.data = data
        self._tag = tag

    def __eq__(self, other):
        return (isinstance(other, TaggedDict) and
                self.data == other.data and
                self._tag == other._tag)


class TaggedList(Tagged, UserList, list):
    """
    A Python list with a tag attached.
    """
    flow_style = None

    def __init__(self, data=None, tag=None):
        if data is None:
            data = []
        self.data = data
        self._tag = tag

    def __eq__(self, other):
        return (isinstance(other, TaggedList) and
                self.data == other.data and
                self._tag == other._tag)


class TaggedString(Tagged, UserString, str):
    """
    A Python string with a tag attached.
    """
    style = None

    def __eq__(self, other):
        return (isinstance(other, TaggedString) and
                str.__eq__(self, other) and
                self._tag == other._tag)


def tag_object(tag, instance, ctx=None):
    """
    Tag an object by wrapping it in a ``Tagged`` instance.
    """
    if isinstance(instance, Tagged):
        instance._tag = tag
    elif isinstance(instance, dict):
        instance = TaggedDict(instance, tag)
    elif isinstance(instance, list):
        instance = TaggedList(instance, tag)
    elif isinstance(instance, str):
        instance = TaggedString(instance)
        instance._tag = tag
    else:
        from . import AsdfFile, yamlutil
        if ctx is None:
            ctx = AsdfFile()
        try:
            instance = yamlutil.custom_tree_to_tagged_tree(instance, ctx)
        except TypeError:
            raise TypeError("Don't know how to tag a {0}".format(type(instance)))
        instance._tag = tag
    return instance


def get_tag(instance):
    """
    Get the tag associated with the instance, if there is one.
    """
    return getattr(instance, '_tag', None)
asdf-2.5.1/asdf/tags/0000755000446400020070000000000013605166132016506 5ustar eslavichSTSCI\science00000000000000
asdf-2.5.1/asdf/tags/__init__.py0000644000446400020070000000017313567314375020633 0ustar eslavichSTSCI\science00000000000000
# -*- coding: utf-8 -*-

# TODO: Import entire tree automatically and make these work like "plugins"?
from . import core
asdf-2.5.1/asdf/tags/core/0000755000446400020070000000000013605166132017436 5ustar eslavichSTSCI\science00000000000000
asdf-2.5.1/asdf/tags/core/__init__.py0000644000446400020070000000234413605166107021554 0ustar eslavichSTSCI\science00000000000000
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

from ...types import AsdfType
from ...yamlutil import custom_tree_to_tagged_tree


class AsdfObject(dict, AsdfType):
    name = 'core/asdf'
    version = '1.1.0'


class Software(dict, AsdfType):
    name = 'core/software'
    version = '1.0.0'


class HistoryEntry(dict, AsdfType):
    name = 'core/history_entry'
    version = '1.0.0'


class ExtensionMetadata(AsdfType):
    name = 'core/extension_metadata'
    version = '1.0.0'

    def __init__(self, extension_class=None, software={}):
        self.extension_class = extension_class
        self.software = software

    @classmethod
    def from_tree(cls, node, ctx):
        return cls(**node)

    @classmethod
    def to_tree(cls, node, ctx):
        tree = {}
        tree['extension_class'] = node.extension_class
        tree['software'] = custom_tree_to_tagged_tree(node.software, ctx)
        return tree


class SubclassMetadata(dict, AsdfType):
    name = 'core/subclass_metadata'
    version = '1.0.0'


from .constant import ConstantType
from .ndarray import NDArrayType
from .complex import ComplexType
from .integer import IntegerType
from .external_reference import ExternalArrayReference
asdf-2.5.1/asdf/tags/core/complex.py0000644000446400020070000000112613567314375021472 0ustar eslavichSTSCI\science00000000000000
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import numpy as np

from ...types import AsdfType
from ... import util


class ComplexType(AsdfType):
    name = 'core/complex'
    types = list(util.iter_subclasses(np.complexfloating)) + [complex]

    @classmethod
    def to_tree(cls, node, ctx):
        return str(node)

    @classmethod
    def from_tree(cls, tree, ctx):
        tree = tree.replace(
            'inf', 'INF').replace(
            'i', 'j').replace(
            'INF', 'inf').replace(
            'I', 'J')
        return complex(tree)
asdf-2.5.1/asdf/tags/core/constant.py0000644000446400020070000000077013567314375021660 0ustar eslavichSTSCI\science00000000000000
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

from ...types import AsdfType


class Constant:
    def __init__(self, value):
        self._value = value

    @property
    def value(self):
        return self._value


class ConstantType(AsdfType):
    name = 'core/constant'
    types = [Constant]

    @classmethod
    def from_tree(self, node, ctx):
        return Constant(node)

    @classmethod
    def to_tree(self, data, ctx):
        return data.value
asdf-2.5.1/asdf/tags/core/external_reference.py0000644000446400020070000000421113567314375023661 0ustar eslavichSTSCI\science00000000000000
from ...types import AsdfType


class ExternalArrayReference(AsdfType):
    """
    Store a reference to an array in an external file.

    This class is a simple way of referring to an array in another file. It
    provides no way to resolve these references; that is left to the user. It
    also performs no checking to see if any of the arguments are correct,
    e.g. whether the file exists.

    Parameters
    ----------

    fileuri: `str`
        The path to the file to be referenced. Can be relative to the file
        containing the reference.

    target: `object`
        Some internal target to the data in the file. Examples may include
        an HDU index, a HDF path or an asdf fragment.

    dtype: `str`
        The (numpy) dtype of the contained array.

    shape: `tuple`
        The shape of the array to be loaded.
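
    Notes
    -----
    Resolving the reference is left entirely to the reader. A minimal sketch
    of a consumer (assuming the target is a FITS HDU index and that `astropy`
    is available; neither is required by this class) might be::

        from astropy.io import fits

        data = fits.open(ref.fileuri)[ref.target].data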
    Examples
    --------
    >>> import asdf
    >>> ref = asdf.ExternalArrayReference("myfitsfile.fits", 1, "float64", (100, 100))
    >>> tree = {'reference': ref}
    >>> with asdf.AsdfFile(tree) as ff:
    ...     ff.write_to("test.asdf")
    """

    name = "core/externalarray"
    version = (1, 0, 0)

    def __init__(self, fileuri, target, dtype, shape):
        self.fileuri = str(fileuri)
        self.target = target
        self.dtype = dtype
        self.shape = tuple(shape)

    def __repr__(self):
        return "<External array reference in {0} at {1} shape: {2} dtype: {3}>".format(
            self.fileuri, self.target, self.shape, self.dtype)

    def __str__(self):
        return repr(self)

    def __eq__(self, other):
        uri = self.fileuri == other.fileuri
        target = self.target == other.target
        dtype = self.dtype == other.dtype
        shape = self.shape == other.shape
        return all((uri, target, dtype, shape))

    @classmethod
    def to_tree(self, data, ctx):
        node = {}
        node['fileuri'] = data.fileuri
        node['target'] = data.target
        node['datatype'] = data.dtype
        node['shape'] = data.shape
        return node

    @classmethod
    def from_tree(cls, tree, ctx):
        return cls(tree['fileuri'], tree['target'],
                   tree['datatype'], tree['shape'])
asdf-2.5.1/asdf/tags/core/integer.py0000644000446400020070000000673013567314375021466 0ustar eslavichSTSCI\science00000000000000
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

from numbers import Integral

import numpy as np

from ...types import AsdfType
from ...yamlutil import custom_tree_to_tagged_tree


class IntegerType(AsdfType):
    """
    Enables the storage of arbitrarily large integer values.

    The ASDF Standard mandates that integer literals in the tree can be no
    larger than 52 bits. This class makes it possible to store integer
    values of arbitrary size.

    When reading files that contain arbitrarily large integers, the values
    that are restored in the tree will be raw Python `int` instances.

    Parameters
    ----------

    value: `numbers.Integral`
        A Python integral value (e.g. `int` or `numpy.integer`)

    storage_type: `str`, optional
        Optionally overrides the storage type of the array used to
        represent the integer value. Valid values are "internal" (the
        default) and "inline".

    Examples
    --------
    >>> import asdf
    >>> import random
    >>> # Create a large integer value
    >>> largeval = random.getrandbits(100)
    >>> # Store the large integer value to the tree using asdf.IntegerType
    >>> tree = dict(largeval=asdf.IntegerType(largeval))
    >>> with asdf.AsdfFile(tree) as af:
    ...     af.write_to('largeval.asdf')
    >>> with asdf.open('largeval.asdf') as aa:
    ...
assert aa['largeval'] == largeval """ name = 'core/integer' version = '1.0.0' _value_cache = dict() def __init__(self, value, storage_type='internal'): assert storage_type in ['internal', 'inline'], "Invalid storage type given" self._value = value self._sign = '-' if value < 0 else '+' self._storage = storage_type @classmethod def to_tree(cls, node, ctx): if ctx not in cls._value_cache: cls._value_cache[ctx] = dict() abs_value = int(np.abs(node._value)) # If the same value has already been stored, reuse the array if abs_value in cls._value_cache[ctx]: array = cls._value_cache[ctx][abs_value] else: # pack integer value into 32-bit words words = [] value = abs_value while value > 0: words.append(value & 0xffffffff) value >>= 32 array = np.array(words, dtype=np.uint32) if node._storage == 'internal': cls._value_cache[ctx][abs_value] = array tree = dict() ctx.set_array_storage(array, node._storage) tree['words'] = custom_tree_to_tagged_tree(array, ctx) tree['sign'] = node._sign tree['string'] = str(int(node._value)) return tree @classmethod def from_tree(cls, tree, ctx): value = 0 for x in tree['words'][::-1]: value <<= 32 value |= int(x) if tree['sign'] == '-': value = -value return IntegerType(value) def __int__(self): return int(self._value) def __float__(self): return float(self._value) def __eq__(self, other): if isinstance(other, Integral): return self._value == other elif isinstance(other, IntegerType): return self._value == other._value else: raise ValueError( "Can't compare IntegralType to unknown type: {}".format( type(other))) def __repr__(self): return "IntegerType({})".format(self._value) asdf-2.5.1/asdf/tags/core/ndarray.py0000644000446400020070000005434413605165746021474 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import sys import weakref import numpy as np from numpy import ma from jsonschema import ValidationError from ...types import AsdfType from ... import schema from ... import util from ... 
import yamlutil _datatype_names = { 'int8' : 'i1', 'int16' : 'i2', 'int32' : 'i4', 'int64' : 'i8', 'uint8' : 'u1', 'uint16' : 'u2', 'uint32' : 'u4', 'uint64' : 'u8', 'float32' : 'f4', 'float64' : 'f8', 'complex64' : 'c8', 'complex128' : 'c16', 'bool8' : 'b1' } _string_datatype_names = { 'ascii' : 'S', 'ucs4' : 'U' } def asdf_byteorder_to_numpy_byteorder(byteorder): if byteorder == 'big': return '>' elif byteorder == 'little': return '<' raise ValueError("Invalid ASDF byteorder '{0}'".format(byteorder)) def asdf_datatype_to_numpy_dtype(datatype, byteorder=None): if byteorder is None: byteorder = sys.byteorder if isinstance(datatype, str) and datatype in _datatype_names: datatype = _datatype_names[datatype] byteorder = asdf_byteorder_to_numpy_byteorder(byteorder) return np.dtype(str(byteorder + datatype)) elif (isinstance(datatype, list) and len(datatype) == 2 and isinstance(datatype[0], str) and isinstance(datatype[1], int) and datatype[0] in _string_datatype_names): length = datatype[1] byteorder = asdf_byteorder_to_numpy_byteorder(byteorder) datatype = str(byteorder) + str(_string_datatype_names[datatype[0]]) + str(length) return np.dtype(datatype) elif isinstance(datatype, dict): if 'datatype' not in datatype: raise ValueError("Field entry has no datatype: '{0}'".format(datatype)) name = datatype.get('name', '') byteorder = datatype.get('byteorder', byteorder) shape = datatype.get('shape') datatype = asdf_datatype_to_numpy_dtype(datatype['datatype'], byteorder) if shape is None: return (str(name), datatype) else: return (str(name), datatype, tuple(shape)) elif isinstance(datatype, list): datatype_list = [] for i, subdatatype in enumerate(datatype): np_dtype = asdf_datatype_to_numpy_dtype(subdatatype, byteorder) if isinstance(np_dtype, tuple): datatype_list.append(np_dtype) elif isinstance(np_dtype, np.dtype): datatype_list.append((str(''), np_dtype)) else: raise RuntimeError("Error parsing asdf datatype") return np.dtype(datatype_list) raise ValueError("Unknown datatype {0}".format(datatype)) def numpy_byteorder_to_asdf_byteorder(byteorder): if byteorder == '=': return sys.byteorder elif byteorder == '<': return 'little' else: return 'big' def numpy_dtype_to_asdf_datatype(dtype, include_byteorder=True): dtype = np.dtype(dtype) if dtype.names is not None: fields = [] for name in dtype.names: field = dtype.fields[name][0] d = {} d['name'] = name field_dtype, byteorder = numpy_dtype_to_asdf_datatype(field) d['datatype'] = field_dtype if include_byteorder: d['byteorder'] = byteorder if field.shape: d['shape'] = list(field.shape) fields.append(d) return fields, numpy_byteorder_to_asdf_byteorder(dtype.byteorder) elif dtype.subdtype is not None: return numpy_dtype_to_asdf_datatype(dtype.subdtype[0]) elif dtype.name in _datatype_names: return dtype.name, numpy_byteorder_to_asdf_byteorder(dtype.byteorder) elif dtype.name == 'bool': return 'bool8', numpy_byteorder_to_asdf_byteorder(dtype.byteorder) elif dtype.name.startswith('string') or dtype.name.startswith('bytes'): return ['ascii', dtype.itemsize], 'big' elif dtype.name.startswith('unicode') or dtype.name.startswith('str'): return (['ucs4', int(dtype.itemsize / 4)], numpy_byteorder_to_asdf_byteorder(dtype.byteorder)) raise ValueError("Unknown dtype {0}".format(dtype)) def inline_data_asarray(inline, dtype=None): # np.asarray doesn't handle structured arrays unless the innermost # elements are tuples. 
To do that, we drill down the first # element of each level until we find a single item that # successfully converts to a scalar of the expected structured # dtype. Then we go through and convert everything at that level # to a tuple. This probably breaks for nested structured dtypes, # but it's probably good enough for now. It also won't work with # object dtypes, but ASDF explicitly excludes those, so we're ok # there. if dtype is not None and dtype.fields is not None: def find_innermost_match(l, depth=0): if not isinstance(l, list) or not len(l): raise ValueError( "data can not be converted to structured array") try: np.asarray(tuple(l), dtype=dtype) except ValueError: return find_innermost_match(l[0], depth + 1) else: return depth depth = find_innermost_match(inline) def convert_to_tuples(l, data_depth, depth=0): if data_depth == depth: return tuple(l) else: return [convert_to_tuples(x, data_depth, depth+1) for x in l] inline = convert_to_tuples(inline, depth) return np.asarray(inline, dtype=dtype) else: def handle_mask(inline): if isinstance(inline, list): if None in inline: inline_array = np.asarray(inline) nones = np.equal(inline_array, None) return np.ma.array(np.where(nones, 0, inline), mask=nones) else: return [handle_mask(x) for x in inline] return inline inline = handle_mask(inline) inline = np.ma.asarray(inline, dtype=dtype) if not ma.is_masked(inline): return inline.data else: return inline def numpy_array_to_list(array): def tolist(x): if isinstance(x, (np.ndarray, NDArrayType)): if x.dtype.char == 'S': x = x.astype('U').tolist() else: x = x.tolist() if isinstance(x, (list, tuple)): return [tolist(y) for y in x] else: return x def ascii_to_unicode(x): # Convert byte string arrays to unicode string arrays, since YAML # doesn't handle the former. if isinstance(x, list): return [ascii_to_unicode(y) for y in x] elif isinstance(x, bytes): return x.decode('ascii') else: return x result = ascii_to_unicode(tolist(array)) schema.validate_large_literals(result) return result class NDArrayType(AsdfType): name = 'core/ndarray' types = [np.ndarray, ma.MaskedArray] def __init__(self, source, shape, dtype, offset, strides, order, mask, asdffile): self._asdffile = asdffile self._source = source self._block = None self._block_data_weakref = None self._array = None self._mask = mask if isinstance(source, list): self._array = inline_data_asarray(source, dtype) self._array = self._apply_mask(self._array, self._mask) self._block = asdffile.blocks.add_inline(self._array) if shape is not None: if ((shape[0] == '*' and self._array.shape[1:] != tuple(shape[1:])) or (self._array.shape != tuple(shape))): raise ValueError( "inline data doesn't match the given shape") self._shape = shape self._dtype = dtype self._offset = offset self._strides = strides self._order = order if not asdffile.blocks.lazy_load: self._make_array() def _make_array(self): # If the ASDF file has been updated in-place, then there's # a chance that the block's original data object has been # closed and replaced. We need to check here and re-generate # the array if necessary, otherwise we risk segfaults when # memory mapping. 
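        # Sketch of the scenario being guarded against (hypothetical file):
        #
        #     with asdf.open('data.asdf', mode='rw') as af:
        #         arr = af.tree['data']   # lazily-loaded NDArrayType
        #         af.update()             # may replace the mapped block
        #         arr[0]                  # must not touch the stale mmap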
if self._block_data_weakref is not None: block_data = self._block_data_weakref() if block_data is not self.block.data: self._array = None self._block_data_weakref = None if self._array is None: block = self.block shape = self.get_actual_shape( self._shape, self._strides, self._dtype, len(block)) self._array = np.ndarray( shape, self._dtype, block.data, self._offset, self._strides, self._order) self._block_data_weakref = weakref.ref(block.data) self._array = self._apply_mask(self._array, self._mask) if block.readonly: self._array.setflags(write=False) return self._array def _apply_mask(self, array, mask): if isinstance(mask, (np.ndarray, NDArrayType)): # Use "mask.view()" here so the underlying possibly # memmapped mask array is freed properly when the masked # array goes away. array = ma.array(array, mask=mask.view()) # assert util.get_array_base(array.mask) is util.get_array_base(mask) return array elif np.isscalar(mask): if np.isnan(mask): return ma.array(array, mask=np.isnan(array)) else: return ma.masked_values(array, mask) return array def __array__(self): return self._make_array() def __repr__(self): # repr alone should not force loading of the data if self._array is None: return "<{0} (unloaded) shape: {1} dtype: {2}>".format( 'array' if self._mask is None else 'masked array', self._shape, self._dtype) return repr(self._array) def __str__(self): # str alone should not force loading of the data if self._array is None: return "<{0} (unloaded) shape: {1} dtype: {2}>".format( 'array' if self._mask is None else 'masked array', self._shape, self._dtype) return str(self._array) def get_actual_shape(self, shape, strides, dtype, block_size): """ Get the actual shape of an array, by computing it against the block_size if it contains a ``*``. """ num_stars = shape.count('*') if num_stars == 0: return shape elif num_stars == 1: if shape[0] != '*': raise ValueError("'*' may only be in first entry of shape") if strides is not None: stride = strides[0] else: stride = np.product(shape[1:]) * dtype.itemsize missing = int(block_size / stride) return [missing] + shape[1:] raise ValueError("Invalid shape '{0}'".format(shape)) @property def block(self): if self._block is None: self._block = self._asdffile.blocks.get_block(self._source) return self._block @property def shape(self): if self._shape is None: return self.__array__().shape if '*' in self._shape: return tuple(self.get_actual_shape( self._shape, self._strides, self._dtype, len(self.block))) return tuple(self._shape) @property def dtype(self): if self._array is None: return self._dtype else: return self._array.dtype def __len__(self): if self._array is None: return self._shape[0] else: return len(self._array) def __getattr__(self, attr): # We need to ignore __array_struct__, or unicode arrays end up # getting "double casted" and upsized. This also reduces the # number of array creations in the general case. if attr == '__array_struct__': raise AttributeError() return getattr(self._make_array(), attr) def __setitem__(self, *args): # This workaround appears to be necessary in order to avoid a segfault # in the case that array assignment causes an exception. The segfault # originates from the call to __repr__ inside the traceback report. 
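        # e.g. (illustrative) an assignment with an incompatible value, such
        # as af.tree['data'][0] = 'foo' on a float array, raises; the except
        # clause below then drops the cached array so that the repr called
        # during traceback reporting cannot touch a half-invalid state.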
try: self._make_array().__setitem__(*args) except Exception as e: self._array = None self._block_data_weakref = None raise e from None @classmethod def from_tree(cls, node, ctx): if isinstance(node, list): return cls(node, None, None, None, None, None, None, ctx) elif isinstance(node, dict): source = node.get('source') data = node.get('data') if source and data: raise ValueError( "Both source and data may not be provided " "at the same time") if data: source = data shape = node.get('shape', None) if data is not None: byteorder = sys.byteorder else: byteorder = node['byteorder'] if 'datatype' in node: dtype = asdf_datatype_to_numpy_dtype( node['datatype'], byteorder) else: dtype = None offset = node.get('offset', 0) strides = node.get('strides', None) mask = node.get('mask', None) return cls(source, shape, dtype, offset, strides, 'A', mask, ctx) raise TypeError("Invalid ndarray description.") @classmethod def reserve_blocks(cls, data, ctx): # Find all of the used data buffers so we can add or rearrange # them if necessary if isinstance(data, np.ndarray): yield ctx.blocks.find_or_create_block_for_array(data, ctx) elif isinstance(data, NDArrayType): yield data.block @classmethod def to_tree(cls, data, ctx): base = util.get_array_base(data) shape = data.shape dtype = data.dtype offset = data.ctypes.data - base.ctypes.data strides = None if not data.flags.c_contiguous: # We do not want to encode strides for broadcasted arrays if not all(data.strides): data = np.ascontiguousarray(data) else: strides = data.strides block = ctx.blocks.find_or_create_block_for_array(data, ctx) result = {} result['shape'] = list(shape) if block.array_storage == 'streamed': result['shape'][0] = '*' dtype, byteorder = numpy_dtype_to_asdf_datatype( dtype, include_byteorder=(block.array_storage != 'inline')) byteorder = block.override_byteorder(byteorder) if block.array_storage == 'inline': listdata = numpy_array_to_list(data) result['data'] = yamlutil.custom_tree_to_tagged_tree( listdata, ctx) result['datatype'] = dtype else: result['shape'] = list(shape) if block.array_storage == 'streamed': result['shape'][0] = '*' result['source'] = ctx.blocks.get_source(block) result['datatype'] = dtype result['byteorder'] = byteorder if offset > 0: result['offset'] = offset if strides is not None: result['strides'] = list(strides) if isinstance(data, ma.MaskedArray): if np.any(data.mask): if block.array_storage == 'inline': ctx.blocks.set_array_storage(ctx.blocks[data.mask], 'inline') result['mask'] = yamlutil.custom_tree_to_tagged_tree( data.mask, ctx) return result @classmethod def _assert_equality(cls, old, new, func): if old.dtype.fields: if not new.dtype.fields: assert False, "arrays not equal" for a, b in zip(old, new): cls._assert_equality(a, b, func) else: old = old.__array__() new = new.__array__() if old.dtype.char in 'SU': if old.dtype.char == 'S': old = old.astype('U') if new.dtype.char == 'S': new = new.astype('U') old = old.tolist() new = new.tolist() assert old == new else: func(old, new) @classmethod def assert_equal(cls, old, new): from numpy.testing import assert_array_equal cls._assert_equality(old, new, assert_array_equal) @classmethod def assert_allclose(cls, old, new): from numpy.testing import assert_allclose, assert_array_equal if (old.dtype.kind in 'iu' and new.dtype.kind in 'iu'): cls._assert_equality(old, new, assert_array_equal) else: cls._assert_equality(old, new, assert_allclose) @classmethod def copy_to_new_asdf(cls, node, asdffile): if isinstance(node, NDArrayType): array = node._make_array() 
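            # Carry the source block's storage setting (e.g. 'internal',
            # 'inline', 'streamed') over to the corresponding block in the
            # destination file before handing back the realized array.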
asdffile.blocks.set_array_storage(asdffile.blocks[array], node.block.array_storage) return node._make_array() return node def _make_operation(name): def __operation__(self, *args): return getattr(self._make_array(), name)(*args) return __operation__ for op in [ '__neg__', '__pos__', '__abs__', '__invert__', '__complex__', '__int__', '__long__', '__float__', '__oct__', '__hex__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', '__cmp__', '__rcmp__', '__add__', '__sub__', '__mul__', '__floordiv__', '__mod__', '__divmod__', '__pow__', '__lshift__', '__rshift__', '__and__', '__xor__', '__or__', '__div__', '__truediv__', '__radd__', '__rsub__', '__rmul__', '__rdiv__', '__rtruediv__', '__rfloordiv__', '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__', '__iadd__', '__isub__', '__imul__', '__idiv__', '__itruediv__', '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__', '__irshift__', '__iand__', '__ixor__', '__ior__', '__getitem__', '__delitem__', '__contains__']: setattr(NDArrayType, op, _make_operation(op)) def _get_ndim(instance): if isinstance(instance, list): array = inline_data_asarray(instance) return array.ndim elif isinstance(instance, dict): if 'shape' in instance: return len(instance['shape']) elif 'data' in instance: array = inline_data_asarray(instance['data']) return array.ndim elif isinstance(instance, (np.ndarray, NDArrayType)): return len(instance.shape) def validate_ndim(validator, ndim, instance, schema): in_ndim = _get_ndim(instance) if in_ndim != ndim: yield ValidationError( "Wrong number of dimensions: Expected {0}, got {1}".format( ndim, in_ndim), instance=repr(instance)) def validate_max_ndim(validator, max_ndim, instance, schema): in_ndim = _get_ndim(instance) if in_ndim > max_ndim: yield ValidationError( "Wrong number of dimensions: Expected max of {0}, got {1}".format( max_ndim, in_ndim), instance=repr(instance)) def validate_datatype(validator, datatype, instance, schema): if isinstance(instance, list): array = inline_data_asarray(instance) in_datatype, _ = numpy_dtype_to_asdf_datatype(array.dtype) elif isinstance(instance, dict): if 'datatype' in instance: in_datatype = instance['datatype'] elif 'data' in instance: array = inline_data_asarray(instance['data']) in_datatype, _ = numpy_dtype_to_asdf_datatype(array.dtype) else: raise ValidationError("Not an array") elif isinstance(instance, (np.ndarray, NDArrayType)): in_datatype, _ = numpy_dtype_to_asdf_datatype(instance.dtype) else: raise ValidationError("Not an array") if datatype == in_datatype: return if schema.get('exact_datatype', False): yield ValidationError( "Expected datatype '{0}', got '{1}'".format( datatype, in_datatype)) np_datatype = asdf_datatype_to_numpy_dtype(datatype) np_in_datatype = asdf_datatype_to_numpy_dtype(in_datatype) if not np_datatype.fields: if np_in_datatype.fields: yield ValidationError( "Expected scalar datatype '{0}', got '{1}'".format( datatype, in_datatype)) if not np.can_cast(np_in_datatype, np_datatype, 'safe'): yield ValidationError( "Can not safely cast from '{0}' to '{1}' ".format( in_datatype, datatype)) else: if not np_in_datatype.fields: yield ValidationError( "Expected structured datatype '{0}', got '{1}'".format( datatype, in_datatype)) if len(np_in_datatype.fields) != len(np_datatype.fields): yield ValidationError( "Mismatch in number of columns: " "Expected {0}, got {1}".format( len(datatype), len(in_datatype))) for i in range(len(np_datatype.fields)): in_type = np_in_datatype[i] out_type = np_datatype[i] if 
not np.can_cast(in_type, out_type, 'safe'): yield ValidationError( "Can not safely cast to expected datatype: " "Expected {0}, got {1}".format( numpy_dtype_to_asdf_datatype(out_type)[0], numpy_dtype_to_asdf_datatype(in_type)[0])) NDArrayType.validators = { 'ndim': validate_ndim, 'max_ndim': validate_max_ndim, 'datatype': validate_datatype } asdf-2.5.1/asdf/tags/core/tests/0000755000446400020070000000000013605166132020600 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/tags/core/tests/__init__.py0000644000446400020070000000000013567314375022712 0ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/tags/core/tests/data/0000755000446400020070000000000013605166132021511 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/tags/core/tests/data/__init__.py0000644000446400020070000000010013567314375023624 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst asdf-2.5.1/asdf/tags/core/tests/data/datatype-1.0.0.yaml0000644000446400020070000000073113567314375024656 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://nowhere.org/schemas/custom/datatype-1.0.0" type: object properties: a: datatype: float32 b: datatype: float32 exact_datatype: true c: datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] d: datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] exact_datatype: trueasdf-2.5.1/asdf/tags/core/tests/data/ndim-1.0.0.yaml0000644000446400020070000000027313567314375023773 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://nowhere.org/schemas/custom/ndim-1.0.0" type: object properties: a: ndim: 2 b: max_ndim: 2asdf-2.5.1/asdf/tags/core/tests/test_complex.py0000644000446400020070000000275713567314375023706 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import re import pytest import asdf from asdf.tests import helpers def make_complex_asdf(string): yaml = """ a: !core/complex-1.0.0 {} """.format(string) return helpers.yaml_to_asdf(yaml) @pytest.mark.parametrize('invalid', [ '3 + 4i', '3+-4i', '3-+4i', '3i+4i', 'X3+4iX', '3+X4i', '3+4', '3i+4' '3+4z', '3.+4i', '3+4.i', '3e-4.0+4i', '3+4e4.0i', '' ]) def test_invalid_complex(invalid): with pytest.raises(asdf.ValidationError): with asdf.open(make_complex_asdf(invalid)): pass @pytest.mark.parametrize('valid', [ '3+4j', '(3+4j)', '.3+4j', '3+.4j', '3e10+4j', '3e-10+4j', '3+4e10j', '3.0+4j', '3+4.0j', '3.0+4.0j', '3+4e-10j', '3+4J', '3+4i', '3+4I', 'inf', 'inf+infj', 'inf+infi', 'infj', 'infi', 'INFi', 'INFI', '3+infj', 'inf+4j', ]) def test_valid_complex(valid): with asdf.open(make_complex_asdf(valid)) as af: assert af.tree['a'] == complex(re.sub(r'[iI]$', r'j', valid)) @pytest.mark.parametrize('valid', [ 'nan', 'nan+nanj', 'nan+nani', 'nanj', 'nani', 'NANi', 'NANI', '3+nanj', 'nan+4j' ]) def test_valid_nan_complex(valid): with asdf.open(make_complex_asdf(valid)) as af: # Don't compare values since NANs are never equal pass def test_roundtrip(tmpdir): tree = { 'a': 0+0j, 'b': 1+1j, 'c': -1+1j, 'd': -1-1j } helpers.assert_roundtrip_tree(tree, tmpdir) asdf-2.5.1/asdf/tags/core/tests/test_external_reference.py0000644000446400020070000000065513567314375026072 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from asdf.tags.core.external_reference 
import ExternalArrayReference from asdf.tests import helpers def test_roundtrip_external_array(tmpdir): ref = ExternalArrayReference("./nonexistant.fits", 1, "np.float64", (100, 100)) tree = {'nothere': ref} helpers.assert_roundtrip_tree(tree, tmpdir) asdf-2.5.1/asdf/tags/core/tests/test_history.py0000644000446400020070000002176713605166107023731 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import datetime import fractions import pytest from jsonschema import ValidationError import asdf from asdf import util from asdf import types from asdf.tests import helpers from asdf.tests.helpers import yaml_to_asdf, display_warnings from asdf.tags.core import HistoryEntry SCHEMA_PATH = os.path.join(os.path.dirname(helpers.__file__), 'data') class CustomExtension: """ This is the base class that is used for extensions for custom tag classes that exist only for the purposes of testing. """ @property def types(self): return [] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(SCHEMA_PATH) + '/{url_suffix}.yaml')] def test_history(): ff = asdf.AsdfFile() assert 'history' not in ff.tree ff.add_history_entry('This happened', {'name': 'my_tool', 'homepage': 'http://nowhere.org', 'author': 'John Doe', 'version': '2.0'}) assert len(ff.tree['history']['entries']) == 1 with pytest.raises(ValidationError): ff.add_history_entry('That happened', {'author': 'John Doe', 'version': '2.0'}) assert len(ff.tree['history']['entries']) == 1 ff.add_history_entry('This other thing happened') assert len(ff.tree['history']['entries']) == 2 assert isinstance(ff.tree['history']['entries'][0]['time'], datetime.datetime) def test_history_to_file(tmpdir): tmpfile = str(tmpdir.join('history.asdf')) with asdf.AsdfFile() as ff: ff.add_history_entry('This happened', {'name': 'my_tool', 'homepage': 'http://nowhere.org', 'author': 'John Doe', 'version': '2.0'}) ff.write_to(tmpfile) with asdf.open(tmpfile) as ff: assert 'entries' in ff.tree['history'] assert 'extensions' in ff.tree['history'] assert len(ff.tree['history']['entries']) == 1 entry = ff.tree['history']['entries'][0] assert entry['description'] == 'This happened' assert entry['software']['name'] == 'my_tool' assert entry['software']['version'] == '2.0' # Test the history entry retrieval API entries = ff.get_history_entries() assert len(entries) == 1 assert isinstance(entries, list) assert isinstance(entries[0], HistoryEntry) assert entries[0]['description'] == "This happened" assert entries[0]['software']['name'] == 'my_tool' def test_old_history(tmpdir): """Make sure that old versions of the history format are still accepted""" yaml = """ history: - !core/history_entry-1.0.0 description: "Here's a test of old history entries" software: !core/software-1.0.0 name: foo version: 1.2.3 """ buff = yaml_to_asdf(yaml) with asdf.open(buff) as af: assert len(af.tree['history']) == 1 # Test the history entry retrieval API entries = af.get_history_entries() assert len(entries) == 1 assert isinstance(entries, list) assert isinstance(entries[0], HistoryEntry) assert entries[0]['description'] == "Here's a test of old history entries" assert entries[0]['software']['name'] == 'foo' def test_get_history_entries(tmpdir): """ Test edge cases for the get_history_entries API. 
Other cases tested above """ tmpfile = str(tmpdir.join('empty.asdf')) with asdf.AsdfFile() as af: af.write_to(tmpfile) # Make sure this works when there is no history section at all with asdf.open(tmpfile) as af: assert len(af['history']['extensions']) > 0 assert len(af.get_history_entries()) == 0 def test_extension_metadata(tmpdir): ff = asdf.AsdfFile() # So far only the base extension has been used assert len(ff.type_index.get_extensions_used()) == 1 tmpfile = str(tmpdir.join('extension.asdf')) ff.write_to(tmpfile) with asdf.open(tmpfile) as af: assert len(af.tree['history']['extensions']) == 1 metadata = af.tree['history']['extensions'][0] assert metadata.extension_class == 'asdf.extension.BuiltinExtension' # Don't bother with testing the version here since it will depend on # how recently the package was built (version is auto-generated) assert metadata.software['name'] == 'asdf' def test_missing_extension_warning(): yaml = """ history: extensions: - !core/extension_metadata-1.0.0 extension_class: foo.bar.FooBar software: !core/software-1.0.0 name: foo version: 1.2.3 """ buff = yaml_to_asdf(yaml) with pytest.warns(None) as warnings: with asdf.open(buff) as af: pass assert len(warnings) == 1, display_warnings(warnings) assert str(warnings[0].message).startswith( "File was created with extension 'foo.bar.FooBar'") def test_extension_version_warning(): yaml = """ history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: !core/software-1.0.0 name: asdf version: 100.0.3 """ buff = yaml_to_asdf(yaml) with pytest.warns(None) as warnings: with asdf.open(buff) as af: pass assert len(warnings) == 1, display_warnings(warnings) assert str(warnings[0].message).startswith( "File was created with extension 'asdf.extension.BuiltinExtension' " "from package asdf-100.0.3") buff.seek(0) # Make sure suppressing the warning works too with pytest.warns(None) as warnings: with asdf.open(buff, ignore_missing_extensions=True) as af: pass assert len(warnings) == 0, display_warnings(warnings) def test_strict_extension_check(): yaml = """ history: extensions: - !core/extension_metadata-1.0.0 extension_class: foo.bar.FooBar software: !core/software-1.0.0 name: foo version: 1.2.3 """ buff = yaml_to_asdf(yaml) with pytest.raises(RuntimeError): with asdf.open(buff, strict_extension_check=True) as af: pass # Make sure to test for incompatibility with ignore_missing_extensions with pytest.raises(ValueError): with asdf.open(buff, strict_extension_check=True, ignore_missing_extensions=True) as af: pass def test_metadata_with_custom_extension(tmpdir): class FractionType(types.AsdfType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [fractions.Fraction] @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return fractions.Fraction(tree[0], tree[1]) class FractionExtension(CustomExtension): @property def types(self): return [FractionType] tree = { 'fraction': fractions.Fraction(2, 3) } tmpfile = str(tmpdir.join('custom_extension.asdf')) with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff: ff.write_to(tmpfile) # We expect metadata about both the Builtin extension and the custom one with asdf.open(tmpfile, extensions=FractionExtension()) as af: assert len(af['history']['extensions']) == 2 with pytest.warns(None) as warnings: with asdf.open(tmpfile, ignore_unrecognized_tag=True) as af: pass # Since we're ignoring the unrecognized tag 
warning, we should only get # one warning here, which is the one about the missing extension assert len(warnings) == 1 assert "was created with extension" in str(warnings[0].message) # If we use the extension but we don't serialize any types that require it, # no metadata about this extension should be added to the file tree2 = { 'x': [x for x in range(10)] } tmpfile2 = str(tmpdir.join('no_extension.asdf')) with asdf.AsdfFile(tree2, extensions=FractionExtension()) as ff: ff.write_to(tmpfile2) with asdf.open(tmpfile2) as af: assert len(af['history']['extensions']) == 1 with pytest.warns(None) as warnings: with asdf.open(tmpfile2) as af: pass assert len(warnings) == 0 # Make sure that this works even when constructing the tree on-the-fly tmpfile3 = str(tmpdir.join('custom_extension2.asdf')) with asdf.AsdfFile(extensions=FractionExtension()) as ff: ff.tree['fraction'] = fractions.Fraction(4, 5) ff.write_to(tmpfile3) with asdf.open(tmpfile3, extensions=FractionExtension()) as af: assert len(af['history']['extensions']) == 2 asdf-2.5.1/asdf/tags/core/tests/test_integer.py0000644000446400020070000000454613567314375023672 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import random import pytest import asdf from asdf import IntegerType from asdf.tests import helpers # Make sure tests are deterministic random.seed(0) @pytest.mark.parametrize('sign', ['+', '-']) @pytest.mark.parametrize('value', [ random.getrandbits(64), random.getrandbits(65), random.getrandbits(100), random.getrandbits(128), random.getrandbits(129), random.getrandbits(200), ]) def test_integer_value(tmpdir, value, sign): if sign == '-': value = -value integer = IntegerType(value) tree = dict(integer=integer) helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.parametrize('inline', [False, True]) def test_integer_storage(tmpdir, inline): tmpfile = str(tmpdir.join('integer.asdf')) kwargs = dict() if inline: kwargs['storage_type'] = 'inline' random.seed(0) value = random.getrandbits(1000) tree = dict(integer=IntegerType(value, **kwargs)) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) with asdf.open(tmpfile, _force_raw_types=True) as rf: if inline: assert 'source' not in rf.tree['integer']['words'] assert 'data' in rf.tree['integer']['words'] else: assert 'source' in rf.tree['integer']['words'] assert 'data' not in rf.tree['integer']['words'] assert 'string' in rf.tree['integer'] assert rf.tree['integer']['string'] == str(value) def test_integer_storage_duplication(tmpdir): tmpfile = str(tmpdir.join('integer.asdf')) random.seed(0) value = random.getrandbits(1000) tree = dict(integer1=IntegerType(value), integer2=IntegerType(value)) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) assert len(af.blocks) == 1 with asdf.open(tmpfile, _force_raw_types=True) as rf: assert rf.tree['integer1']['words']['source'] == 0 assert rf.tree['integer2']['words']['source'] == 0 with asdf.open(tmpfile) as aa: assert aa.tree['integer1'] == value assert aa.tree['integer2'] == value def test_integer_conversion(): random.seed(0) value = random.getrandbits(1000) integer = asdf.IntegerType(value) assert integer == value assert int(integer) == int(value) assert float(integer) == float(value) asdf-2.5.1/asdf/tags/core/tests/test_ndarray.py0000644000446400020070000006036113605165746023671 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import re import sys import 
pytest

import numpy as np
from numpy import ma
from numpy.testing import assert_array_equal

import jsonschema

import yaml

import asdf
from asdf import util
from asdf.tests import helpers, CustomTestType
from asdf.tags.core import ndarray

from . import data as test_data
TEST_DATA_PATH = helpers.get_test_data_path('', module=test_data)


# These custom types and the custom extension are here purely for the purpose
# of testing NDArray objects and making sure that they can be validated as part
# of a nested hierarchy, and not just top-level objects.
class CustomNdim(CustomTestType):
    name = 'ndim'
    organization = 'nowhere.org'
    standard = 'custom'
    version = '1.0.0'


class CustomDatatype(CustomTestType):
    name = 'datatype'
    organization = 'nowhere.org'
    standard = 'custom'
    version = '1.0.0'


class CustomExtension:
    @property
    def types(self):
        return [CustomNdim, CustomDatatype]

    @property
    def tag_mapping(self):
        return [('tag:nowhere.org:custom',
                 'http://nowhere.org/schemas/custom{tag_suffix}')]

    @property
    def url_mapping(self):
        return [(
            'http://nowhere.org/schemas/custom/',
            util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')]


def test_sharing(tmpdir):
    x = np.arange(0, 10, dtype=np.float)
    tree = {
        'science_data': x,
        'subset': x[3:-3],
        'skipping': x[::2]
        }

    def check_asdf(asdf):
        tree = asdf.tree

        assert_array_equal(tree['science_data'], x)
        assert_array_equal(tree['subset'], x[3:-3])
        assert_array_equal(tree['skipping'], x[::2])

        assert tree['science_data'].ctypes.data == tree['skipping'].ctypes.data

        assert len(list(asdf.blocks.internal_blocks)) == 1
        assert next(asdf.blocks.internal_blocks)._size == 80

        tree['science_data'][0] = 42
        assert tree['skipping'][0] == 42

    def check_raw_yaml(content):
        assert b'!core/ndarray' in content

    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  raw_yaml_check_func=check_raw_yaml)


def test_byteorder(tmpdir):
    tree = {
        'bigendian': np.arange(0, 10, dtype=str('>f8')),
        'little': np.arange(0, 10, dtype=str('<f8')),
        }

    def check_asdf(asdf):
        my_tree = asdf.tree
        if sys.byteorder == 'little':
            assert my_tree['bigendian'].dtype.byteorder == '>'
            assert my_tree['little'].dtype.byteorder == '='
        else:
            assert my_tree['bigendian'].dtype.byteorder == '='
            assert my_tree['little'].dtype.byteorder == '<'

    def check_raw_yaml(content):
        assert b'byteorder: little' in content
        assert b'byteorder: big' in content

    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  raw_yaml_check_func=check_raw_yaml)


def test_all_dtypes(tmpdir):
    tree = {}
    for byteorder in ('>', '<'):
        for dtype in ndarray._datatype_names.values():
            # Python 3 can't expose these dtypes in non-native byte
            # order, because it's using the new Python buffer
            # interface.
if dtype in ('c32', 'f16'): continue if dtype == 'b1': arr = np.array([True, False]) else: arr = np.arange(0, 10, dtype=str(byteorder + dtype)) tree[byteorder + dtype] = arr helpers.assert_roundtrip_tree(tree, tmpdir) def test_dont_load_data(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2] } ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) with asdf.open(buff) as ff: ff.run_hook('reserve_blocks') # repr and str shouldn't load data str(ff.tree['science_data']) repr(ff.tree) for block in ff.blocks.internal_blocks: assert block._data is None def test_table_inline(tmpdir): table = np.array( [(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[(str('MINE'), np.int8), (str(''), np.float64), (str('arr'), '>i4', (2,))]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.safe_load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'datatype': 'int8', 'name': 'MINE'}, {'datatype': 'float64', 'name': 'f1'}, {'datatype': 'int32', 'name': 'arr', 'shape': [2]} ], 'data': [[0, 1.0, [2, 3]], [4, 5.0, [6, 7]]], 'shape': [2] } helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_raw_yaml, write_options={'auto_inline': 64}) def test_auto_inline_recursive(tmpdir): astropy = pytest.importorskip('astropy') from astropy.modeling import models aff = models.AffineTransformation2D(matrix=[[1, 2], [3, 4]]) tree = {'test': aff} def check_asdf(asdf): assert len(list(asdf.blocks.internal_blocks)) == 0 helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, write_options={'auto_inline': 64}) def test_copy_inline(): yaml = """ x0: !core/ndarray-1.0.0 data: [-1.0, 1.0] """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff) as infile: with asdf.AsdfFile() as f: f.tree['a'] = infile.tree['x0'] f.tree['b'] = f.tree['a'] f.write_to(io.BytesIO()) def test_table(tmpdir): table = np.array( [(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[(str('MINE'), np.int8), (str(''), np.float64), (str('arr'), '>i4', (2,))]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.safe_load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'byteorder': 'big', 'datatype': 'int8', 'name': 'MINE'}, {'byteorder': 'little', 'datatype': 'float64', 'name': 'f1'}, {'byteorder': 'big', 'datatype': 'int32', 'name': 'arr', 'shape': [2]} ], 'shape': [2], 'source': 0, 'byteorder': 'big' } helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_raw_yaml) def test_table_nested_fields(tmpdir): table = np.array( [(0, (1, 2)), (4, (5, 6)), (7, (8, 9))], dtype=[(str('A'), np.int64), (str('B'), [(str('C'), np.int64), (str('D'), np.int64)])]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.safe_load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'datatype': 'int64', 'name': 'A', 'byteorder': 'little'}, {'datatype': [ {'datatype': 'int64', 'name': 'C', 'byteorder': 'little'}, {'datatype': 'int64', 'name': 'D', 'byteorder': 'little'} ], 'name': 'B', 'byteorder': 'big'}], 'shape': [3], 'source': 0, 'byteorder': 'big' } helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_raw_yaml) def test_inline(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2] } buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.blocks.set_array_storage(ff.blocks[tree['science_data']], 'inline') ff.write_to(buff) buff.seek(0) with asdf.open(buff, mode='rw') as 
ff: helpers.assert_tree_match(tree, ff.tree) assert len(list(ff.blocks.internal_blocks)) == 0 buff = io.BytesIO() ff.write_to(buff) assert b'[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]' in buff.getvalue() def test_inline_bare(): content = "arr: !core/ndarray-1.0.0 [[1, 2, 3, 4], [5, 6, 7, 8]]" buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal(ff.tree['arr'], [[1, 2, 3, 4], [5, 6, 7, 8]]) def test_mask_roundtrip(tmpdir): x = np.arange(0, 10, dtype=np.float) m = ma.array(x, mask=x > 5) tree = { 'masked_array': m, 'unmasked_array': x } def check_asdf(asdf): tree = asdf.tree m = tree['masked_array'] x = tree['unmasked_array'] print(m) print(m.mask) assert np.all(m.mask[6:]) assert len(asdf.blocks) == 2 helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf) def test_len_roundtrip(tmpdir): sequence = np.arange(0, 10, dtype=np.int) tree = { 'sequence': sequence } def check_len(asdf): s = asdf.tree["sequence"] assert len(s) == 10 helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_len) def test_mask_arbitrary(): content = """ arr: !core/ndarray-1.0.0 data: [[1, 2, 3, 1234], [5, 6, 7, 8]] mask: 1234 """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) def test_mask_nan(): content = """ arr: !core/ndarray-1.0.0 data: [[1, 2, 3, .NaN], [5, 6, 7, 8]] mask: .NaN """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) def test_string(tmpdir): tree = { 'ascii': np.array([b'foo', b'bar', b'baz']), 'unicode': np.array(['სამეცნიერო', 'данные', 'வடிவம்']) } helpers.assert_roundtrip_tree(tree, tmpdir) def test_string_table(tmpdir): tree = { 'table': np.array([(b'foo', 'სამეცნიერო', 42, 53.0)]) } helpers.assert_roundtrip_tree(tree, tmpdir) def test_inline_string(): content = "arr: !core/ndarray-1.0.0 ['a', 'b', 'c']" buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal(ff.tree['arr']._make_array(), ['a', 'b', 'c']) def test_inline_structured(): content = """ arr: !core/ndarray-1.0.0 datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]] data: [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]]""" buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert ff.tree['arr']['f1'].dtype.char == 'H' def test_simple_table(): table = np.array( [(10.683262825012207, 41.2674560546875, 0.13, 0.12, 213.916), (10.682777404785156, 41.270111083984375, 0.1, 0.09, 306.825), (10.684737205505371, 41.26903533935547, 0.08, 0.07, 96.656), (10.682382583618164, 41.26792526245117, 0.1, 0.09, 237.145), (10.686025619506836, 41.26922607421875, 0.13, 0.12, 79.581), (10.685656547546387, 41.26955032348633, 0.13, 0.12, 55.219), (10.684028625488281, 41.27090072631836, 0.13, 0.12, 345.269), (10.687610626220703, 41.270301818847656, 0.18, 0.14, 60.192)], dtype=[ (str('ra'), str('<f4')), (str('dec'), str('<f4')), (str('err_maj'), str('<f8')), (str('err_min'), str('<f8')), (str('angle'), str('<f8'))]) tree = {'table': table} ff = asdf.AsdfFile(tree) ff.set_array_storage(table, 'inline') ff.write_to(io.BytesIO())
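# --- Illustrative sketch (not part of the original test module) ----------
# The schema-validation tests below all repeat one pattern: wrap a YAML
# fragment in an ASDF envelope with helpers.yaml_to_asdf(), then open it
# against the test extension and expect either success or a
# jsonschema.ValidationError.  A helper like the following (hypothetical,
# shown only to document the pattern; it uses names already imported in
# this module) would capture it:

def _assert_custom_validation(content, should_pass):
    """Open `content` against CustomExtension and check the outcome."""
    buff = helpers.yaml_to_asdf(content)
    if should_pass:
        with asdf.open(buff, extensions=CustomExtension()):
            pass
    else:
        with pytest.raises(jsonschema.ValidationError):
            with asdf.open(buff, extensions=CustomExtension()):
                pass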
def test_ndim_validation(tmpdir): content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> a: !core/ndarray-1.0.0 data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> a: !core/ndarray-1.0.0 shape: [1, 3] data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> b: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> b: !core/ndarray-1.0.0 data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> b: !core/ndarray-1.0.0 data: [[[1, 2, 3]]] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass def test_datatype_validation(tmpdir): content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: float32 """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: float64 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: int16 """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> b: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: int16 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass
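# --- Illustrative sketch (not part of the original test module) ----------
# The pass/fail pattern in test_datatype_validation above is consistent
# with numpy's safe-casting rules, assuming the custom schema requires
# `datatype: float32` for `a` (non-exact, so any safely castable type
# validates) and an exact datatype for `b`.  Using this module's numpy
# import:

assert np.can_cast(np.int16, np.float32)        # int16 data validates
assert not np.can_cast(np.float64, np.float32)  # float64 data is rejected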
def test_structured_datatype_validation(tmpdir): content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int64 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [[1, 'a', 0], [2, 'b', 1], [3, 'c', 2]] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] - name: c datatype: float64 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> d: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> d: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()) as ff: pass def test_string_inline(): x = np.array([b'a', b'b', b'c']) l = ndarray.numpy_array_to_list(x) for entry in l: assert isinstance(entry, str) def test_inline_shape_mismatch(): content = """ arr: !core/ndarray-1.0.0 data: [1, 2, 3] shape: [2] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(ValueError): with asdf.open(buff) as ff: pass @pytest.mark.xfail( reason="NDArrays with dtype=object are not currently supported") def test_simple_object_array(tmpdir): # See https://github.com/spacetelescope/asdf/issues/383 for feature # request dictdata = np.empty((3, 3), dtype=object) for i, _ in enumerate(dictdata.flat): dictdata.flat[i] = {'foo': i*42, 'bar': i**2} helpers.assert_roundtrip_tree({'bizbaz': dictdata}, tmpdir) @pytest.mark.xfail( reason="NDArrays with dtype=object are not currently supported") def test_tagged_object_array(tmpdir): # See https://github.com/spacetelescope/asdf/issues/383 for feature # request astropy = pytest.importorskip('astropy') from astropy.units.quantity import Quantity objdata = np.empty((3, 3), dtype=object) for i, _ in enumerate(objdata.flat): objdata.flat[i] = Quantity(i, 'angstrom') helpers.assert_roundtrip_tree({'bizbaz': objdata}, tmpdir) def test_broadcasted_array(tmpdir): attrs = np.broadcast_arrays(np.array([10,20]), np.array(10), np.array(10)) tree = {'one': attrs[1] }#, 'two': attrs[1], 'three': attrs[2]} helpers.assert_roundtrip_tree(tree, tmpdir) def test_fortran_order(tmpdir): array = np.array([[11,12,13], [21,22,23]], order='F') tree = dict(data=array) helpers.assert_roundtrip_tree(tree, tmpdir) def test_readonly(tmpdir): tmpfile = str(tmpdir.join('data.asdf')) tree = dict(data=np.ndarray((100))) with asdf.AsdfFile(tree) as af: # Make sure we're actually writing to an internal array for this test af.write_to(tmpfile, all_array_storage='internal') # Opening in read mode (the default) should mean array is readonly with asdf.open(tmpfile) as af: assert af['data'].flags.writeable == False with pytest.raises(ValueError) as err: af['data'][0] = 41 assert str(err) == 'assignment destination is read-only' # This should be perfectly fine with asdf.open(tmpfile, mode='rw') as af: assert af['data'].flags.writeable == True af['data'][0] = 40 # Copying the arrays makes it safe to write to the underlying array with asdf.open(tmpfile, mode='r', copy_arrays=True) as af: assert af['data'].flags.writeable == True af['data'][0] = 42 def test_readonly_inline(tmpdir): tmpfile = str(tmpdir.join('data.asdf')) tree = dict(data=np.ndarray((100))) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile, all_array_storage='inline') # This should be safe since it's an inline array with asdf.open(tmpfile, mode='r') as af: assert af['data'].flags.writeable == True af['data'][0] = 42
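# --- Illustrative sketch (not part of the original test module) ----------
# Summary of the access modes exercised by the two tests above.  The helper
# and the call pattern are hypothetical, shown only for reference:

def _is_writeable(path, **open_kwargs):
    # Open `path` with the given options and report whether the
    # block-backed array can be modified in place.
    with asdf.open(path, **open_kwargs) as af:
        return af['data'].flags.writeable

# _is_writeable(f)                    -> False (memory-mapped, read-only)
# _is_writeable(f, mode='rw')         -> True  (memory-mapped, writable)
# _is_writeable(f, copy_arrays=True)  -> True  (in-memory copy; the file
#                                        on disk is never touched)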
# Confirm that NDArrayType's internal array is regenerated # following an update. def test_block_data_change(tmpdir): tmpfile = str(tmpdir.join("data.asdf")) tree = {"data": np.ndarray(10)} with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) with asdf.open(tmpfile, mode="rw") as af: array_before = af.tree["data"].__array__() af.update() array_after = af.tree["data"].__array__() assert array_before is not array_after asdf-2.5.1/asdf/tests/0000755000446400020070000000000013605166132016712 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/tests/__init__.py0000644000446400020070000000344713605165746021045 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This package contains affiliated package tests. """ import numpy as np from .. import CustomType, util from .helpers import get_test_data_path class CustomTestType(CustomType): """This class is intended to be inherited by custom types that are used purely for the purposes of testing. The methods ``from_tree_tagged`` and ``from_tree`` are implemented solely in order to avoid custom type conversion warnings. """ @classmethod def from_tree_tagged(cls, tree, ctx): return cls.from_tree(tree.data, ctx) @classmethod def from_tree(cls, tree, ctx): return tree def create_small_tree(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } return tree def create_large_tree(): # These are designed to be big enough so they don't fit in a # single block, but not so big that RAM/disk space for the tests # is enormous. x = np.random.rand(256, 256) y = np.random.rand(16, 16, 16) tree = { 'science_data': x, 'more': y } return tree class CustomExtension: """ This is the base class that is used for extensions for custom tag classes that exist only for the purposes of testing. """ @property def types(self): return [] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(get_test_data_path('')) + '/{url_suffix}.yaml')]
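# --- Illustrative sketch (not part of the original module) ---------------
# How test modules typically combine the helpers above.  The tag class and
# names here are hypothetical:

class _ExampleType(CustomTestType):
    name = 'example'
    organization = 'nowhere.org'
    standard = 'custom'
    version = '1.0.0'

class _ExampleExtension(CustomExtension):
    @property
    def types(self):
        return [_ExampleType]

# Passing `extensions=_ExampleExtension()` to asdf.open() would make the
# tag tag:nowhere.org:custom/example-1.0.0 resolve, via the tag_mapping and
# url_mapping defined above, to example-1.0.0.yaml in the test data
# directory.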
asdf-2.5.1/asdf/tests/conftest.py0000644000446400020070000000046413567314375021120 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import pytest import numpy as np from . import create_small_tree, create_large_tree @pytest.fixture def small_tree(): return create_small_tree() @pytest.fixture def large_tree(): return create_large_tree() asdf-2.5.1/asdf/tests/coveragerc0000644000446400020070000000135113567314375020770 0ustar eslavichSTSCI\science00000000000000[run] source = {packagename} omit = asdf/_astropy_init* asdf/conftest* asdf/cython_version* asdf/setup_package* asdf/*/setup_package* asdf/*/*/setup_package* asdf/tests/* asdf/*/tests/* asdf/*/*/tests/* asdf/version.* asdf/compat* asdf/extern* [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about packages we have installed except ImportError # Don't complain if tests don't hit assertions raise AssertionError raise NotImplementedError # Don't complain about script hooks def main\(.*\): # Ignore branches that don't pertain to this version of Python pragma: py{ignore_python_version} six.PY{ignore_python_version} asdf-2.5.1/asdf/tests/data/0000755000446400020070000000000013605166132017623 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf/tests/data/__init__.py0000644000446400020070000000010013567314375021736 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst asdf-2.5.1/asdf/tests/data/asdf.fits.gz0000644000446400020070000000740013567314375022062 0ustar eslavichSTSCI\science00000000000000[binary gzip data omitted]
asdf-2.5.1/asdf/tests/data/complex-42.0.0.yaml0000644000446400020070000000021213567314375022703 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/custom-42.0.0" type: stringasdf-2.5.1/asdf/tests/data/custom-1.0.0.yaml0000644000446400020070000000022513567314375022465 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom-1.0.0" type: integer default: 42asdf-2.5.1/asdf/tests/data/custom_flow-1.0.0.yaml0000644000446400020070000000032613567314375023516 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_flow-1.0.0" type: object properties: a: type: number b: type: number flowStyle: blockasdf-2.5.1/asdf/tests/data/custom_flow-1.1.0.yaml0000644000446400020070000000032713567314375023520 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_flow-1.1.0" type: object properties: c: type: number d: type: number flowStyle: block asdf-2.5.1/asdf/tests/data/custom_schema.yaml0000644000446400020070000000104113567314375023350 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" title: | Top-level custom schema used for testing. description: | This schema is used to test the custom schema validation mechanism in ASDF. type: object properties: foo: description: | Your generic kind of foo. type: object properties: x: type: number y: type: number bar: type: object properties: a: type: string b: type: string required: [foo, bar] additionalProperties: true asdf-2.5.1/asdf/tests/data/custom_schema_definitions.yaml0000644000446400020070000000062013567314375025745 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/asdf-1.1.0" tag: "tag:stsci.edu:asdf/core/asdf-1.1.0" type: object properties: thing: $ref: "#/definitions/bizbaz" required: [thing] additionalProperties: true definitions: bizbaz: type: object properties: biz: type: string baz: type: string asdf-2.5.1/asdf/tests/data/custom_style-1.0.0.yaml0000644000446400020070000000023613567314375023707 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_style-1.0.0" type: string style: literal asdf-2.5.1/asdf/tests/data/default-1.0.0.yaml0000644000446400020070000000042513567314375022601 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/default-1.0.0" type: object properties: a: type: integer default: 42 b: type: object properties: c: type: integer default: 82asdf-2.5.1/asdf/tests/data/example_schema.json0000755000446400020070000000423313567314375023505 0ustar eslavichSTSCI\science00000000000000{ "date" : { "title" : "[yyyy-mm-ddThh:mm:ss.ssssss] UTC date file created", "type" : "string", "sql_dtype" : "datetime2", "fits_keyword" : "DATE", "description" : "The UTC date and time when the HDU was created, in the form YYYY-MM-DDThh:mm:ss.ssssss, where YYYY shall be the four-digit calendar year number, MM the two-digit month
number with January given by 01 and December by 12, and DD the two-digit day of the month. The literal T shall separate the date and time, hh shall be the two-digit hour in the day, mm the two-digit number of minutes after the hour, and ss.ssssss the number of seconds (two digits followed by a fraction accurate to microseconds) after the minute. Default values must not be given to any portion of the date/time string, and leading zeros must not be omitted.", "calculation" : "Operating system time in the format of YYYY-MM-DDThh:mm:ss.ssssss", "default_value" : "", "example" : "2015-01-01T00:00:00.000001", "units" : "", "sw_source" : "calculation", "source" : "Science Data Processing (SDP)", "destination" : ["ScienceCommon.date","GuideStar.date"], "level" : "1a", "si" : "Multiple", "section" : "Basic", "mode" : "All", "fits_hdu" : "PRIMARY", "misc" : "" }, "origin" : { "title" : "institution responsible for creating FITS file", "type" : "string", "sql_dtype" : "nvarchar(20)", "fits_keyword" : "ORIGIN", "description" : "Identifies the organization or institution responsible for creating the FITS file.", "calculation" : "", "default_value" : "STSCI", "example" : "STSCI", "units" : "", "sw_source" : "", "source" : "Science Data Processing (SDP)", "destination" : ["ScienceCommon.origin","GuideStar.origin"], "level" : "1a", "si" : "Multiple", "section" : "Basic", "mode" : "All", "fits_hdu" : "PRIMARY", "misc" : "" } } asdf-2.5.1/asdf/tests/data/extension_check.fits0000644000446400020070000002070113605166107023665 0ustar eslavichSTSCI\science00000000000000SIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 0 / number of array dimensions EXTEND = T END XTENSION= 'ASDF ' / ASDF extension BITPIX = 8 / array data type NAXIS = 1 / number of array dimensions NAXIS1 = 538 / Axis length PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups COMPRESS= F / Uses gzip compression EXTNAME = 'ASDF ' / Name of ASDF extension END #ASDF 1.0.0 #ASDF_STANDARD 1.2.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.0.0.dev1264} history: extensions: - !core/extension_metadata-1.0.0 extension_class: foo.bar.FooBar software: {name: foo, version: 1.2.3} - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: {name: asdf, version: 2.0.0} ... asdf-2.5.1/asdf/tests/data/foreign_tag_reference-1.0.0.yaml0000644000446400020070000000101513567314375025453 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/foreign_tag_reference-1.0.0" title: An example custom type for testing tag references tag: "tag:nowhere.org:custom/foreign_tag_reference-1.0.0" type: object properties: a: # Test foreign tag reference using tag URI $ref: "tag:nowhere.org:custom/tag_reference-1.0.0" b: # Test foreign tag reference using tag ID $ref: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" required: [a, b] ... 
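# --- Illustrative sketch (not shipped in this archive) --------------------
# The foreign_tag_reference-1.0.0 schema above refers to
# tag_reference-1.0.0 (defined later in this directory) both by tag URI and
# by schema id.  A minimal stand-alone round-trip of the referenced schema,
# assuming this package's test helpers, might look like:

import asdf
from asdf.tests import CustomExtension, helpers

content = """
thing: !<tag:nowhere.org:custom/tag_reference-1.0.0>
  name: Something
  things: !core/ndarray-1.0.0
    data: [1, 2, 3]
"""
buff = helpers.yaml_to_asdf(content)
# `things` must itself be a tagged ndarray for the schema to validate.
with asdf.open(buff, extensions=CustomExtension()) as af:
    assert af.tree['thing']['name'] == 'Something'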
asdf-2.5.1/asdf/tests/data/fraction-1.0.0.yaml0000644000446400020070000000110513567314375022756 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fraction-1.0.0" title: An example custom type for handling fractions tag: "tag:nowhere.org:custom/fraction-1.0.0" # Using anyOf here is a cheap hack to allow us to support both formats in an # example that is used in the documentation (see docs/asdf/extensions.rst). anyOf: - type: array items: type: integer minItems: 2 maxItems: 2 - type: object properties: numerator: type: integer denominator: type: integer ... asdf-2.5.1/asdf/tests/data/fractional_2d_coord-1.0.0.yaml0000644000446400020070000000054313567314375025053 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fractional_2d_coord-1.0.0" title: An example custom type for handling components tag: "tag:nowhere.org:custom/fractional_2d_coord-1.0.0" type: object properties: x: $ref: fraction-1.0.0 y: $ref: fraction-1.0.0 required: [x, y] ... asdf-2.5.1/asdf/tests/data/missing-1.1.0.yaml0000644000446400020070000000021113567314375022620 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/missing-1.1.0" type: objectasdf-2.5.1/asdf/tests/data/self_referencing-1.0.0.yaml0000644000446400020070000000026713567314375024461 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/self_referencing-1.0.0" anyOf: - type: object - $ref: "#/anyOf/0" ...asdf-2.5.1/asdf/tests/data/tag_reference-1.0.0.yaml0000644000446400020070000000057213567314375023751 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" title: An example custom type for testing tag references tag: "tag:nowhere.org:custom/tag_reference-1.0.0" type: object properties: name: type: string things: $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0" required: [name, things] ... asdf-2.5.1/asdf/tests/data/version_mismatch.fits0000644000446400020070000001320013567314375024073 0ustar eslavichSTSCI\science00000000000000SIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 1 / number of array dimensions NAXIS1 = 288 EXTNAME = 'ASDF ' / extension name END #ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 a: !core/complex-7.0.0 0j asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev874} ... 
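# --- Illustrative sketch (not shipped in this archive) --------------------
# Stand-alone check of the anyOf logic in fraction-1.0.0.yaml above.  The
# schema is re-inlined here by hand for illustration:

import jsonschema

fraction_schema = {
    'anyOf': [
        {'type': 'array', 'items': {'type': 'integer'},
         'minItems': 2, 'maxItems': 2},
        {'type': 'object',
         'properties': {'numerator': {'type': 'integer'},
                        'denominator': {'type': 'integer'}}},
    ]
}

# Both spellings supported by the schema (and used in
# docs/asdf/extensions.rst) validate cleanly:
jsonschema.validate([3, 4], fraction_schema)
jsonschema.validate({'numerator': 3, 'denominator': 4}, fraction_schema)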
asdf-2.5.1/asdf/tests/helpers.py0000644000446400020070000002660213567314375020747 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import sys import warnings try: from astropy.coordinates import ICRS except ImportError: ICRS = None try: from astropy.coordinates.representation import CartesianRepresentation except ImportError: CartesianRepresentation = None try: from astropy.coordinates.representation import CartesianDifferential except ImportError: CartesianDifferential = None import asdf from ..asdf import AsdfFile, get_asdf_library_info from ..block import Block from .httpserver import RangeHTTPServer from ..extension import default_extensions from ..exceptions import AsdfConversionWarning from .. import versioning from ..tags.core import AsdfObject try: from pytest_remotedata.disable_internet import INTERNET_OFF except ImportError: INTERNET_OFF = False if sys.version_info >= (3, 7): from importlib import resources else: try: import importlib_resources as resources except ImportError: resources = None __all__ = ['get_test_data_path', 'assert_tree_match', 'assert_roundtrip_tree', 'yaml_to_asdf', 'get_file_sizes', 'display_warnings'] def get_test_data_path(name, module=None): if resources is None: raise RuntimeError("The importlib_resources package is required to get" " test data on systems with Python < 3.7") if module is None: from . import data as test_data module = test_data with resources.path(module, name) as path: return str(path) def assert_tree_match(old_tree, new_tree, ctx=None, funcname='assert_equal', ignore_keys=None): """ Assert that two ASDF trees match. Parameters ---------- old_tree : ASDF tree new_tree : ASDF tree ctx : ASDF file context Used to look up the set of types in effect. funcname : `str` or `callable` The name of a method on members of old_tree and new_tree that will be used to compare custom objects. The default of `assert_equal` handles Numpy arrays. ignore_keys : list of str List of keys to ignore """ seen = set() if ignore_keys is None: ignore_keys = ['asdf_library', 'history'] ignore_keys = set(ignore_keys) if ctx is None: version_string = str(versioning.default_version) ctx = default_extensions.extension_list else: version_string = ctx.version_string def recurse(old, new): if id(old) in seen or id(new) in seen: return seen.add(id(old)) seen.add(id(new)) old_type = ctx.type_index.from_custom_type(type(old), version_string) new_type = ctx.type_index.from_custom_type(type(new), version_string) if (old_type is not None and new_type is not None and old_type is new_type and (callable(funcname) or hasattr(old_type, funcname))): if callable(funcname): funcname(old, new) else: getattr(old_type, funcname)(old, new) elif isinstance(old, dict) and isinstance(new, dict): assert (set(x for x in old.keys() if x not in ignore_keys) == set(x for x in new.keys() if x not in ignore_keys)) for key in old.keys(): if key not in ignore_keys: recurse(old[key], new[key]) elif isinstance(old, (list, tuple)) and isinstance(new, (list, tuple)): assert len(old) == len(new) for a, b in zip(old, new): recurse(a, b) # The astropy classes CartesianRepresentation, CartesianDifferential, # and ICRS do not define equality in a way that is meaningful for unit # tests. We explicitly compare the fields that we care about in order # to enable our unit testing. It is possible that in the future it will # be necessary or useful to account for fields that are not currently # compared. 
elif CartesianRepresentation is not None and \ isinstance(old, CartesianRepresentation): assert old.x == new.x and old.y == new.y and old.z == new.z elif CartesianDifferential is not None and \ isinstance(old, CartesianDifferential): assert old.d_x == new.d_x and old.d_y == new.d_y and \ old.d_z == new.d_z elif ICRS is not None and isinstance(old, ICRS): assert old.ra == new.ra and old.dec == new.dec else: assert old == new recurse(old_tree, new_tree) def assert_roundtrip_tree(*args, **kwargs): """ Assert that a given tree saves to ASDF and, when loaded back, the tree matches the original tree. tree : ASDF tree tmpdir : str Path to temporary directory to save file tree_match_func : `str` or `callable` Passed to `assert_tree_match` and used to compare two objects in the tree. raw_yaml_check_func : callable, optional Will be called with the raw YAML content as a string to perform any additional checks. asdf_check_func : callable, optional Will be called with the reloaded ASDF file to perform any additional checks. """ with warnings.catch_warnings(): warnings.filterwarnings("error", category=AsdfConversionWarning) _assert_roundtrip_tree(*args, **kwargs) def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, raw_yaml_check_func=None, write_options={}, init_options={}, extensions=None, tree_match_func='assert_equal'): fname = str(tmpdir.join('test.asdf')) # First, test writing/reading a BytesIO buffer buff = io.BytesIO() AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) assert not buff.closed buff.seek(0) with asdf.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert 'asdf_library' in ff.tree assert ff.tree['asdf_library'] == get_asdf_library_info() assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) buff.seek(0) ff = AsdfFile(extensions=extensions, **init_options) content = AsdfFile._open_impl(ff, buff, mode='r', _get_yaml_content=True) buff.close() # We *never* want to get any raw python objects out assert b'!!python' not in content assert b'!core/asdf' in content assert content.startswith(b'%YAML 1.1') if raw_yaml_check_func: raw_yaml_check_func(content) # Then, test writing/reading to a real file ff = AsdfFile(tree, extensions=extensions, **init_options) ff.write_to(fname, **write_options) with asdf.open(fname, mode='rw', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) # Make sure everything works without a block index write_options['include_block_index'] = False buff = io.BytesIO() AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) assert not buff.closed buff.seek(0) with asdf.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) # Now try everything on an HTTP range server if not INTERNET_OFF and not sys.platform.startswith('win'): server = RangeHTTPServer() try: ff = AsdfFile(tree, extensions=extensions, **init_options) ff.write_to(os.path.join(server.tmpdir, 'test.asdf'), **write_options) with asdf.open(server.url + 'test.asdf', mode='r', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) finally: server.finalize() # Now don't be lazy and check that nothing breaks 
with io.BytesIO() as buff: AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) buff.seek(0) ff = asdf.open(buff, extensions=extensions, copy_arrays=True, lazy_load=False) # Ensure that all the blocks are loaded for block in ff.blocks._internal_blocks: assert isinstance(block, Block) assert block._data is not None # The underlying file is closed at this time and everything should still work assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) # Now repeat with copy_arrays=False and a real file to test mmap() AsdfFile(tree, extensions=extensions, **init_options).write_to(fname, **write_options) with asdf.open(fname, mode='rw', extensions=extensions, copy_arrays=False, lazy_load=False) as ff: for block in ff.blocks._internal_blocks: assert isinstance(block, Block) assert block._data is not None assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) def yaml_to_asdf(yaml_content, yaml_headers=True, standard_version=None): """ Given a string of YAML content, adds the extra pre- and post-amble to make it an ASDF file. Parameters ---------- yaml_content : string yaml_headers : bool, optional When True (default) add the standard ASDF YAML headers. Returns ------- buff : io.BytesIO() A file-like object containing the ASDF-like content. """ if isinstance(yaml_content, str): yaml_content = yaml_content.encode('utf-8') buff = io.BytesIO() if standard_version is None: standard_version = versioning.default_version if yaml_headers: buff.write("""#ASDF {0} #ASDF_STANDARD {1} %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-{0} """.format(AsdfObject.version, standard_version).encode('ascii')) buff.write(yaml_content) if yaml_headers: buff.write(b"\n...\n") buff.seek(0) return buff def get_file_sizes(dirname): """ Get the file sizes in a directory. Parameters ---------- dirname : string Path to a directory Returns ------- sizes : dict Dictionary of (file, size) pairs. """ files = {} for filename in os.listdir(dirname): path = os.path.join(dirname, filename) if os.path.isfile(path): files[filename] = os.stat(path).st_size return files def display_warnings(_warnings): """ Return a string that displays a list of unexpected warnings Parameters ---------- _warnings : iterable List of warnings to be displayed Returns ------- msg : str String containing the warning messages to be displayed """ if len(_warnings) == 0: return "No warnings occurred (was one expected?)" msg = "Unexpected warning(s) occurred:\n" for warning in _warnings: msg += "{}:{}: {}: {}\n".format( warning.filename, warning.lineno, warning.category.__name__, warning.message) return msg asdf-2.5.1/asdf/tests/httpserver.py0000644000446400020070000000415613567314375021513 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import queue import shutil import tempfile import threading import http.server import socketserver from ..extern.RangeHTTPServer import RangeHTTPRequestHandler __all__ = ['HTTPServer', 'RangeHTTPServer'] def run_server(tmpdir, handler_class, stop_event, queue): # pragma: no cover """ Runs an HTTP server serving files from given tmpdir in a separate process. When it's ready, it sends a URL to the server over a queue so the main process (the HTTP client) can start making requests of it. 
""" class HTTPRequestHandler(handler_class): def translate_path(self, path): path = handler_class.translate_path(self, path) path = os.path.join( tmpdir, os.path.relpath(path, os.getcwd())) return path server = socketserver.TCPServer(("127.0.0.1", 0), HTTPRequestHandler) domain, port = server.server_address url = "http://{0}:{1}/".format(domain, port) # Set a reasonable timeout so that invalid requests (which may occur during # testing) do not cause the entire test suite to hang indefinitely server.timeout = 0.1 queue.put(url) # Using server.serve_forever does not work here since it ignores the # timeout value set above. Having an explicit loop also allows us to kill # the server from the parent thread. while not stop_event.isSet(): server.handle_request() server.server_close() class HTTPServer: handler_class = http.server.SimpleHTTPRequestHandler def __init__(self): self.tmpdir = tempfile.mkdtemp() q = queue.Queue() self.stop_event = threading.Event() args = (self.tmpdir, self.handler_class, self.stop_event, q) self.thread = threading.Thread(target=run_server, args=args) self.thread.start() self.url = q.get() def finalize(self): self.stop_event.set() self.thread.join() shutil.rmtree(self.tmpdir) class RangeHTTPServer(HTTPServer): handler_class = RangeHTTPRequestHandler asdf-2.5.1/asdf/tests/test_api.py0000644000446400020070000002722713605165767021123 0ustar eslavichSTSCI\science00000000000000# -*- coding: utf-8 -*- import os import io import pathlib import numpy as np from numpy.testing import assert_array_equal from astropy.modeling import models import pytest import asdf from asdf import treeutil from asdf import extension from asdf import resolver from asdf import schema from asdf import versioning from asdf.exceptions import AsdfDeprecationWarning from .helpers import assert_tree_match, assert_roundtrip_tree, display_warnings def test_get_data_from_closed_file(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.open(path) as ff: pass with pytest.raises(IOError): assert_array_equal(my_array, ff.tree['my_array']) def test_no_warning_nan_array(tmpdir): """ Tests for a regression that was introduced by https://github.com/spacetelescope/asdf/pull/557 """ tree = dict(array=np.array([1, 2, np.nan])) with pytest.warns(None) as w: assert_roundtrip_tree(tree, tmpdir) assert len(w) == 0, display_warnings(w) def test_warning_deprecated_open(tmpdir): tmpfile = str(tmpdir.join('foo.asdf')) tree = dict(foo=42, bar='hello') with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) with pytest.warns(AsdfDeprecationWarning): with asdf.AsdfFile.open(tmpfile) as af: assert_tree_match(tree, af.tree) def test_open_readonly(tmpdir): tmpfile = str(tmpdir.join('readonly.asdf')) tree = dict(foo=42, bar='hello', baz=np.arange(20)) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile, all_array_storage='internal') os.chmod(tmpfile, 0o440) assert os.access(tmpfile, os.W_OK) == False with asdf.open(tmpfile) as af: assert af['baz'].flags.writeable == False with pytest.raises(PermissionError): with asdf.open(tmpfile, mode='rw'): pass def test_atomic_write(tmpdir, small_tree): tmpfile = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(small_tree) ff.write_to(tmpfile) with asdf.open(tmpfile, mode='r') as ff: ff.write_to(tmpfile) def test_overwrite(tmpdir): # This is intended to reproduce the following issue: # https://github.com/spacetelescope/asdf/issues/100 tmpfile = 
os.path.join(str(tmpdir), 'test.asdf') aff = models.AffineTransformation2D(matrix=[[1, 2], [3, 4]]) f = asdf.AsdfFile() f.tree['model'] = aff f.write_to(tmpfile) model = f.tree['model'] ff = asdf.AsdfFile() ff.tree['model'] = model ff.write_to(tmpfile) def test_default_version(): # See https://github.com/spacetelescope/asdf/issues/364 version_map = versioning.get_version_map(versioning.default_version) ff = asdf.AsdfFile() assert ff.file_format_version == version_map['FILE_FORMAT'] def test_update_exceptions(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.open(path, mode='r', copy_arrays=True) as ff: with pytest.raises(IOError): ff.update() ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) with asdf.open(buff, mode='rw') as ff: ff.update() with pytest.raises(ValueError): asdf.AsdfFile().update() def test_top_level_tree(small_tree): tree = {'tree': small_tree} ff = asdf.AsdfFile(tree) assert_tree_match(ff.tree['tree'], ff['tree']) ff2 = asdf.AsdfFile() ff2['tree'] = small_tree assert_tree_match(ff2.tree['tree'], ff2['tree']) def test_top_level_keys(small_tree): tree = {'tree': small_tree} ff = asdf.AsdfFile(tree) assert ff.tree.keys() == ff.keys() def test_top_level_contains(): tree = { 'foo': 42, 'bar': 43, } with asdf.AsdfFile(tree) as af: assert 'foo' in af assert 'bar' in af def test_walk_and_modify_remove_keys(): tree = { 'foo': 42, 'bar': 43 } def func(x): if x == 42: return None return x tree2 = treeutil.walk_and_modify(tree, func) assert 'foo' not in tree2 assert 'bar' in tree2 def test_copy(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array, 'foo': {'bar': 'baz'}} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, 'test.asdf')) with asdf.open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = ff.copy() ff2.tree['my_array'] *= 2 ff2.tree['foo']['bar'] = 'boo' assert np.all(ff2.tree['my_array'] == ff.tree['my_array'] * 2) assert ff.tree['foo']['bar'] == 'baz' assert_array_equal(ff2.tree['my_array'], ff2.tree['my_array']) def test_tag_to_schema_resolver_deprecation(): ff = asdf.AsdfFile() with pytest.warns(AsdfDeprecationWarning): ff.tag_to_schema_resolver('foo') with pytest.warns(AsdfDeprecationWarning): extension_list = extension.default_extensions.extension_list extension_list.tag_to_schema_resolver('foo') def test_access_tree_outside_handler(tmpdir): tempname = str(tmpdir.join('test.asdf')) tree = {'random': np.random.random(10)} ff = asdf.AsdfFile(tree) ff.write_to(str(tempname)) with asdf.open(tempname) as newf: pass # Accessing array data outside of handler should fail with pytest.raises(OSError): repr(newf.tree['random']) # Using the top-level getattr should also fail with pytest.raises(OSError): repr(newf['random']) def test_context_handler_resolve_and_inline(tmpdir): # This reproduces the issue reported in # https://github.com/spacetelescope/asdf/issues/406 tempname = str(tmpdir.join('test.asdf')) tree = {'random': np.random.random(10)} ff = asdf.AsdfFile(tree) ff.write_to(str(tempname)) with asdf.open(tempname) as newf: newf.resolve_and_inline() with pytest.raises(OSError): newf.tree['random'][0] def test_open_pathlib_path(tmpdir): filename = str(tmpdir.join('pathlib.asdf')) path = pathlib.Path(filename) tree = {'data': np.ones(10)} with asdf.AsdfFile(tree) as af: af.write_to(path) with asdf.open(path) as af: assert (af['data'] == tree['data']).all() 
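# --- Illustrative sketch (not part of the original test module) ----------
# The tests above lean on two distinct save paths, summarized here.  The
# function name and the tree contents are hypothetical:

def _write_then_update(path, tree):
    # write_to() serializes a complete new file and works on any target...
    asdf.AsdfFile(tree).write_to(path)
    # ...while update() rewrites an existing file in place, and therefore
    # requires the file to have been opened with mode='rw'.
    with asdf.open(path, mode='rw') as af:
        af.tree['extra'] = 'added later'
        af.update()
    with asdf.open(path) as af:
        assert af['extra'] == 'added later'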
@pytest.mark.parametrize('installed,extension,warns', [ ('1.2.3', '2.0.0', True), ('1.2.3', '2.0.dev10842', True), ('2.0.0', '2.0.0', False), ('2.0.1', '2.0.0', False), ('2.0.1', '2.0.dev12345', False), ]) def test_extension_version_check(installed, extension, warns): af = asdf.AsdfFile() af._fname = 'test.asdf' af._extension_metadata['foo.extension.FooExtension'] = ('foo', installed) tree = { 'history': { 'extensions': [ asdf.tags.core.ExtensionMetadata('foo.extension.FooExtension', asdf.tags.core.Software(name='foo', version=extension)), ] } } if warns: with pytest.warns(UserWarning) as w: af._check_extensions(tree) assert str(w[0].message).startswith("File 'test.asdf' was created with") with pytest.raises(RuntimeError) as err: af._check_extensions(tree, strict=True) err.match("^File 'test.asdf' was created with") else: af._check_extensions(tree) @pytest.mark.xfail(reason='Setting auto_inline option modifies AsdfFile state') def test_auto_inline(tmpdir): outfile = str(tmpdir.join('test.asdf')) tree = dict(data=np.arange(6)) # Use the same object for each write in order to make sure that there # aren't unanticipated side effects with asdf.AsdfFile(tree) as af: af.write_to(outfile) assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 1 af.write_to(outfile, auto_inline=10) assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 0 af.write_to(outfile) assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 1 af.write_to(outfile, auto_inline=7) assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 0 af.write_to(outfile, auto_inline=5) assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 1 @pytest.mark.skip(reason='Until inline_threshold is added as a write option') def test_inline_threshold(tmpdir): tree = { 'small': np.ones(10), 'large': np.ones(100) } with asdf.AsdfFile(tree) as af: assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 1 with asdf.AsdfFile(tree, inline_threshold=10) as af: assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 1 with asdf.AsdfFile(tree, inline_threshold=5) as af: assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 2 with asdf.AsdfFile(tree, inline_threshold=100) as af: assert len(list(af.blocks.inline_blocks)) == 2 assert len(list(af.blocks.internal_blocks)) == 0 @pytest.mark.skip(reason='Until inline_threshold is added as a write option') def test_inline_threshold_masked(tmpdir): mask = np.random.randint(0, 1+1, 20) masked_array = np.ma.masked_array(np.ones(20), mask=mask) tree = { 'masked': masked_array } # Make sure that masked arrays aren't automatically inlined, even if they # are small enough with asdf.AsdfFile(tree) as af: assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 2 tree = { 'masked': masked_array, 'normal': np.random.random(20) } with asdf.AsdfFile(tree) as af: assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 2 @pytest.mark.skip(reason='Until inline_threshold is added as a write option') def test_inline_threshold_override(tmpdir): tmpfile = str(tmpdir.join('inline.asdf')) tree = { 'small': np.ones(10), 'large': np.ones(100) } with asdf.AsdfFile(tree) as af: af.set_array_storage(tree['small'], 'internal') assert 
len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 2 with asdf.AsdfFile(tree) as af: af.set_array_storage(tree['large'], 'inline') assert len(list(af.blocks.inline_blocks)) == 2 assert len(list(af.blocks.internal_blocks)) == 0 with asdf.AsdfFile(tree) as af: af.write_to(tmpfile, all_array_storage='internal') assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 2 with asdf.AsdfFile(tree) as af: af.write_to(tmpfile, all_array_storage='inline') assert len(list(af.blocks.inline_blocks)) == 2 assert len(list(af.blocks.internal_blocks)) == 0 def test_resolver_deprecations(): for resolver_method in [ resolver.default_resolver, resolver.default_tag_to_url_mapping, resolver.default_url_mapping, schema.default_ext_resolver ]: with pytest.warns(AsdfDeprecationWarning): resolver_method("foo") def test_get_default_resolver(): resolver = extension.get_default_resolver() result = resolver('tag:stsci.edu:asdf/core/ndarray-1.0.0') assert result.endswith("/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml") asdf-2.5.1/asdf/tests/test_array_blocks.py0000644000446400020070000005655113605165746023024 0ustar eslavichSTSCI\science00000000000000# -*- coding: utf-8 -*- import io import os import numpy as np from numpy.testing import assert_array_equal import pytest import asdf from asdf import block from asdf import constants from asdf import generic_io def test_external_block(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.set_array_storage(my_array, 'external') assert ff.get_array_storage(my_array) == 'external' ff.write_to(os.path.join(tmpdir, "test.asdf")) assert 'test0000.asdf' in os.listdir(tmpdir) def test_external_block_non_url(): my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.set_array_storage(my_array, 'external') assert ff.get_array_storage(my_array) == 'external' buff = io.BytesIO() with pytest.raises(ValueError): ff.write_to(buff) def test_invalid_array_storage(): my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.set_array_storage(my_array, 'foo') b = block.Block() b._array_storage = 'foo' with pytest.raises(ValueError): ff.blocks.add(b) with pytest.raises(ValueError): ff.blocks.remove(b) def test_transfer_array_sources(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.write_to(os.path.join(tmpdir, "test2.asdf")) # write_to should have no effect on getting the original data assert_array_equal(my_array, ff.tree['my_array']) assert ff._fd is None def test_write_to_same(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) with asdf.open( os.path.join(tmpdir, "test.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.tree['extra'] = [0] * 1000 ff.write_to(os.path.join(tmpdir, "test2.asdf")) with asdf.open( os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) def test_pad_blocks(tmpdir): tmpdir = str(tmpdir) # This is the case where the new tree can't fit in the available space my_array = np.ones((8, 8)) * 1 my_array2 = np.ones((42, 5)) * 2 tree 
= { 'my_array': my_array, 'my_array2': my_array2 } ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True) with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['my_array'], my_array) assert_array_equal(ff.tree['my_array2'], my_array2) def test_update_expand_tree(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.arange(64) * 1 my_array2 = np.arange(64) * 2 tree = { 'arrays': [ my_array, my_array2, np.arange(3) ] } ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') assert len(list(ff.blocks.inline_blocks)) == 1 ff.write_to(testpath, pad_blocks=True) with asdf.open(testpath, mode='rw') as ff: assert_array_equal(ff.tree['arrays'][0], my_array) orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 6000 ff.update() with asdf.open(testpath) as ff: assert orig_offset <= ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) assert_array_equal(ff.tree['arrays'][1], my_array2) # Now, we expand the header only by a little bit ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') ff.write_to(os.path.join(tmpdir, "test2.asdf"), pad_blocks=True) with asdf.open(os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 2 ff.update() with asdf.open(os.path.join(tmpdir, "test2.asdf")) as ff: assert orig_offset == ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) assert_array_equal(ff.tree['arrays'][1], my_array2) def _get_update_tree(): return { 'arrays': [ np.arange(64) * 1, np.arange(64) * 2, np.arange(64) * 3 ] } def test_update_delete_first_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][0] ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) def test_update_delete_last_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][-1] ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) def test_update_delete_middle_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), 
mode="rw") as ff: del ff.tree['arrays'][1] ff.update() assert len(ff.blocks._internal_blocks) == 2 assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert len(ff.tree['arrays']) == 2 assert ff.tree['arrays'][0]._source == 0 assert ff.tree['arrays'][1]._source == 1 assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) def test_update_replace_first_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][0] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], np.arange(32)) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) def test_update_replace_last_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][2] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], np.arange(32)) def test_update_replace_middle_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][1] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], np.arange(32)) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) def test_update_add_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(32)) ff.update() with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) assert_array_equal(ff.tree['arrays'][3], np.arange(32)) def test_update_add_array_at_end(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with 
asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(2048)) ff.update() assert len(ff.blocks) == 4 assert os.stat(path).st_size >= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) assert_array_equal(ff.tree['arrays'][3], np.arange(2048)) def test_update_replace_all_arrays(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.ones((64, 64)) * 1 tree = { 'my_array': my_array, } ff = asdf.AsdfFile(tree) ff.write_to(testpath, pad_blocks=True) with asdf.open(testpath, mode='rw') as ff: ff.tree['my_array'] = np.ones((64, 64)) * 2 ff.update() with asdf.open(testpath) as ff: assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2) def test_update_array_in_place(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.ones((64, 64)) * 1 tree = { 'my_array': my_array, } ff = asdf.AsdfFile(tree) ff.write_to(testpath, pad_blocks=True) with asdf.open(testpath, mode='rw') as ff: array = np.asarray(ff.tree['my_array']) array *= 2 ff.update() with asdf.open(testpath) as ff: assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2) def test_init_from_asdffile(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff2 = asdf.AsdfFile(ff) assert ff.tree['my_array'] is ff2.tree['my_array'] assert_array_equal(ff.tree['my_array'], ff2.tree['my_array']) assert ff.blocks[my_array] != ff2.blocks[my_array] ff2.tree['my_array'] = None assert_array_equal(ff.tree['my_array'], my_array) ff.write_to(os.path.join(tmpdir, 'test.asdf')) with asdf.open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = asdf.AsdfFile(ff) assert not ff.tree['my_array'] is ff2.tree['my_array'] assert_array_equal(ff.tree['my_array'], ff2.tree['my_array']) assert ff.blocks[my_array] != ff2.blocks[my_array] ff2.tree['my_array'] = None assert_array_equal(ff.tree['my_array'], my_array) def test_seek_until_on_block_boundary(): # Create content where the first block begins on a # file-reading-block boundary. content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo : bar ... 
""" content += (b'\0' * (io.DEFAULT_BUFFER_SIZE - 2) + constants.BLOCK_MAGIC + b'\0\x30' + b'\0' * 50) buff = io.BytesIO(content) ff = asdf.open(buff) assert len(ff.blocks) == 1 buff.seek(0) fd = generic_io.InputStream(buff, 'r') ff = asdf.open(fd) assert len(ff.blocks) == 1 def test_checksum(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64, dtype=np.int64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.open(path, validate_checksums=True) as ff: assert type(ff.blocks._internal_blocks[0].checksum) == bytes assert ff.blocks._internal_blocks[0].checksum == \ b'\xcaM\\\xb8t_L|\x00\n+\x01\xf1\xcfP1' def test_checksum_update(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64, dtype=np.int64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.open(path, mode='rw') as ff: ff.tree['my_array'][7, 7] = 0.0 # update() should update the checksum, even if the data itself # is memmapped and isn't expressly re-written. ff.update() with asdf.open(path, validate_checksums=True) as ff: assert ff.blocks._internal_blocks[0].checksum == \ b'T\xaf~[\x90\x8a\x88^\xc2B\x96D,N\xadL' def test_deferred_block_loading(small_tree): buff = io.BytesIO() ff = asdf.AsdfFile(small_tree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. ff.write_to(buff, include_block_index=False, all_array_storage='internal') buff.seek(0) with asdf.open(buff) as ff2: assert len([x for x in ff2.blocks.blocks if isinstance(x, block.Block)]) == 1 x = ff2.tree['science_data'] * 2 x = ff2.tree['not_shared'] * 2 assert len([x for x in ff2.blocks.blocks if isinstance(x, block.Block)]) == 2 with pytest.raises(ValueError): ff2.blocks.get_block(2) def test_block_index(): buff = io.BytesIO() arrays = [] for i in range(100): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.open(buff) as ff2: assert isinstance(ff2.blocks._internal_blocks[0], block.Block) assert len(ff2.blocks._internal_blocks) == 100 for i in range(2, 99): assert isinstance(ff2.blocks._internal_blocks[i], block.UnloadedBlock) assert isinstance(ff2.blocks._internal_blocks[99], block.Block) # Force the loading of one array x = ff2.tree['arrays'][50] * 2 for i in range(2, 99): if i == 50: assert isinstance(ff2.blocks._internal_blocks[i], block.Block) else: assert isinstance(ff2.blocks._internal_blocks[i], block.UnloadedBlock) def test_large_block_index(): # This test is designed to test reading of a block index that is # larger than a single file system block, which is why we create # io.DEFAULT_BUFFER_SIZE / 4 arrays, and assuming each entry has more # than one digit in its address, we're guaranteed to have an index # larger than a filesystem block. # TODO: It would be nice to find a way to make this test faster. The # real bottleneck here is the enormous YAML section. buff = io.BytesIO() narrays = int(io.DEFAULT_BUFFER_SIZE / 4) arrays = [] for i in range(narrays): arrays.append(np.array([i], np.uint16)) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. 
ff.write_to(buff, all_array_storage='internal') buff.seek(0) with asdf.open(buff) as ff2: assert isinstance(ff2.blocks._internal_blocks[0], block.Block) assert len(ff2.blocks._internal_blocks) == narrays def test_no_block_index(): buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) assert constants.INDEX_HEADER not in buff.getvalue() def test_junk_after_index(): buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.write(b"JUNK") buff.seek(0) # This has junk after the block index, so it # should fall back to the skip method, which # only loads the first block. with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_short_file_find_block_index(): # This tests searching for a block index in a file that looks like # it might have an index, in the last filesystem block or so, but # ultimately proves to not have an index. buff = io.BytesIO() ff = asdf.AsdfFile({'arr': np.ndarray([1]), 'arr2': np.ndarray([2])}) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. ff.write_to(buff, include_block_index=False, all_array_storage='internal') buff.write(b'#ASDF BLOCK INDEX\n') buff.write(b'0' * (io.DEFAULT_BUFFER_SIZE * 4)) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_block_index_values(): # This adds a value in the block index that points to something # past the end of the file. In that case, we should just reject # the index altogether. buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks.append(block.UnloadedBlock(buff, 123456789)) ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_last_block_index(): # This adds a value in the block index that points to something # that isn't a block buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks[-1]._offset -= 4 ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_unordered_block_index(): # This creates a block index that isn't in increasing order buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks = ff.blocks._internal_blocks[::-1] ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_block_index_first_block_value(): # This creates a bogus block index where the offset of the first # block doesn't match what we already know it to be. In this # case, we should reject the whole block index. 
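# (The offset of the first block is known independently, since the reader has # already scanned past the YAML tree to locate it, which makes it a cheap # consistency check on the rest of the index.)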
buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks[0]._offset -= 4 ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_block_id(): ff = asdf.AsdfFile() with pytest.raises(ValueError): ff.blocks.get_block(-2) def test_dots_but_no_block_index(): # This puts `...` at the end of the file, so we sort of think # we might have a block index, but as it turns out, we don't # after reading a few chunks from the end of the file. buff = io.BytesIO() tree = { 'array': np.ones((8, 8)) } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) buff.write(b'A' * 64000) buff.write(b'...\n') buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 def test_open_no_memmap(tmpdir): tmpfile = os.path.join(str(tmpdir), 'random.asdf') tree = { 'array': np.random.random((20, 20)) } ff = asdf.AsdfFile(tree) ff.write_to(tmpfile) # Test that by default we use memmapped arrays when possible with asdf.open(tmpfile) as af: array = af.tree['array'] # Make sure to access the block so that it gets loaded x = array[0] assert array.block._memmapped == True assert isinstance(array.block._data, np.memmap) # Test that if we ask for copy, we do not get memmapped arrays with asdf.open(tmpfile, copy_arrays=True) as af: array = af.tree['array'] x = array[0] assert array.block._memmapped == False # We can't just check for isinstance(..., np.array) since this will # be true for np.memmap as well assert not isinstance(array.block._data, np.memmap) def test_fd_not_seekable(): data = np.ones(1024) b = block.Block(data=data) fd = io.BytesIO() fd.seekable = lambda: False fd.write_array = lambda arr: fd.write(arr.tobytes()) fd.read_blocks = lambda us: [fd.read(us)] fd.fast_forward = lambda offset: fd.seek(offset, 1) b.output_compression = 'zlib' b.write(fd) fd.seek(0) b = block.Block() b.read(fd) # We lost the information about the underlying array type, # but still can compare the bytes. assert b.data.tobytes() == data.tobytes() asdf-2.5.1/asdf/tests/test_asdftypes.py0000644000446400020070000006375313605165767022360 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import sys import copy from fractions import Fraction import pytest import asdf from asdf import types from asdf import extension from asdf import yamlutil from asdf import util from asdf import versioning from . 
import helpers, CustomTestType, CustomExtension TEST_DATA_PATH = str(helpers.get_test_data_path('')) class Fractional2dCoord: def __init__(self, x, y): self.x = x self.y = y def fractiontype_factory(): class FractionType(types.CustomType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [Fraction] handle_dynamic_subclasses = True @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return Fraction(tree[0], tree[1]) return FractionType def fractional2dcoordtype_factory(): FractionType = fractiontype_factory() class Fractional2dCoordType(types.CustomType): name = 'fractional_2d_coord' organization = 'nowhere.org' standard = 'custom' version = (1, 0, 0) types = [Fractional2dCoord] @classmethod def to_tree(cls, node, ctx): x = yamlutil.custom_tree_to_tagged_tree(node.x, ctx) y = yamlutil.custom_tree_to_tagged_tree(node.y, ctx) return dict(x=x, y=y) @classmethod def from_tree(cls, tree, ctx): x = yamlutil.tagged_tree_to_custom_tree(tree['x'], ctx) y = yamlutil.tagged_tree_to_custom_tree(tree['y'], ctx) return Fractional2dCoord(x, y) class Fractional2dCoordExtension(CustomExtension): @property def types(self): return [FractionType, Fractional2dCoordType] return FractionType, Fractional2dCoordType, Fractional2dCoordExtension def test_custom_tag(): FractionType = fractiontype_factory() class FractionExtension(CustomExtension): @property def types(self): return [FractionType] class FractionCallable(FractionExtension): @property def tag_mapping(self): def check(tag): prefix = 'tag:nowhere.org:custom' if tag.startswith(prefix): return 'http://nowhere.org/schemas/custom' + tag[len(prefix):] return [check] yaml = """ a: !<tag:nowhere.org:custom/fraction-1.0.0> [2, 3] b: !core/complex-1.0.0 0j """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff, extensions=FractionExtension()) as ff: assert ff.tree['a'] == Fraction(2, 3) buff = io.BytesIO() ff.write_to(buff) buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff, extensions=FractionCallable()) as ff: assert ff.tree['a'] == Fraction(2, 3) buff = io.BytesIO() ff.write_to(buff) buff.close() def test_version_mismatch(): yaml = """ a: !core/complex-42.0.0 0j """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as warning: with asdf.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 1 assert str(warning[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file, " "but latest supported version is 1.0.0") # Make sure warning is repeatable buff.seek(0) with pytest.warns(None) as warning: with asdf.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 1 assert str(warning[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file, " "but latest supported version is 1.0.0") # Make sure the warning does not occur if it is being ignored (default) buff.seek(0) with pytest.warns(None) as warning: with asdf.open(buff) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 0, helpers.display_warnings(warning) # If the major and minor match, there should be no warning.
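# (The complex-1.0.1 tag below differs from the latest supported 1.0.0 only # at the patch level, so it is treated as compatible and must not warn.)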
yaml = """ a: !core/complex-1.0.1 0j """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as warning: with asdf.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 0 @pytest.mark.skipif(sys.platform.startswith('win'), reason='Avoid path manipulation on Windows') def test_version_mismatch_file(tmpdir): testfile = os.path.join(str(tmpdir), 'mismatch.asdf') yaml = """ a: !core/complex-42.0.0 0j """ buff = helpers.yaml_to_asdf(yaml) with open(testfile, 'wb') as handle: handle.write(buff.read()) with pytest.warns(None) as w: with asdf.open(testfile, ignore_version_mismatch=False) as ff: assert ff._fname == "file://{}".format(testfile) assert isinstance(ff.tree['a'], complex) assert len(w) == 1 assert str(w[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file " "'file://{}', but latest supported version is 1.0.0".format(testfile)) def test_version_mismatch_with_supported_versions(): """Make sure that defining the supported_versions field does not affect whether or not schema mismatch warnings are triggered.""" class CustomFlow: pass class CustomFlowType(CustomTestType): version = '1.1.0' supported_versions = ['1.0.0', '1.1.0'] name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] class CustomFlowExtension(CustomExtension): @property def types(self): return [CustomFlowType] yaml = """ flow_thing: ! c: 100 d: 3.14 """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as w: data = asdf.open( buff, ignore_version_mismatch=False, extensions=CustomFlowExtension()) assert len(w) == 1, helpers.display_warnings(w) assert str(w[0].message) == ( "'tag:nowhere.org:custom/custom_flow' with version 1.0.0 found in " "file, but latest supported version is 1.1.0") def test_versioned_writing(monkeypatch): from ..tags.core.complex import ComplexType # Create a bogus version map monkeypatch.setitem(versioning._version_map, '42.0.0', { 'FILE_FORMAT': '42.0.0', 'YAML_VERSION': '1.1', 'tags': { 'tag:stsci.edu:asdf/core/complex': '42.0.0', 'tag:stscu.edu:asdf/core/asdf': '1.0.0' }, # We need to insert these explicitly since we're monkeypatching 'core': { 'tag:stsci.edu:asdf/core/complex': '42.0.0', 'tag:stscu.edu:asdf/core/asdf': '1.0.0' }, 'standard': {} }) # Add bogus version to supported versions monkeypatch.setattr(versioning, 'supported_versions', versioning.supported_versions + [versioning.AsdfVersion('42.0.0')] ) class FancyComplexType(types.CustomType): name = 'core/complex' organization = 'stsci.edu' standard = 'asdf' version = (42, 0, 0) types = [complex] @classmethod def to_tree(cls, node, ctx): return ComplexType.to_tree(node, ctx) @classmethod def from_tree(cls, tree, ctx): return ComplexType.from_tree(tree, ctx) class FancyComplexExtension: @property def types(self): return [FancyComplexType] @property def tag_mapping(self): return [] @property def url_mapping(self): return [('http://stsci.edu/schemas/asdf/core/complex-42.0.0', util.filepath_to_url(TEST_DATA_PATH) + '/complex-42.0.0.yaml')] tree = {'a': complex(0, -1)} buff = io.BytesIO() ff = asdf.AsdfFile(tree, version="42.0.0", extensions=[FancyComplexExtension()]) ff.write_to(buff) assert b'complex-42.0.0' in buff.getvalue() def test_longest_match(): class FancyComplexExtension: @property def types(self): return [] @property def tag_mapping(self): return [] @property def url_mapping(self): return [('http://stsci.edu/schemas/asdf/core/', 'FOOBAR/{url_suffix}')] l = extension.AsdfExtensionList( 
[extension.BuiltinExtension(), FancyComplexExtension()]) assert l.url_mapping( 'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') == 'FOOBAR/asdf-1.0.0' assert l.url_mapping( 'http://stsci.edu/schemas/asdf/transform/transform-1.0.0') != 'FOOBAR/transform-1.0.0' def test_module_versioning(): class NoModuleType(types.CustomType): # It seems highly unlikely that this would be a real module requires = ['qkjvqdja'] class HasCorrectPytest(types.CustomType): # This means it requires 1.0.0 or greater, so it should succeed requires = ['pytest-1.0.0'] class DoesntHaveCorrectPytest(types.CustomType): requires = ['pytest-91984.1.7'] nmt = NoModuleType() hcp = HasCorrectPytest() # perhaps an unfortunate acronym dhcp = DoesntHaveCorrectPytest() assert nmt.has_required_modules == False assert hcp.has_required_modules == True assert dhcp.has_required_modules == False def test_undefined_tag(): # This test makes sure that ASDF still returns meaningful structured data # even when it encounters a schema tag that it does not specifically # implement as an extension from numpy import array yaml = """ undefined_data: !<tag:nowhere.org:custom/undefined_tag-1.0.0> - 5 - {'message': 'there is no tag'} - !core/ndarray-1.0.0 [[1, 2, 3], [4, 5, 6]] - !<tag:nowhere.org:custom/also_undefined-1.3.0> - !core/ndarray-1.0.0 [[7],[8],[9],[10]] - !core/complex-1.0.0 3.14j """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as warning: afile = asdf.open(buff) missing = afile.tree['undefined_data'] assert missing[0] == 5 assert missing[1] == {'message': 'there is no tag'} assert (missing[2] == array([[1, 2, 3], [4, 5, 6]])).all() assert (missing[3][0] == array([[7],[8],[9],[10]])).all() assert missing[3][1] == 3.14j # There are two undefined tags, so we expect two warnings assert len(warning) == 2 for i, tag in enumerate(["also_undefined-1.3.0", "undefined_tag-1.0.0"]): assert str(warning[i].message) == ( "tag:nowhere.org:custom/{} is not recognized, converting to raw " "Python data structure".format(tag)) # Make sure no warning occurs if explicitly ignored buff.seek(0) with pytest.warns(None) as warning: afile = asdf.open(buff, ignore_unrecognized_tag=True) assert len(warning) == 0 def test_newer_tag(): # This test simulates a scenario where newer versions of CustomFlow # provide different keyword parameters that the older schema and tag class # do not account for. We want to test whether ASDF can handle this problem # gracefully and still provide meaningful data as output. The test case is # fairly contrived but we want to test whether ASDF can handle backwards # compatibility even when an explicit tag class for different versions of a # schema is not available. class CustomFlow: def __init__(self, c=None, d=None): self.c = c self.d = d class CustomFlowType(types.CustomType): version = '1.1.0' name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] @classmethod def from_tree(cls, tree, ctx): kwargs = {} for name in tree: kwargs[name] = tree[name] return CustomFlow(**kwargs) @classmethod def to_tree(cls, data, ctx): return dict(c=data.c, d=data.d) class CustomFlowExtension(CustomExtension): @property def types(self): return [CustomFlowType] new_yaml = """ flow_thing: !<tag:nowhere.org:custom/custom_flow-1.1.0> c: 100 d: 3.14 """ new_buff = helpers.yaml_to_asdf(new_yaml) new_data = asdf.open(new_buff, extensions=CustomFlowExtension()) assert type(new_data.tree['flow_thing']) == CustomFlow old_yaml = """ flow_thing: !<tag:nowhere.org:custom/custom_flow-1.0.0>
a: 100 b: 3.14 """ old_buff = helpers.yaml_to_asdf(old_yaml) with pytest.warns(None) as warning: asdf.open(old_buff, extensions=CustomFlowExtension()) assert len(warning) == 1, helpers.display_warnings(warning) # We expect this warning since it will not be possible to convert version # 1.0.0 of CustomFlow to a CustomType (by design, for testing purposes). assert str(warning[0].message).startswith( "Failed to convert " "tag:nowhere.org:custom/custom_flow-1.0.0 to custom type") def test_incompatible_version_check(): class TestType0(types.CustomType): supported_versions = versioning.AsdfSpec('>=1.2.0') assert TestType0.incompatible_version('1.1.0') == True assert TestType0.incompatible_version('1.2.0') == False assert TestType0.incompatible_version('2.0.1') == False class TestType1(types.CustomType): supported_versions = versioning.AsdfVersion('1.0.0') assert TestType1.incompatible_version('1.0.0') == False assert TestType1.incompatible_version('1.1.0') == True class TestType2(types.CustomType): supported_versions = '1.0.0' assert TestType2.incompatible_version('1.0.0') == False assert TestType2.incompatible_version('1.1.0') == True class TestType3(types.CustomType): # This doesn't make much sense, but it's just for the sake of example supported_versions = ['1.0.0', versioning.AsdfSpec('>=2.0.0')] assert TestType3.incompatible_version('1.0.0') == False assert TestType3.incompatible_version('1.1.0') == True assert TestType3.incompatible_version('2.0.0') == False assert TestType3.incompatible_version('2.0.1') == False class TestType4(types.CustomType): supported_versions = ['1.0.0', versioning.AsdfVersion('1.1.0')] assert TestType4.incompatible_version('1.0.0') == False assert TestType4.incompatible_version('1.0.1') == True assert TestType4.incompatible_version('1.1.0') == False assert TestType4.incompatible_version('1.1.1') == True class TestType5(types.CustomType): supported_versions = \ [versioning.AsdfSpec('<1.0.0'), versioning.AsdfSpec('>=2.0.0')] assert TestType5.incompatible_version('0.9.9') == False assert TestType5.incompatible_version('2.0.0') == False assert TestType5.incompatible_version('2.0.1') == False assert TestType5.incompatible_version('1.0.0') == True assert TestType5.incompatible_version('1.1.0') == True with pytest.raises(ValueError): class TestType6(types.CustomType): supported_versions = 'blue' with pytest.raises(ValueError): class TestType6(types.CustomType): supported_versions = ['1.1.0', '2.2.0', 'blue'] def test_supported_versions(): class CustomFlow: def __init__(self, c=None, d=None): self.c = c self.d = d class CustomFlowType(types.CustomType): version = '1.1.0' supported_versions = [(1,0,0), versioning.AsdfSpec('>=1.1.0')] name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] @classmethod def from_tree(cls, tree, ctx): # Convert old schema to new CustomFlow type if cls.version == '1.0.0': return CustomFlow(c=tree['a'], d=tree['b']) else: return CustomFlow(**tree) return CustomFlow(**kwargs) @classmethod def to_tree(cls, data, ctx): if cls.version == '1.0.0': tree = dict(a=data.c, b=data.d) else: tree = dict(c=data.c, d=data.d) class CustomFlowExtension(CustomExtension): @property def types(self): return [CustomFlowType] new_yaml = """ flow_thing: ! c: 100 d: 3.14 """ old_yaml = """ flow_thing: ! 
a: 100 b: 3.14 """ new_buff = helpers.yaml_to_asdf(new_yaml) new_data = asdf.open(new_buff, extensions=CustomFlowExtension()) assert type(new_data.tree['flow_thing']) == CustomFlow old_buff = helpers.yaml_to_asdf(old_yaml) old_data = asdf.open(old_buff, extensions=CustomFlowExtension()) assert type(old_data.tree['flow_thing']) == CustomFlow def test_unsupported_version_warning(): class CustomFlow: pass class CustomFlowType(types.CustomType): version = '1.0.0' supported_versions = [(1,0,0)] name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] class CustomFlowExtension(CustomExtension): @property def types(self): return [CustomFlowType] yaml = """ flow_thing: ! c: 100 d: 3.14 """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as _warnings: data = asdf.open(buff, extensions=CustomFlowExtension()) assert len(_warnings) == 1 assert str(_warnings[0].message) == ( "Version 1.1.0 of tag:nowhere.org:custom/custom_flow is not compatible " "with any existing tag implementations") def test_extension_override(tmpdir): gwcs = pytest.importorskip('gwcs', '0.12.0') from asdf.extension import default_extensions default_extensions.reset() version = str(versioning.default_version) tmpfile = str(tmpdir.join('override.asdf')) with asdf.AsdfFile() as aa: wti = aa.type_index._write_type_indices[version] assert wti.from_custom_type(gwcs.WCS) is gwcs.tags.wcs.WCSType aa.tree['wcs'] = gwcs.WCS(output_frame='icrs') aa.write_to(tmpfile) with open(tmpfile, 'rb') as ff: contents = str(ff.read()) assert gwcs.tags.wcs.WCSType.yaml_tag in contents def test_extension_override_subclass(tmpdir): gwcs = pytest.importorskip('gwcs', '0.12.0') astropy = pytest.importorskip('astropy', '4.0.0') from astropy.modeling import models from asdf.extension import default_extensions default_extensions.reset() version = str(versioning.default_version) tmpfile = str(tmpdir.join('override.asdf')) class SubclassWCS(gwcs.WCS): pass with asdf.AsdfFile() as aa: wti = aa.type_index._write_type_indices[version] assert wti.from_custom_type(gwcs.WCS) is gwcs.tags.wcs.WCSType assert wti.from_custom_type(SubclassWCS) is gwcs.tags.wcs.WCSType # The duplication here is deliberate: make sure that nothing has changed assert wti.from_custom_type(gwcs.WCS) is gwcs.tags.wcs.WCSType aa.tree['wcs'] = SubclassWCS(output_frame='icrs') aa.write_to(tmpfile) with open(tmpfile, 'rb') as ff: contents = str(ff.read()) assert gwcs.tags.wcs.WCSType.yaml_tag in contents def test_tag_without_schema(tmpdir): tmpfile = str(tmpdir.join('foo.asdf')) class FooType(types.CustomType): name = 'foo' def __init__(self, a, b): self.a = a self.b = b @classmethod def from_tree(cls, tree, ctx): return cls(tree['a'], tree['b']) @classmethod def to_tree(cls, node, ctx): return dict(a=node.a, b=node.b) def __eq__(self, other): return self.a == other.a and self.b == other.b class FooExtension: @property def types(self): return [FooType] @property def tag_mapping(self): return [] @property def url_mapping(self): return [] foo = FooType('hello', 42) tree = dict(foo=foo) with pytest.warns(UserWarning) as w: with asdf.AsdfFile(tree, extensions=FooExtension()) as af: af.write_to(tmpfile) # There are three validation passes when writing. 
Eventually this may # change assert len(w) == 3, helpers.display_warnings(w) assert str(w[0].message).startswith('Unable to locate schema file') assert str(w[1].message).startswith('Unable to locate schema file') assert str(w[2].message).startswith('Unable to locate schema file') with pytest.warns(UserWarning) as w: with asdf.AsdfFile(tree, extensions=FooExtension()) as ff: assert isinstance(ff.tree['foo'], FooType) assert ff.tree['foo'] == tree['foo'] # There is only one validation pass when writing. assert len(w) == 1, helpers.display_warnings(w) assert str(w[0].message).startswith('Unable to locate schema file') def test_subclass_decorator(tmpdir): tmpfile = str(tmpdir.join('subclass.asdf')) (FractionType, Fractional2dCoordType, Fractional2dCoordExtension) = fractional2dcoordtype_factory() extension = Fractional2dCoordExtension() coord = Fractional2dCoord(Fraction(2, 3), Fraction(7, 9)) tree = dict(coord=coord) # First make sure the base type is serialized properly with asdf.AsdfFile(tree, extensions=extension) as af: af.write_to(tmpfile) with asdf.open(tmpfile, extensions=extension) as af: assert isinstance(af['coord'], Fractional2dCoord) assert af['coord'].x == coord.x assert af['coord'].y == coord.y # Now create a subclass @Fractional2dCoordType.subclass class Subclass2dCoord(Fractional2dCoord): pass subclass_coord = Subclass2dCoord(Fraction(2, 3), Fraction(7, 9)) tree = dict(coord=subclass_coord) with asdf.AsdfFile(tree, extensions=extension) as af: af.write_to(tmpfile) with asdf.open(tmpfile, extensions=extension) as af: assert isinstance(af['coord'], Subclass2dCoord) assert af['coord'].x == subclass_coord.x assert af['coord'].y == subclass_coord.y def test_subclass_decorator_custom_attribute(tmpdir): tmpfile = str(tmpdir.join('subclass.asdf')) (FractionType, Fractional2dCoordType, Fractional2dCoordExtension) = fractional2dcoordtype_factory() extension = Fractional2dCoordExtension() coord = Fractional2dCoord(Fraction(2, 3), Fraction(7, 9)) tree = dict(coord=coord) # First make sure the base type is serialized properly with asdf.AsdfFile(tree, extensions=extension) as af: af.write_to(tmpfile) with asdf.open(tmpfile, extensions=extension) as af: assert isinstance(af['coord'], Fractional2dCoord) assert af['coord'].x == coord.x assert af['coord'].y == coord.y # Now create a subclass @Fractional2dCoordType.subclass(attribute='bizbaz') class Subclass2dCoord(Fractional2dCoord): pass subclass_coord = Subclass2dCoord(Fraction(2, 3), Fraction(7, 9)) tree = dict(coord=subclass_coord) with asdf.AsdfFile(tree, extensions=extension) as af: af.write_to(tmpfile) tmp = asdf.AsdfFile() af_yaml = asdf.AsdfFile._open_asdf(tmp, tmpfile, _force_raw_types=True) assert 'bizbaz' in af_yaml['coord'] assert 'Subclass2dCoord' in af_yaml['coord']['bizbaz']['name'] af_yaml.close() with asdf.open(tmpfile, extensions=extension) as af: assert isinstance(af['coord'], Subclass2dCoord) assert af['coord'].x == subclass_coord.x assert af['coord'].y == subclass_coord.y def test_subclass_decorator_attribute(tmpdir): tmpfile = str(tmpdir.join('subclass.asdf')) (FractionType, Fractional2dCoordType, Fractional2dCoordExtension) = fractional2dcoordtype_factory() extension = Fractional2dCoordExtension() @Fractional2dCoordType.subclass class Subclass2dCoord(Fractional2dCoord): def __init__(self, *args, custom=None, other=None, **kwargs): super().__init__(*args, **kwargs) self._custom = custom self._other = other @Fractional2dCoordType.subclass_property def custom(self): return self._custom 
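# A second property, to verify that multiple subclass attributes can be # round-tripped together.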
@Fractional2dCoordType.subclass_property def other(self): return self._other subclass_coord = Subclass2dCoord(Fraction(2, 3), Fraction(7, 9), custom='testing', other=[1,2,3,4]) tree = dict(coord=subclass_coord) with asdf.AsdfFile(tree, extensions=extension) as af: af.write_to(tmpfile) with asdf.open(tmpfile, extensions=extension) as af: assert isinstance(af['coord'], Subclass2dCoord) assert af['coord'].x == subclass_coord.x assert af['coord'].y == subclass_coord.y assert af['coord'].custom == 'testing' assert af['coord'].other == [1,2,3,4] def test_subclass_decorator_warning(tmpdir): tmpfile = str(tmpdir.join('fraction.asdf')) FractionType = fractiontype_factory() class FractionExtension(CustomExtension): @property def types(self): return [FractionType] @FractionType.subclass class MyFraction(Fraction): # We need to override __new__ since Fraction is immutable def __new__(cls, *args, custom='custom', **kwargs): self = super().__new__(cls, *args, **kwargs) self._custom_attribute = custom return self @FractionType.subclass_property def custom_attribute(self): return self._custom_attribute tree = dict(fraction=MyFraction(7, 9, custom='TESTING!')) with pytest.warns(UserWarning) as w: with asdf.AsdfFile(tree, extensions=FractionExtension()) as af: pass assert len(w) == 1, helpers.display_warnings(w) assert str(w[0].message).startswith("Failed to add subclass attribute(s)") asdf-2.5.1/asdf/tests/test_compression.py0000644000446400020070000001245613567314375022707 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import numpy as np import pytest import asdf from asdf import compression from asdf import generic_io from ..tests import helpers def _get_large_tree(): np.random.seed(0) x = np.random.rand(128, 128) tree = { 'science_data': x, } return tree def _get_sparse_tree(): np.random.seed(0) arr = np.zeros((128, 128)) for x, y, z in np.random.rand(64, 3): arr[int(x*127), int(y*127)] = z arr[0, 0] = 5.0 tree = {'science_data': arr} return tree def _roundtrip(tmpdir, tree, compression=None, write_options={}, read_options={}): tmpfile = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['science_data'], compression) ff.write_to(tmpfile, **write_options) with asdf.open(tmpfile, mode="rw") as ff: ff.update(**write_options) with asdf.open(tmpfile, **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) # Also test saving to a buffer buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['science_data'], compression) ff.write_to(buff, **write_options) buff.seek(0) with asdf.open(buff, **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) # Test saving to a non-seekable buffer buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['science_data'], compression) ff.write_to(generic_io.OutputStream(buff), **write_options) buff.seek(0) with asdf.open(generic_io.InputStream(buff), **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) return ff def test_invalid_compression(): tree = _get_large_tree() ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.set_array_compression(tree['science_data'], 'foo') with pytest.raises(ValueError): compression._get_decoder('foo') with pytest.raises(ValueError): compression._get_encoder('foo') def test_get_compressed_size(): assert compression.get_compressed_size(b'0' * 1024, 'zlib') < 1024 def test_decompress_too_long_short(): fio = io.BytesIO() 
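# Compress 1 KiB of data, then check that decompress() validates the # caller-supplied uncompressed size: the exact size (1024) succeeds, while # both a too-large (1025) and a too-small (1023) expectation raise ValueError.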
compression.compress(fio, b'0' * 1024, 'zlib') size = fio.tell() fio.seek(0) fio.read_blocks = lambda us: [fio.read(us)] compression.decompress(fio, size, 1024, 'zlib') fio.seek(0) with pytest.raises(ValueError): compression.decompress(fio, size, 1025, 'zlib') fio.seek(0) with pytest.raises(ValueError): compression.decompress(fio, size, 1023, 'zlib') def test_zlib(tmpdir): tree = _get_large_tree() _roundtrip(tmpdir, tree, 'zlib') def test_bzp2(tmpdir): tree = _get_large_tree() _roundtrip(tmpdir, tree, 'bzp2') def test_lz4(tmpdir): pytest.importorskip('lz4') tree = _get_large_tree() _roundtrip(tmpdir, tree, 'lz4') def test_recompression(tmpdir): tree = _get_large_tree() tmpfile = os.path.join(str(tmpdir), 'test1.asdf') afile = asdf.AsdfFile(tree) afile.write_to(tmpfile, all_array_compression='zlib') afile.close() afile = asdf.open(tmpfile) tmpfile = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile, all_array_compression='bzp2') afile.close() afile = asdf.open(tmpfile) helpers.assert_tree_match(tree, afile.tree) afile.close() def test_input(tmpdir): tree = _get_large_tree() tmpfile = os.path.join(str(tmpdir), 'test1.asdf') afile = asdf.AsdfFile(tree) afile.write_to(tmpfile, all_array_compression='zlib') afile.close() afile = asdf.open(tmpfile) tmpfile = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile) afile.close() afile = asdf.open(tmpfile) helpers.assert_tree_match(tree, afile.tree) assert afile.get_array_compression(afile.tree['science_data']) == 'zlib' afile.close() def test_none(tmpdir): tree = _get_large_tree() tmpfile1 = os.path.join(str(tmpdir), 'test1.asdf') with asdf.AsdfFile(tree) as afile: afile.write_to(tmpfile1) tmpfile2 = os.path.join(str(tmpdir), 'test2.asdf') with asdf.open(tmpfile1) as afile: assert afile.get_array_compression(afile.tree['science_data']) is None afile.write_to(tmpfile2, all_array_compression='zlib') assert afile.get_array_compression(afile.tree['science_data']) == 'zlib' with asdf.open(tmpfile2) as afile: afile.write_to(tmpfile1, all_array_compression=None) with asdf.open(tmpfile1) as afile: helpers.assert_tree_match(tree, afile.tree) assert afile.get_array_compression(afile.tree['science_data']) is None def test_set_array_compression(tmpdir): tmpfile = os.path.join(str(tmpdir), 'compressed.asdf') zlib_data = np.array([x for x in range(1000)]) bzp2_data = np.array([x for x in range(1000)]) tree = dict(zlib_data=zlib_data, bzp2_data=bzp2_data) with asdf.AsdfFile(tree) as af_out: af_out.set_array_compression(zlib_data, 'zlib') af_out.set_array_compression(bzp2_data, 'bzp2') af_out.write_to(tmpfile) with asdf.open(tmpfile) as af_in: assert af_in.get_array_compression(af_in.tree['zlib_data']) == 'zlib' assert af_in.get_array_compression(af_in.tree['bzp2_data']) == 'bzp2' asdf-2.5.1/asdf/tests/test_file_format.py0000644000446400020070000001213413567314375022626 0ustar eslavichSTSCI\science00000000000000# -*- coding: utf-8 -*- import os import io import pytest import asdf from asdf import generic_io def test_no_yaml_end_marker(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: bar...baz baz: 42 """ path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with pytest.raises(ValueError): with asdf.open(buff): pass buff.seek(0) fd = generic_io.InputStream(buff, 'r') with pytest.raises(ValueError): with asdf.open(fd): pass with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): with asdf.open(fd): pass def test_no_final_newline(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: ...bar... baz: 42 ...""" path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with asdf.open(buff) as ff: assert len(ff.tree) == 2 buff.seek(0) fd = generic_io.InputStream(buff, 'r') with asdf.open(fd) as ff: assert len(ff.tree) == 2 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with asdf.open(fd) as ff: assert len(ff.tree) == 2 def test_no_asdf_header(tmpdir): content = b"What? This ain't no ASDF file" path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with pytest.raises(ValueError): asdf.open(buff) with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): asdf.open(fd) def test_no_asdf_blocks(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: bar ... XXXXXXXX """ path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with asdf.open(buff) as ff: assert len(ff.blocks) == 0 buff.seek(0) fd = generic_io.InputStream(buff, 'r') with asdf.open(fd) as ff: assert len(ff.blocks) == 0 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with asdf.open(fd) as ff: assert len(ff.blocks) == 0 def test_invalid_source(small_tree): buff = io.BytesIO() ff = asdf.AsdfFile(small_tree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. ff.write_to(buff, all_array_storage='internal') buff.seek(0) with asdf.open(buff) as ff2: ff2.blocks.get_block(0) with pytest.raises(ValueError): ff2.blocks.get_block(2) with pytest.raises(IOError): ff2.blocks.get_block("http://127.0.0.1/") with pytest.raises(TypeError): ff2.blocks.get_block(42.0) with pytest.raises(ValueError): ff2.blocks.get_source(42.0) block = ff2.blocks.get_block(0) assert ff2.blocks.get_source(block) == 0 def test_empty_file(): buff = io.BytesIO(b"#ASDF 1.0.0\n") buff.seek(0) with asdf.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 buff = io.BytesIO(b"#ASDF 1.0.0\n#ASDF_STANDARD 1.0.0") buff.seek(0) with asdf.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 def test_not_asdf_file(): buff = io.BytesIO(b"SIMPLE") buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass buff = io.BytesIO(b"SIMPLE\n") buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_junk_file(): buff = io.BytesIO(b"#ASDF 1.0.0\nFOO") buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_block_mismatch(): # This is a file with a single small block, followed by something # that has an invalid block magic number. 
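# (b'\xd3BLK' below is the block magic number; the trailing b'FOOBAR' is not # valid block magic, so opening this buffer should raise ValueError.)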
buff = io.BytesIO( b'#ASDF 1.0.0\n\xd3BLK\x00\x28\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0FOOBAR') buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_block_header_too_small(): # The block header size must be at least 40 buff = io.BytesIO( b'#ASDF 1.0.0\n\xd3BLK\0\0') buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_invalid_version(tmpdir): content = b"""#ASDF 0.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-0.1.0 foo : bar ...""" buff = io.BytesIO(content) with pytest.raises(ValueError): with asdf.open(buff) as ff: pass def test_valid_version(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo : bar ...""" buff = io.BytesIO(content) with asdf.open(buff) as ff: version = ff.file_format_version assert version.major == 1 assert version.minor == 0 assert version.patch == 0 asdf-2.5.1/asdf/tests/test_fits_embed.py0000644000446400020070000003444213602667224022440 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import copy import os import sys import pytest import numpy as np from numpy.testing import assert_array_equal astropy = pytest.importorskip('astropy') from astropy.io import fits from astropy.table import Table import asdf from asdf import fits_embed from asdf import open as asdf_open from .helpers import assert_tree_match, display_warnings, get_test_data_path def create_asdf_in_fits(): """Test fixture to create AsdfInFits object to use for testing""" hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) tree = { 'model': { 'sci': { 'data': hdulist[0].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist[1].data, 'wcs': 'WCS info' }, 'err': { 'data': hdulist[2].data, 'wcs': 'WCS info' } } } return fits_embed.AsdfInFits(hdulist, tree) # Testing backwards compatibility ensures that we can continue to read and # write files that use the old convention of ImageHDU to store the ASDF file. 
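# The current convention stores the ASDF content in a BinTableHDU named # 'ASDF'; passing use_image_hdu=True to write_to() instead produces the # legacy ImageHDU-based layout.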
@pytest.mark.parametrize('backwards_compat', [False, True]) def test_embed_asdf_in_fits_file(tmpdir, backwards_compat): fits_testfile = str(tmpdir.join('test.fits')) asdf_testfile = str(tmpdir.join('test.asdf')) hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ')) # Test a name with underscores to make sure it works hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='WITH_UNDERSCORE')) tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist['DQ'].data, 'wcs': 'WCS info' }, 'with_underscore': { 'data': hdulist['WITH_UNDERSCORE'].data, 'wcs': 'WCS info' } } } ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to(fits_testfile, use_image_hdu=backwards_compat) with fits.open(fits_testfile) as hdulist2: assert len(hdulist2) == 4 assert [x.name for x in hdulist2] == ['SCI', 'DQ', 'WITH_UNDERSCORE', 'ASDF'] assert_array_equal(hdulist2[0].data, np.arange(512, dtype=np.float)) asdf_hdu = hdulist2['ASDF'] assert asdf_hdu.data.tostring().startswith(b'#ASDF') # When in backwards compatibility mode, the ASDF file will be contained # in an ImageHDU if backwards_compat: assert isinstance(asdf_hdu, fits.ImageHDU) assert asdf_hdu.data.tostring().strip().endswith(b'...') else: assert isinstance(asdf_hdu, fits.BinTableHDU) with fits_embed.AsdfInFits.open(hdulist2) as ff2: assert_tree_match(tree, ff2.tree) ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to(asdf_testfile) with asdf.open(asdf_testfile) as ff: assert_tree_match(tree, ff.tree) def test_embed_asdf_in_fits_file_anonymous_extensions(tmpdir): # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(os.path.join(str(tmpdir), 'test.fits')) ff2 = asdf.AsdfFile(asdf_in_fits.tree) ff2.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with fits.open(os.path.join(str(tmpdir), 'test.fits')) as hdulist: assert len(hdulist) == 4 assert [x.name for x in hdulist] == ['PRIMARY', '', '', 'ASDF'] asdf_hdu = hdulist['ASDF'] assert isinstance(asdf_hdu, fits.BinTableHDU) assert asdf_hdu.data.tostring().startswith(b'#ASDF') with fits_embed.AsdfInFits.open(hdulist) as ff2: assert_tree_match(asdf_in_fits.tree, ff2.tree) ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to(os.path.join(str(tmpdir), 'test.asdf')) with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff: assert_tree_match(asdf_in_fits.tree, ff.tree) @pytest.mark.xfail( reason="In-place update for ASDF-in-FITS does not currently work") def test_update_in_place(tmpdir): tempfile = str(tmpdir.join('test.fits')) # Create a file and write it out asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tempfile) # Open the file and add data so it needs to be updated with fits_embed.AsdfInFits.open(tempfile) as ff: ff.tree['new_stuff'] = "A String" ff.update() # Open the updated file and make sure everything looks okay with fits_embed.AsdfInFits.open(tempfile) as ff: assert ff.tree['new_stuff'] == "A String" assert_tree_match(ff.tree['model'], asdf_in_fits.tree['model']) def test_update_and_write_new(tmpdir): tempfile = str(tmpdir.join('test.fits')) newfile = str(tmpdir.join('new.fits')) # Create a file and write it out asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tempfile) # Open the file and add data so it needs to be updated with fits_embed.AsdfInFits.open(tempfile) as ff: ff.tree['new_stuff'] = "A String" ff.write_to(newfile) # Open 
the updated file and make sure everything looks okay with fits_embed.AsdfInFits.open(newfile) as ff: assert ff.tree['new_stuff'] == "A String" assert_tree_match(ff.tree['model'], asdf_in_fits.tree['model']) @pytest.mark.xfail( reason="ASDF HDU implementation does not currently reseek after writing") def test_access_hdu_data_after_write(tmpdir): # There is actually probably not a great reason to support this kind of # functionality, but I am adding a test here to record the failure for # posterity. tempfile = str(tmpdir.join('test.fits')) asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tempfile) asdf_hdu = asdf_in_fits._hdulist['ASDF'] assert asdf_hdu.data.tostring().startswith(b'#ASDF') def test_create_in_tree_first(tmpdir): tree = { 'model': { 'sci': { 'data': np.arange(512, dtype=np.float), 'wcs': 'WCS info' }, 'dq': { 'data': np.arange(512, dtype=np.float), 'wcs': 'WCS info' }, 'err': { 'data': np.arange(512, dtype=np.float), 'wcs': 'WCS info' } } } hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(tree['model']['sci']['data'])) hdulist.append(fits.ImageHDU(tree['model']['dq']['data'])) hdulist.append(fits.ImageHDU(tree['model']['err']['data'])) tmpfile = os.path.join(str(tmpdir), 'test.fits') with fits_embed.AsdfInFits(hdulist, tree) as ff: ff.write_to(tmpfile) with asdf.AsdfFile(tree) as ff: ff.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with asdf.open(os.path.join(str(tmpdir), 'plain.asdf')) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=np.float)) # This tests the changes that allow FITS files with ASDF extensions to be # opened directly by the top-level asdf.open API with asdf_open(tmpfile) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=np.float)) def compare_asdfs(asdf0, asdf1): # Make sure the trees match assert_tree_match(asdf0.tree, asdf1.tree) # Compare the data blocks for key in asdf0.tree['model'].keys(): assert_array_equal( asdf0.tree['model'][key]['data'], asdf1.tree['model'][key]['data']) def test_asdf_in_fits_open(tmpdir): """Test the open method of AsdfInFits""" tmpfile = os.path.join(str(tmpdir), 'test.fits') # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tmpfile) # Test opening the file directly from the URI with fits_embed.AsdfInFits.open(tmpfile) as ff: compare_asdfs(asdf_in_fits, ff) # Test open/close without context handler ff = fits_embed.AsdfInFits.open(tmpfile) compare_asdfs(asdf_in_fits, ff) ff.close() # Test reading in the file from an already-opened file handle with open(tmpfile, 'rb') as handle: with fits_embed.AsdfInFits.open(handle) as ff: compare_asdfs(asdf_in_fits, ff) # Test opening the file as a FITS file first and passing the HDUList with fits.open(tmpfile) as hdulist: with fits_embed.AsdfInFits.open(hdulist) as ff: compare_asdfs(asdf_in_fits, ff) def test_asdf_open(tmpdir): """Test the top-level open method of the asdf module""" tmpfile = os.path.join(str(tmpdir), 'test.fits') # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tmpfile) # Test opening the file directly from the URI with asdf_open(tmpfile) as ff: compare_asdfs(asdf_in_fits, ff) # Test open/close without context handler ff = asdf_open(tmpfile) compare_asdfs(asdf_in_fits, ff) ff.close() # Test reading in the file from an already-opened file handle with open(tmpfile, 'rb') as handle: with asdf_open(handle) as ff: compare_asdfs(asdf_in_fits, ff) #
Test opening the file as a FITS file first and passing the HDUList with fits.open(tmpfile) as hdulist: with asdf_open(hdulist) as ff: compare_asdfs(asdf_in_fits, ff) def test_open_gzipped(): testfile = get_test_data_path('asdf.fits.gz') # Opening as an HDU should work with fits.open(testfile) as ff: with asdf.open(ff) as af: assert af.tree['stuff'].shape == (20, 20) with fits_embed.AsdfInFits.open(testfile) as af: assert af.tree['stuff'].shape == (20, 20) with asdf.open(testfile) as af: assert af.tree['stuff'].shape == (20, 20) def test_bad_input(tmpdir): """Make sure these functions behave properly with bad input""" text_file = os.path.join(str(tmpdir), 'test.txt') with open(text_file, 'w') as fh: fh.write('I <3 ASDF!!!!!') with pytest.raises(ValueError): asdf_open(text_file) @pytest.mark.skipif(sys.platform.startswith('win'), reason='Avoid path manipulation on Windows') def test_version_mismatch_file(): testfile = str(get_test_data_path('version_mismatch.fits')) with pytest.warns(None) as w: with asdf.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) # This is the warning that we expect from opening the FITS file assert len(w) == 1, display_warnings(w) assert str(w[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 7.0.0 found in file " "'{}', but latest supported version is 1.0.0".format(testfile)) # Make sure warning does not occur when warning is ignored (default) with pytest.warns(None) as w: with asdf.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 0, display_warnings(w) with pytest.warns(None) as w: with fits_embed.AsdfInFits.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 1 assert str(w[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 7.0.0 found in file " "'{}', but latest supported version is 1.0.0".format(testfile)) # Make sure warning does not occur when warning is ignored (default) with pytest.warns(None) as w: with fits_embed.AsdfInFits.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 0, display_warnings(w) def test_serialize_table(tmpdir): tmpfile = str(tmpdir.join('table.fits')) data = np.random.random((10, 10)) table = Table(data) hdu = fits.BinTableHDU(table) hdulist = fits.HDUList() hdulist.append(hdu) tree = {'my_table': hdulist[1].data} with fits_embed.AsdfInFits(hdulist, tree) as ff: ff.write_to(tmpfile) with asdf.open(tmpfile) as ff: data = ff.tree['my_table'] assert data._source.startswith('fits:') def test_extension_check(): testfile = get_test_data_path('extension_check.fits') with pytest.warns(None) as warnings: with asdf.open(testfile) as ff: pass assert len(warnings) == 1, display_warnings(warnings) assert ("was created with extension 'foo.bar.FooBar', which is not " "currently installed (from package foo-1.2.3)") in str(warnings[0].message) # Make sure that suppressing the warning works as well with pytest.warns(None) as warnings: with asdf.open(testfile, ignore_missing_extensions=True) as ff: pass assert len(warnings) == 0, display_warnings(warnings) with pytest.raises(RuntimeError): with asdf.open(testfile, strict_extension_check=True) as ff: pass def test_verify_with_astropy(tmpdir): tmpfile = str(tmpdir.join('asdf.fits')) with create_asdf_in_fits() as aif: aif.write_to(tmpfile) with fits.open(tmpfile) as hdu: hdu.verify('exception') def test_dangling_file_handle(tmpdir): """ This tests the bug fix introduced in 
#533. Without the bug fix, this test will fail when running the test suite with pytest-openfiles. """ import gc fits_filename = str(tmpdir.join('dangling.fits')) # Create FITS file to use for test hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.writeto(fits_filename) hdulist.close() hdul = fits.open(fits_filename) gc.collect() ctx = asdf.AsdfFile() gc.collect() ctx.blocks.find_or_create_block_for_array(hdul[0].data, ctx) gc.collect() hdul.close() gc.collect() ctx.close() gc.collect() del ctx asdf-2.5.1/asdf/tests/test_generic_io.py0000644000446400020070000006045513571306144022441 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import sys import pytest import urllib.request as urllib_request import numpy as np import asdf from asdf import util from asdf import generic_io from asdf.asdf import is_asdf_file from . import helpers, create_small_tree, create_large_tree @pytest.fixture(params=[create_small_tree, create_large_tree]) def tree(request): return request.param() def _roundtrip(tree, get_write_fd, get_read_fd, write_options={}, read_options={}): # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. write_options.setdefault('all_array_storage', 'internal') with get_write_fd() as fd: asdf.AsdfFile(tree).write_to(fd, **write_options) # Work around the fact that generic_io's get_file doesn't have a way of # determining whether or not the underlying file handle should be # closed as part of the exit handler if isinstance(fd._fd, io.FileIO): fd._fd.close() with get_read_fd() as fd: ff = asdf.open(fd, **read_options) helpers.assert_tree_match(tree, ff.tree) return ff def test_mode_fail(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with pytest.raises(ValueError): generic_io.get_file(path, mode="r+") def test_open(tmpdir, small_tree): from .. import open path = os.path.join(str(tmpdir), 'test.asdf') # Simply tests the high-level "open" function ff = asdf.AsdfFile(small_tree) ff.write_to(path) with open(path) as ff2: helpers.assert_tree_match(ff2.tree, ff.tree) def test_path(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(path, mode='w') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): # Must open with mode=rw in order to get memmapped data f = generic_io.get_file(path, mode='rw') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) # This is to check for a "feature" in Python 3.x that reading zero # bytes from a socket causes it to stop. We have code in generic_io.py # to workaround it. 
f.read(0) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 next(ff.blocks.internal_blocks).data assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) def test_open2(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(open(path, 'wb'), mode='w', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): # Must open with mode=rw in order to get memmapped data f = generic_io.get_file(open(path, 'r+b'), mode='rw', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) def test_open_fail(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='w') def test_open_fail2(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with io.open(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='w') def test_open_fail3(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: fd.write("\n\n\n") with open(path, 'r') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='r') def test_open_fail4(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: fd.write("\n\n\n") with io.open(path, 'r') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='r') def test_io_open(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(io.open(path, 'wb'), mode='w', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): f = generic_io.get_file(io.open(path, 'r+b'), mode='rw', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) ff.tree['science_data'][0] = 42 def test_close_underlying(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with generic_io.get_file(open(path, 'wb'), mode='w', close=True) as ff: pass assert ff.is_closed() == True assert ff._fd.closed == True with generic_io.get_file(open(path, 'rb'), close=True) as ff2: pass assert ff2.is_closed() == True assert ff2._fd.closed == True def test_bytes_io(tree): buff = io.BytesIO() def get_write_fd(): f = generic_io.get_file(buff, mode='w') assert isinstance(f, generic_io.MemoryIO) return f def get_read_fd(): buff.seek(0) f = generic_io.get_file(buff, mode='rw') assert isinstance(f, generic_io.MemoryIO) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] = 42 def test_streams(tree): buff = io.BytesIO() def get_write_fd(): return generic_io.OutputStream(buff) def get_read_fd(): buff.seek(0) return generic_io.InputStream(buff, 'rw') with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(ff.blocks) == 2 assert not 
isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] = 42 def test_streams2(): buff = io.BytesIO(b'\0' * 60) buff.seek(0) fd = generic_io.InputStream(buff, 'r') x = fd._peek(10) x = fd.read() assert len(x) == 60 @pytest.mark.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_urlopen(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): return generic_io.get_file( urllib_request.urlopen( httpserver.url + "test.asdf")) with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) @pytest.mark.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_http_connection(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): fd = generic_io.get_file(httpserver.url + "test.asdf") assert isinstance(fd, generic_io.InputStream) # This is to check for a "feature" in Python 3.x that reading zero # bytes from a socket causes it to stop. We have code in generic_io.py # to workaround it. fd.read(0) return fd with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] == 42 @pytest.mark.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_http_connection_range(tree, rhttpserver): path = os.path.join(rhttpserver.tmpdir, 'test.asdf') connection = [None] def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): fd = generic_io.get_file(rhttpserver.url + "test.asdf") assert isinstance(fd, generic_io.HTTPConnection) connection[0] = fd return fd with _roundtrip(tree, get_write_fd, get_read_fd) as ff: if len(tree) == 4: assert connection[0]._nreads == 0 else: assert connection[0]._nreads == 6 assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] == 42 def test_exploded_filesystem(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): return generic_io.get_file(path, mode='r') with _roundtrip(tree, get_write_fd, get_read_fd, write_options={'all_array_storage': 'external'}) as ff: assert len(list(ff.blocks.internal_blocks)) == 0 assert len(list(ff.blocks.external_blocks)) == 2 def test_exploded_filesystem_fail(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): fd = io.BytesIO() with open(path, mode='rb') as fd2: fd.write(fd2.read()) fd.seek(0) return fd with get_write_fd() as fd: asdf.AsdfFile(tree).write_to(fd, all_array_storage='external') with get_read_fd() as fd: with asdf.open(fd) as ff: with pytest.raises(ValueError): 
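                # The tree itself loads fine; the mismatch only surfaces here
                # because comparing array values forces the external blocks to
                # be resolved, and a BytesIO copy has no URI from which to
                # locate the block files written alongside the original file.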
helpers.assert_tree_match(tree, ff.tree) @pytest.mark.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_exploded_http(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): return generic_io.get_file(httpserver.url + "test.asdf") with _roundtrip(tree, get_write_fd, get_read_fd, write_options={'all_array_storage': 'external'}) as ff: assert len(list(ff.blocks.internal_blocks)) == 0 assert len(list(ff.blocks.external_blocks)) == 2 def test_exploded_stream_write(small_tree): # Writing an exploded file to an output stream should fail, since # we can't write "files" alongside it. ff = asdf.AsdfFile(small_tree) with pytest.raises(ValueError): ff.write_to(io.BytesIO(), all_array_storage='external') def test_exploded_stream_read(tmpdir, small_tree): # Reading from an exploded input file should fail, but only once # the data block is accessed. This behavior is important so that # the tree can still be accessed even if the data is missing. path = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(small_tree) ff.write_to(path, all_array_storage='external') with open(path, 'rb') as fd: # This should work, so we can get the tree content x = generic_io.InputStream(fd, 'r') with asdf.open(x) as ff: # It's only when trying to access external data that an error occurs with pytest.raises(ValueError): ff.tree['science_data'][:] def test_unicode_open(tmpdir, small_tree): path = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(small_tree) ff.write_to(path) with io.open(path, 'rt', encoding="utf-8") as fd: with pytest.raises(ValueError): with asdf.open(fd): pass def test_invalid_obj(tmpdir): with pytest.raises(ValueError): generic_io.get_file(42) path = os.path.join(str(tmpdir), 'test.asdf') with generic_io.get_file(path, 'w') as fd: with pytest.raises(ValueError): fd2 = generic_io.get_file(fd, 'r') with pytest.raises(ValueError): fd2 = generic_io.get_file("http://www.google.com", "w") with pytest.raises(TypeError): fd2 = generic_io.get_file(io.StringIO()) with open(path, 'rb') as fd: with pytest.raises(ValueError): fd2 = generic_io.get_file(fd, 'w') with io.open(path, 'rb') as fd: with pytest.raises(ValueError): fd2 = generic_io.get_file(fd, 'w') with generic_io.get_file(sys.__stdout__, 'w'): pass def test_nonseekable_file(tmpdir): base = io.IOBase class FileWrapper(base): def tell(self): raise IOError() def seekable(self): return False def readable(self): return True def writable(self): return True with FileWrapper(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: assert isinstance(generic_io.get_file(fd, 'w'), generic_io.OutputStream) with pytest.raises(ValueError): generic_io.get_file(fd, 'rw') with FileWrapper(os.path.join(str(tmpdir), 'test.asdf'), 'rb') as fd: assert isinstance(generic_io.get_file(fd, 'r'), generic_io.InputStream) def test_relative_uri(): assert generic_io.relative_uri( 'http://www.google.com', 'file://local') == 'file://local' def test_arbitrary_file_object(): class Wrapper: def __init__(self, init): self._fd = init class Random: def seek(self, *args): return self._fd.seek(*args) def tell(self, *args): return self._fd.tell(*args) class Reader(Wrapper): def read(self, *args): return self._fd.read(*args) class RandomReader(Reader, Random): pass class Writer(Wrapper): def write(self, *args): return self._fd.write(*args) class RandomWriter(Writer, Random): pass class All(Reader, Writer, Random): pass buff = 
io.BytesIO() assert isinstance( generic_io.get_file(Reader(buff), 'r'), generic_io.InputStream) assert isinstance( generic_io.get_file(Writer(buff), 'w'), generic_io.OutputStream) assert isinstance( generic_io.get_file(RandomReader(buff), 'r'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(RandomWriter(buff), 'w'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'rw'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'r'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'w'), generic_io.MemoryIO) with pytest.raises(ValueError): generic_io.get_file(Reader(buff), 'w') with pytest.raises(ValueError): generic_io.get_file(Writer(buff), 'r') def test_check_bytes(tmpdir): with io.open(os.path.join(str(tmpdir), 'test.asdf'), 'w', encoding='utf-8') as fd: assert generic_io._check_bytes(fd, 'r') is False assert generic_io._check_bytes(fd, 'rw') is False assert generic_io._check_bytes(fd, 'w') is False with io.open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: assert generic_io._check_bytes(fd, 'r') is True assert generic_io._check_bytes(fd, 'rw') is True assert generic_io._check_bytes(fd, 'w') is True def test_truncated_reader(): """ Tests several edge cases for _TruncatedReader.read() Includes regression test for https://github.com/spacetelescope/asdf/pull/181 """ # TODO: Should probably break this up into multiple test cases fd = generic_io.RandomAccessFile(io.BytesIO(), 'rw') content = b'a' * 100 + b'b' fd.write(content) fd.seek(0) # Simple cases where the delimiter is not found at all tr = generic_io._TruncatedReader(fd, b'x', 1) with pytest.raises(ValueError): tr.read() fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1) assert tr.read(100) == content[:100] assert tr.read(1) == content[100:] with pytest.raises(ValueError): tr.read() fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, exception=False) assert tr.read() == content # No delimiter but with 'initial_content' init = b'abcd' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read(100) == (init + content)[:100] assert tr.read() == (init + content)[100:] fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read() == init + content fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read(2) == init[:2] assert tr.read() == init[2:] + content # Some tests of a single character delimiter # Add some trailing data after the delimiter fd.seek(0, 2) fd.write(b'ffff') # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1) assert tr.read(100) == content[:100] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1) assert tr.read() == content[:100] # Delimiter included fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read() == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(101) == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(102) == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(100) == content[:100] assert tr.read(1) == content[100:101] assert tr.read() == b'' # Longer delimiter with variable length content = b'a' * 100 + b'\n...\n' + b'ffffff' delimiter = 
br'\r?\n\.\.\.((\r?\n)|$)' readahead = 7 fd = generic_io.RandomAccessFile(io.BytesIO(), 'rw') fd.write(content) # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read() == content[:100] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(100) == content[:100] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read() == content[99:100] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read(2) == content[99:100] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read(50) == content[99:100] assert tr.read() == b'' # Same as the previous set of tests, but including the delimiter fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read() == content[:105] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(105) == content[:105] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read() == content[99:105] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read(2) == content[99:101] assert tr.read() == content[101:105] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read(50) == content[99:105] assert tr.read() == b'' # Same sequence of tests but with some 'initial_content' init = b'abcd' # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read() == (init + content[:100]) assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(100) == (init + content[:96]) assert tr.read() == content[96:100] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read() == content[95:100] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(6) == content[95:100] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(50) == content[95:100] assert tr.read() == b'' # Same as the previous set of tests, but including the delimiter fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read() == (init + content[:105]) assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert 
tr.read(105) == (init + content[:101]) assert tr.read() == content[101:105] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(103) == (init + content[:99]) assert tr.read() == content[99:105] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(6) == content[95:101] assert tr.read() == content[101:105] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(50) == content[95:105] assert tr.read() == b'' def test_is_asdf(tmpdir): # test fits astropy = pytest.importorskip('astropy') from astropy.io import fits hdul = fits.HDUList() phdu= fits.PrimaryHDU() imhdu= fits.ImageHDU(data=np.arange(24).reshape((4,6))) hdul.append(phdu) hdul.append(imhdu) path = os.path.join(str(tmpdir), 'test.fits') hdul.writeto(path) assert not is_asdf_file(path) assert is_asdf_file(asdf.AsdfFile()) asdf-2.5.1/asdf/tests/test_helpers.py0000644000446400020070000000250313567314375022000 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import pytest from asdf import types from asdf.exceptions import AsdfConversionWarning from asdf.tests.helpers import assert_roundtrip_tree def test_conversion_error(tmpdir): class FooType(types.CustomType): name = 'foo' def __init__(self, a, b): self.a = a self.b = b @classmethod def from_tree(cls, tree, ctx): raise TypeError("This allows us to test the failure") @classmethod def to_tree(cls, node, ctx): return dict(a=node.a, b=node.b) def __eq__(self, other): return self.a == other.a and self.b == other.b class FooExtension: @property def types(self): return [FooType] @property def tag_mapping(self): return [] @property def url_mapping(self): return [] foo = FooType(10, 'hello') tree = dict(foo=foo) with pytest.raises(AsdfConversionWarning): with pytest.warns(UserWarning) as w: assert_roundtrip_tree(tree, tmpdir, extensions=FooExtension()) # Make sure we still get other warnings assert len(w) == 1 assert str(w[0].message).startswith('Unable to locate schema file') asdf-2.5.1/asdf/tests/test_reference.py0000644000446400020070000001676013567314375022306 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import numpy as np from numpy.testing import assert_array_equal import pytest import asdf from asdf import reference from asdf import util from asdf.tags.core import ndarray from .helpers import assert_tree_match def test_external_reference(tmpdir): exttree = { 'cool_stuff': { 'a': np.array([0, 1, 2], np.float), 'b': np.array([3, 4, 5], np.float) }, 'list_of_stuff': [ 'foobar', 42, np.array([7, 8, 9], np.float) ] } external_path = os.path.join(str(tmpdir), 'external.asdf') ext = asdf.AsdfFile(exttree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. 
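    # ('internal' is one of the storage modes asdf recognizes, alongside
    # 'external', which writes each block to its own sidecar file, and
    # 'inline', which embeds the values as literal YAML text in the tree.)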
ext.write_to(external_path, all_array_storage='internal') external_path = os.path.join(str(tmpdir), 'external2.asdf') ff = asdf.AsdfFile(exttree) ff.write_to(external_path, all_array_storage='internal') tree = { # The special name "data" here must be an array. This is # included so that such validation can be ignored when we just # have a "$ref". 'data': { '$ref': 'external.asdf#/cool_stuff/a' }, 'science_data': { '$ref': 'external.asdf#/cool_stuff/a' }, 'science_data2': { '$ref': 'external2.asdf#/cool_stuff/a' }, 'foobar': { '$ref': 'external.asdf#/list_of_stuff/0', }, 'answer': { '$ref': 'external.asdf#/list_of_stuff/1' }, 'array': { '$ref': 'external.asdf#/list_of_stuff/2', }, 'whole_thing': { '$ref': 'external.asdf#' }, 'myself': { '$ref': '#', }, 'internal': { '$ref': '#science_data' } } def do_asserts(ff): assert 'unloaded' in repr(ff.tree['science_data']) assert 'unloaded' in str(ff.tree['science_data']) assert len(ff._external_asdf_by_uri) == 0 assert_array_equal(ff.tree['science_data'], exttree['cool_stuff']['a']) assert len(ff._external_asdf_by_uri) == 1 with pytest.raises((ValueError, RuntimeError)): # Assignment destination is readonly ff.tree['science_data'][0] = 42 assert_array_equal(ff.tree['science_data2'], exttree['cool_stuff']['a']) assert len(ff._external_asdf_by_uri) == 2 assert ff.tree['foobar']() == 'foobar' assert ff.tree['answer']() == 42 assert_array_equal(ff.tree['array'], exttree['list_of_stuff'][2]) assert_tree_match(ff.tree['whole_thing'](), exttree) assert_array_equal( ff.tree['whole_thing']['cool_stuff']['a'], exttree['cool_stuff']['a']) assert_array_equal( ff.tree['myself']['science_data'], exttree['cool_stuff']['a']) # Make sure that referencing oneself doesn't make another call # to disk. assert len(ff._external_asdf_by_uri) == 2 assert_array_equal(ff.tree['internal'], exttree['cool_stuff']['a']) with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: do_asserts(ff) internal_path = os.path.join(str(tmpdir), 'main.asdf') ff.write_to(internal_path) with asdf.open(internal_path) as ff: do_asserts(ff) with asdf.open(internal_path) as ff: assert len(ff._external_asdf_by_uri) == 0 ff.resolve_references() assert len(ff._external_asdf_by_uri) == 2 assert isinstance(ff.tree['data'], ndarray.NDArrayType) assert isinstance(ff.tree['science_data'], ndarray.NDArrayType) assert_array_equal(ff.tree['science_data'], exttree['cool_stuff']['a']) assert_array_equal(ff.tree['science_data2'], exttree['cool_stuff']['a']) assert ff.tree['foobar'] == 'foobar' assert ff.tree['answer'] == 42 assert_array_equal(ff.tree['array'], exttree['list_of_stuff'][2]) assert_tree_match(ff.tree['whole_thing'], exttree) assert_array_equal( ff.tree['whole_thing']['cool_stuff']['a'], exttree['cool_stuff']['a']) assert_array_equal( ff.tree['myself']['science_data'], exttree['cool_stuff']['a']) assert_array_equal(ff.tree['internal'], exttree['cool_stuff']['a']) @pytest.mark.remote_data def test_external_reference_invalid(tmpdir): tree = { 'foo': { '$ref': 'fail.asdf' } } ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.resolve_references() ff = asdf.AsdfFile(tree, uri="http://httpstat.us/404") with pytest.raises(IOError): ff.resolve_references() ff = asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) with pytest.raises(IOError): ff.resolve_references() def test_external_reference_invalid_fragment(tmpdir): exttree = { 'list_of_stuff': [ 'foobar', 42, np.array([7, 8, 9], np.float) ] } external_path = 
os.path.join(str(tmpdir), 'external.asdf') ff = asdf.AsdfFile(exttree) ff.write_to(external_path) tree = { 'foo': { '$ref': 'external.asdf#/list_of_stuff/a' } } with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: with pytest.raises(ValueError): ff.resolve_references() tree = { 'foo': { '$ref': 'external.asdf#/list_of_stuff/3' } } with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: with pytest.raises(ValueError): ff.resolve_references() def test_make_reference(tmpdir): exttree = { # Include some ~ and / in the name to make sure that escaping # is working correctly 'f~o~o/': { 'a': np.array([0, 1, 2], np.float), 'b': np.array([3, 4, 5], np.float) } } external_path = os.path.join(str(tmpdir), 'external.asdf') ext = asdf.AsdfFile(exttree) ext.write_to(external_path) with asdf.open(external_path) as ext: ff = asdf.AsdfFile() ff.tree['ref'] = ext.make_reference(['f~o~o/', 'a']) assert_array_equal(ff.tree['ref'], ext.tree['f~o~o/']['a']) ff.write_to(os.path.join(str(tmpdir), 'source.asdf')) with asdf.open(os.path.join(str(tmpdir), 'source.asdf')) as ff: assert ff.tree['ref']._uri == 'external.asdf#f~0o~0o~1/a' def test_internal_reference(tmpdir): testfile = os.path.join(str(tmpdir), 'test.asdf') tree = { 'foo': 2, 'bar': {'$ref': '#'} } ff = asdf.AsdfFile(tree) ff.find_references() assert isinstance(ff.tree['bar'], reference.Reference) ff.resolve_references() assert ff.tree['bar']['foo'] == 2 tree = { 'foo': 2 } ff = asdf.AsdfFile( tree, uri=util.filepath_to_url(os.path.abspath(testfile))) ff.tree['bar'] = ff.make_reference([]) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) ff = asdf.AsdfFile() content = asdf.AsdfFile()._open_impl(ff, buff, _get_yaml_content=True) assert b"{$ref: ''}" in content asdf-2.5.1/asdf/tests/test_reference_files.py0000644000446400020070000000504513567314375023462 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import sys import pytest from asdf import open as asdf_open from asdf import versioning from .helpers import assert_tree_match, display_warnings _REFFILE_PATH = os.path.join(os.path.dirname(__file__), '..', '..', 'asdf-standard', 'reference_files') def get_test_id(reference_file_path): """Helper function to return the informative part of a schema path""" path = os.path.normpath(str(reference_file_path)) return os.path.sep.join(path.split(os.path.sep)[-3:]) def collect_reference_files(): """Function used by pytest to collect ASDF reference files for testing.""" for version in versioning.supported_versions: version_dir = os.path.join(_REFFILE_PATH, str(version)) if os.path.exists(version_dir): for filename in os.listdir(version_dir): if filename.endswith(".asdf"): filepath = os.path.join(version_dir, filename) basename, _ = os.path.splitext(filepath) if os.path.exists(basename + ".yaml"): yield filepath def _compare_trees(name_without_ext, expect_warnings=False): asdf_path = name_without_ext + ".asdf" yaml_path = name_without_ext + ".yaml" with asdf_open(asdf_path) as af_handle: af_handle.resolve_and_inline() with asdf_open(yaml_path) as ref: def _compare_func(): assert_tree_match(af_handle.tree, ref.tree, funcname='assert_allclose') if expect_warnings: # Make sure to only suppress warnings when they are expected. # However, there's still a chance of missing warnings that we # actually care about here. 
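            # pytest.warns doubles as an assertion here: it fails the test if
            # no RuntimeWarning is raised, so the expected warning is checked
            # for rather than silently swallowed.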
with pytest.warns(RuntimeWarning) as w: _compare_func() else: _compare_func() @pytest.mark.parametrize( 'reference_file', collect_reference_files(), ids=get_test_id) def test_reference_file(reference_file): basename = os.path.basename(reference_file) name_without_ext, _ = os.path.splitext(reference_file) known_fail = False expect_warnings = 'complex' in reference_file if sys.maxunicode <= 65535: known_fail = known_fail or (basename in ('unicode_spp.asdf')) try: _compare_trees(name_without_ext, expect_warnings=expect_warnings) except Exception: if known_fail: pytest.xfail() else: raise asdf-2.5.1/asdf/tests/test_resolver.py0000644000446400020070000000723013605165746022200 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import pytest from asdf.resolver import Resolver, ResolverChain from asdf.exceptions import AsdfDeprecationWarning def test_resolver_no_mappings(): r = Resolver([], "test") assert r("united_states:maryland:baltimore") == "united_states:maryland:baltimore" def test_resolver_tuple_mapping(): r = Resolver([("united_states:", "earth:{test}")], "test") assert r("united_states:maryland:baltimore") == "earth:united_states:maryland:baltimore" r = Resolver([("united_states:", "{test_prefix}texas:houston")], "test") assert r("united_states:maryland:baltimore") == "united_states:texas:houston" r = Resolver([("united_states:", "{test_suffix}:hampden")], "test") assert r("united_states:maryland:baltimore") == "maryland:baltimore:hampden" def test_resolver_callable_mapping(): r = Resolver([lambda inp: "nowhere"], "test") assert r("united_states:maryland:baltimore") == "nowhere" def test_resolver_multiple_mappings(): r = Resolver([ ("united_states:", "unknown_region:{test_suffix}"), ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}") ], "test") # Should choose the mapping with the longest matched prefix: assert r("united_states:maryland:baltimore") == "mid_atlantic:maryland:baltimore" r = Resolver([ ("united_states:", "unknown_region:{test_suffix}"), lambda inp: "nowhere", ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}") ], "test") # Should prioritize the mapping offered by the callable: assert r("united_states:maryland:baltimore") == "nowhere" r = Resolver([ ("united_states:", "unknown_region:{test_suffix}"), lambda inp: None, ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}") ], "test") # None from the callable is a signal that it can't handle the input, # so we should fall back to the longest matched prefix: assert r("united_states:maryland:baltimore") == "mid_atlantic:maryland:baltimore" def test_resolver_non_prefix(): r = Resolver([("maryland:", "shouldn't happen")], "test") assert r("united_states:maryland:baltimore") == "united_states:maryland:baltimore" def test_resolver_invalid_mapping(): with pytest.raises(ValueError): Resolver([("foo",)], "test") with pytest.raises(ValueError): Resolver([12], "test") def test_resolver_hash_and_equals(): r1 = Resolver([("united_states:", "earth:{test}")], "test") r2 = Resolver([("united_states:", "earth:{test}")], "test") r3 = Resolver([("united_states:", "{test}:hampden")], "test") assert hash(r1) == hash(r2) assert r1 == r2 assert hash(r1) != hash(r3) assert r1 != r3 def test_resolver_add_mapping_deprecated(): r = Resolver([], "test") with pytest.warns(AsdfDeprecationWarning): r.add_mapping([("united_states:", "earth:{test}")], "test") def test_resolver_chain(): r1 = Resolver([("maryland:", "united_states:{test}")], 
"test") r2 = Resolver([("united_states:", "earth:{test}")], "test") chain = ResolverChain(r1, r2) assert chain("maryland:baltimore") == "earth:united_states:maryland:baltimore" def test_resolver_chain_hash_and_equals(): r1 = Resolver([("united_states:", "earth:{test}")], "test") r2 = Resolver([("united_states:", "earth:{test}")], "test") r3 = Resolver([("united_states:", "{test}:hampden")], "test") c1 = ResolverChain(r1, r3) c2 = ResolverChain(r2, r3) c3 = ResolverChain(r1, r2) assert hash(c1) == hash(c2) assert c1 == c2 assert hash(c1) != hash(c3) assert c1 != c3 asdf-2.5.1/asdf/tests/test_schema.py0000644000446400020070000005510313605165746021601 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import re import warnings from jsonschema import ValidationError import yaml import pytest import numpy as np from numpy.testing import assert_array_equal import asdf from asdf import types from asdf import extension from asdf import resolver from asdf import schema from asdf import util from asdf import yamlutil from asdf.tests import helpers, CustomExtension class TagReferenceType(types.CustomType): """ This class is used by several tests below for validating foreign type references in schemas and ASDF files. """ name = 'tag_reference' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' @classmethod def from_tree(cls, tree, ctx): node = {} node['name'] = tree['name'] node['things'] = yamlutil.tagged_tree_to_custom_tree(tree['things'], ctx) return node def test_tagging_scalars(): astropy = pytest.importorskip('astropy', '3.0.0') from astropy import units as u yaml = """ unit: !unit/unit-1.0.0 m not_unit: m """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff) as ff: assert isinstance(ff.tree['unit'], u.UnitBase) assert not isinstance(ff.tree['not_unit'], u.UnitBase) assert isinstance(ff.tree['not_unit'], str) assert ff.tree == { 'unit': u.m, 'not_unit': 'm' } def test_read_json_schema(): """Pytest to make sure reading JSON schemas succeeds. This was known to fail on Python 3.5 See issue #314 at https://github.com/spacetelescope/asdf/issues/314 for more details. """ json_schema = helpers.get_test_data_path('example_schema.json') schema_tree = schema.load_schema(json_schema, resolve_references=True) schema.check_schema(schema_tree) def test_load_schema(tmpdir): schema_def = """ %YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "../core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_full_tag(tmpdir): schema_def = """ %YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_tag_address(tmpdir): schema_def = """ %YAML 1.1 %TAG !asdf! 
tag:stsci.edu:asdf/
---
$schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0"
id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0"
tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0"

type: object
properties:
  foobar:
    $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0"

required: [foobar]
...
    """
    schema_path = tmpdir.join('nugatory.yaml')
    schema_path.write(schema_def.encode())

    schema_tree = schema.load_schema(str(schema_path), resolve_references=True)
    schema.check_schema(schema_tree)


def test_load_schema_with_file_url(tmpdir):
    schema_def = """
%YAML 1.1
%TAG !asdf! tag:stsci.edu:asdf/
---
$schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0"
id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0"
tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0"

type: object
properties:
  foobar:
    $ref: "{}"

required: [foobar]
...
    """.format(extension.get_default_resolver()('tag:stsci.edu:asdf/core/ndarray-1.0.0'))

    schema_path = tmpdir.join('nugatory.yaml')
    schema_path.write(schema_def.encode())

    schema_tree = schema.load_schema(str(schema_path), resolve_references=True)
    schema.check_schema(schema_tree)


def test_schema_caching():
    # Make sure that if we request the same URL, we get a different object
    # (despite the caching internal to load_schema). Changes to a schema
    # dict should not impact other uses of that schema.
    s1 = schema.load_schema(
        'http://stsci.edu/schemas/asdf/core/asdf-1.0.0')
    s2 = schema.load_schema(
        'http://stsci.edu/schemas/asdf/core/asdf-1.0.0')
    assert s1 is not s2


def test_asdf_file_resolver_hashing():
    # Confirm that resolvers from distinct AsdfFile instances
    # hash to the same value (this allows schema caching to function).
    a1 = asdf.AsdfFile()
    a2 = asdf.AsdfFile()

    assert hash(a1.resolver) == hash(a2.resolver)
    assert a1.resolver == a2.resolver


def test_flow_style():
    class CustomFlowStyleType(dict, types.CustomType):
        name = 'custom_flow'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class CustomFlowStyleExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowStyleType]

    tree = {
        'custom_flow': CustomFlowStyleType({'a': 42, 'b': 43})
    }

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree, extensions=CustomFlowStyleExtension())
    ff.write_to(buff)

    assert b'  a: 42\n  b: 43' in buff.getvalue()


def test_style():
    class CustomStyleType(str, types.CustomType):
        name = 'custom_style'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class CustomStyleExtension(CustomExtension):
        @property
        def types(self):
            return [CustomStyleType]

    tree = {
        'custom_style': CustomStyleType("short")
    }

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree, extensions=CustomStyleExtension())
    ff.write_to(buff)

    assert b'|-\n  short\n' in buff.getvalue()


def test_property_order():
    tree = {'foo': np.ndarray([1, 2, 3])}

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree)
    ff.write_to(buff)

    ndarray_schema = schema.load_schema(
        'http://stsci.edu/schemas/asdf/core/ndarray-1.0.0')
    property_order = ndarray_schema['anyOf'][1]['propertyOrder']

    last_index = 0
    for prop in property_order:
        index = buff.getvalue().find(prop.encode('utf-8') + b':')
        if index != -1:
            assert index > last_index
            last_index = index


def test_invalid_nested():
    class CustomType(str, types.CustomType):
        name = 'custom'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class CustomTypeExtension(CustomExtension):
        @property
        def types(self):
            return [CustomType]

    yaml = """
custom: !<tag:nowhere.org:custom/custom-1.0.0>
  foo
    """
    buff = helpers.yaml_to_asdf(yaml)
    # This should cause a warning but not an error because without explicitly
    # providing an extension, our custom type will not be recognized and will
    # simply be converted to a raw type.
    with pytest.warns(None) as warning:
        with asdf.open(buff):
            pass
    assert len(warning) == 1

    buff.seek(0)
    with pytest.raises(ValidationError):
        with asdf.open(buff, extensions=[CustomTypeExtension()]):
            pass

    # Make sure tags get validated inside of other tags that know
    # nothing about them.
    yaml = """
array: !core/ndarray-1.0.0
  data: [0, 1, 2]
  custom: !<tag:nowhere.org:custom/custom-1.0.0>
    foo
    """
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.raises(ValidationError):
        with asdf.open(buff, extensions=[CustomTypeExtension()]):
            pass


def test_invalid_schema():
    s = {'type': 'integer'}
    schema.check_schema(s)

    s = {'type': 'foobar'}
    with pytest.raises(ValidationError):
        schema.check_schema(s)


def test_defaults():
    s = {
        'type': 'object',
        'properties': {
            'a': {
                'type': 'integer',
                'default': 42
            }
        }
    }

    t = {}

    cls = schema._create_validator(schema.FILL_DEFAULTS)
    validator = cls(s)
    validator.validate(t, _schema=s)

    assert t['a'] == 42

    cls = schema._create_validator(schema.REMOVE_DEFAULTS)
    validator = cls(s)
    validator.validate(t, _schema=s)

    assert t == {}


def test_default_check_in_schema():
    s = {
        'type': 'object',
        'properties': {
            'a': {
                'type': 'integer',
                'default': 'foo'
            }
        }
    }

    with pytest.raises(ValidationError):
        schema.check_schema(s)


def test_fill_and_remove_defaults():
    class DefaultType(dict, types.CustomType):
        name = 'default'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class DefaultTypeExtension(CustomExtension):
        @property
        def types(self):
            return [DefaultType]

    yaml = """
custom: !<tag:nowhere.org:custom/default-1.0.0>
  b: {}
    """
    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
        assert 'a' in ff.tree['custom']
        assert ff.tree['custom']['a'] == 42
        assert ff.tree['custom']['b']['c'] == 82

    buff.seek(0)
    with asdf.open(buff, extensions=[DefaultTypeExtension()],
                   do_not_fill_defaults=True) as ff:
        assert 'a' not in ff.tree['custom']
        assert 'c' not in ff.tree['custom']['b']
        ff.fill_defaults()
        assert 'a' in ff.tree['custom']
        assert ff.tree['custom']['a'] == 42
        assert 'c' in ff.tree['custom']['b']
        assert ff.tree['custom']['b']['c'] == 82
        ff.remove_defaults()
        assert 'a' not in ff.tree['custom']
        assert 'c' not in ff.tree['custom']['b']


def test_tag_reference_validation():
    class DefaultTypeExtension(CustomExtension):
        @property
        def types(self):
            return [TagReferenceType]

    yaml = """
custom: !<tag:nowhere.org:custom/tag_reference-1.0.0>
  name: "Something"
  things: !core/ndarray-1.0.0
    data: [1, 2, 3]
    """
    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
        custom = ff.tree['custom']
        assert custom['name'] == "Something"
        assert_array_equal(custom['things'], [1, 2, 3])


def test_foreign_tag_reference_validation():
    class ForeignTagReferenceType(types.CustomType):
        name = 'foreign_tag_reference'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

        @classmethod
        def from_tree(cls, tree, ctx):
            node = {}
            node['a'] = yamlutil.tagged_tree_to_custom_tree(tree['a'], ctx)
            node['b'] = yamlutil.tagged_tree_to_custom_tree(tree['b'], ctx)
            return node

    class ForeignTypeExtension(CustomExtension):
        @property
        def types(self):
            return [TagReferenceType, ForeignTagReferenceType]

    yaml = """
custom: !<tag:nowhere.org:custom/foreign_tag_reference-1.0.0>
  a: !<tag:nowhere.org:custom/tag_reference-1.0.0>
    name: "Something"
    things: !core/ndarray-1.0.0
      data: [1, 2, 3]
  b: !<tag:nowhere.org:custom/tag_reference-1.0.0>
name: "Anything" things: !core/ndarray-1.0.0 data: [4, 5, 6] """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff, extensions=ForeignTypeExtension()) as ff: a = ff.tree['custom']['a'] b = ff.tree['custom']['b'] assert a['name'] == 'Something' assert_array_equal(a['things'], [1, 2, 3]) assert b['name'] == 'Anything' assert_array_equal(b['things'], [4, 5, 6]) def test_self_reference_resolution(): r = resolver.Resolver(CustomExtension().url_mapping, 'url') s = schema.load_schema( helpers.get_test_data_path('self_referencing-1.0.0.yaml'), resolver=r, resolve_references=True) assert '$ref' not in repr(s) assert s['anyOf'][1] == s['anyOf'][0] def test_schema_resolved_via_entry_points(): """Test that entry points mappings to core schema works""" r = extension.get_default_resolver() tag = types.format_tag('stsci.edu', 'asdf', '1.0.0', 'fits/fits') url = extension.default_extensions.extension_list.tag_mapping(tag) s = schema.load_schema(url, resolver=r, resolve_references=True) assert tag in repr(s) @pytest.mark.parametrize('use_numpy', [False, True]) def test_large_literals(use_numpy): largeval = 1 << 53 if use_numpy: largeval = np.uint64(largeval) tree = { 'large_int': largeval, } with pytest.raises(ValidationError): asdf.AsdfFile(tree) tree = { 'large_list': [largeval], } with pytest.raises(ValidationError): asdf.AsdfFile(tree) tree = { 'large_array': np.array([largeval], np.uint64) } ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) ff.set_array_storage(ff.tree['large_array'], 'inline') buff = io.BytesIO() with pytest.raises(ValidationError): ff.write_to(buff) print(buff.getvalue()) def test_read_large_literal(): value = 1 << 64 yaml = """integer: {}""".format(value) buff = helpers.yaml_to_asdf(yaml) with pytest.warns(UserWarning) as w: with asdf.open(buff) as af: assert af['integer'] == value # We get two warnings: one for validation time, and one when defaults # are filled. It seems like we could improve this architecture, though... assert len(w) == 2 assert str(w[0].message).startswith('Invalid integer literal value') assert str(w[1].message).startswith('Invalid integer literal value') def test_nested_array(): s = { 'type': 'object', 'properties': { 'stuff': { 'type': 'array', 'items': { 'type': 'array', 'items': [ { 'type': 'integer' }, { 'type': 'string' }, { 'type': 'number' }, ], 'minItems': 3, 'maxItems': 3 } } } } good = dict(stuff=[[1, 'hello', 2], [4, 'world', 9.7]]) schema.validate(good, schema=s) bads = [ dict(stuff=[[1, 2, 3]]), dict(stuff=[12,'dldl']), dict(stuff=[[12, 'dldl']]), dict(stuff=[[1, 'hello', 2], [4, 5]]), dict(stuff=[[1, 'hello', 2], [4, 5, 6]]) ] for b in bads: with pytest.raises(ValidationError): schema.validate(b, schema=s) def test_nested_array_yaml(tmpdir): schema_def = """ %YAML 1.1 --- type: object properties: stuff: type: array items: type: array items: - type: integer - type: string - type: number minItems: 3 maxItems: 3 ... 
""" schema_path = tmpdir.join('nested.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path)) schema.check_schema(schema_tree) good = dict(stuff=[[1, 'hello', 2], [4, 'world', 9.7]]) schema.validate(good, schema=schema_tree) bads = [ dict(stuff=[[1, 2, 3]]), dict(stuff=[12,'dldl']), dict(stuff=[[12, 'dldl']]), dict(stuff=[[1, 'hello', 2], [4, 5]]), dict(stuff=[[1, 'hello', 2], [4, 5, 6]]) ] for b in bads: with pytest.raises(ValidationError): schema.validate(b, schema=schema_tree) def test_type_missing_dependencies(): astropy = pytest.importorskip('astropy', '3.0.0') class MissingType(types.CustomType): name = 'missing' organization = 'nowhere.org' version = (1, 1, 0) standard = 'custom' types = ['asdfghjkl12345.foo'] requires = ["ASDFGHJKL12345"] class DefaultTypeExtension(CustomExtension): @property def types(self): return [MissingType] yaml = """ custom: ! b: {foo: 42} """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as w: with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: assert ff.tree['custom']['b']['foo'] == 42 assert len(w) == 1 def test_assert_roundtrip_with_extension(tmpdir): called_custom_assert_equal = [False] class CustomType(dict, types.CustomType): name = 'custom_flow' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' @classmethod def assert_equal(cls, old, new): called_custom_assert_equal[0] = True class CustomTypeExtension(CustomExtension): @property def types(self): return [CustomType] tree = { 'custom': CustomType({'a': 42, 'b': 43}) } def check(ff): assert isinstance(ff.tree['custom'], CustomType) with pytest.warns(None) as warnings: helpers.assert_roundtrip_tree( tree, tmpdir, extensions=[CustomTypeExtension()]) assert len(warnings) == 0, helpers.display_warnings(warnings) assert called_custom_assert_equal[0] is True def test_custom_validation_bad(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema.yaml') asdf_file = os.path.join(str(tmpdir), 'out.asdf') # This tree does not conform to the custom schema tree = {'stuff': 42, 'other_stuff': 'hello'} # Creating file without custom schema should pass with asdf.AsdfFile(tree) as ff: ff.write_to(asdf_file) # Creating file using custom schema should fail with pytest.raises(ValidationError): with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: pass # Opening file without custom schema should pass with asdf.open(asdf_file) as ff: pass # Opening file with custom schema should fail with pytest.raises(ValidationError): with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff: pass def test_custom_validation_good(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema.yaml') asdf_file = os.path.join(str(tmpdir), 'out.asdf') # This tree conforms to the custom schema tree = { 'foo': {'x': 42, 'y': 10}, 'bar': {'a': 'hello', 'b': 'banjo'} } with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: ff.write_to(asdf_file) with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff: pass def test_custom_validation_pathlib(tmpdir): """ Make sure custom schema paths can be pathlib.Path objects See https://github.com/spacetelescope/asdf/issues/653 for discussion. 
""" from pathlib import Path custom_schema_path = Path(helpers.get_test_data_path('custom_schema.yaml')) asdf_file = os.path.join(str(tmpdir), 'out.asdf') # This tree conforms to the custom schema tree = { 'foo': {'x': 42, 'y': 10}, 'bar': {'a': 'hello', 'b': 'banjo'} } with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: ff.write_to(asdf_file) with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff: pass def test_custom_validation_with_definitions_good(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml') asdf_file = os.path.join(str(tmpdir), 'out.asdf') # This tree conforms to the custom schema tree = { 'thing': { 'biz': 'hello', 'baz': 'world' } } with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: ff.write_to(asdf_file) with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff: pass def test_custom_validation_with_definitions_bad(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml') asdf_file = os.path.join(str(tmpdir), 'out.asdf') # This tree does NOT conform to the custom schema tree = { 'forb': { 'biz': 'hello', 'baz': 'world' } } # Creating file without custom schema should pass with asdf.AsdfFile(tree) as ff: ff.write_to(asdf_file) # Creating file with custom schema should fail with pytest.raises(ValidationError): with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: pass # Opening file without custom schema should pass with asdf.open(asdf_file) as ff: pass # Opening file with custom schema should fail with pytest.raises(ValidationError): with asdf.open(asdf_file, custom_schema=custom_schema_path) as ff: pass def test_nonexistent_tag(tmpdir): """ This tests the case where a node is tagged with a type that apparently comes from an extension that is known, but the type itself can't be found. This could occur when a more recent version of an installed package provides the new type, but an older version of the package is installed. ASDF should still be able to open the file in this case, but it won't be able to restore the type. The bug that prompted this test results from attempting to load a schema file that doesn't exist, which is why this test belongs in this file. """ # This shouldn't ever happen, but it's a useful test case yaml = """ a: !core/doesnt_exist-1.0.0 hello """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as w: with asdf.open(buff) as af: assert str(af['a']) == 'hello' # Currently there are 3 warnings since one occurs on each of the # validation passes. It would be good to consolidate these # eventually assert len(w) == 3, helpers.display_warnings(w) assert str(w[0].message).startswith("Unable to locate schema file") assert str(w[1].message).startswith("Unable to locate schema file") assert str(w[2].message).startswith(af['a']._tag) # This is a more realistic case since we're using an external extension yaml = """ a: ! 
hello """ buff = helpers.yaml_to_asdf(yaml) with pytest.warns(None) as w: with asdf.open(buff, extensions=CustomExtension()) as af: assert str(af['a']) == 'hello' assert len(w) == 3, helpers.display_warnings(w) assert str(w[0].message).startswith("Unable to locate schema file") assert str(w[1].message).startswith("Unable to locate schema file") assert str(w[2].message).startswith(af['a']._tag) asdf-2.5.1/asdf/tests/test_stream.py0000644000446400020070000001321013567314375021626 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import numpy as np from numpy.testing import assert_array_equal import pytest import asdf from asdf import generic_io from asdf import stream def test_stream(): buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.float64).tostring()) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_write_nothing(): # Test that if you write nothing, you get a zero-length array buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (0, 6, 2) def test_stream_twice(): # Test that if you write nothing, you get a zero-length array buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.uint8), 'stream2': stream.Stream([12, 2], np.uint8) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.uint8).tostring()) buff.seek(0) ff = asdf.open(buff) assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) assert ff.tree['stream2'].shape == (50, 12, 2) def test_stream_with_nonstream(): buff = io.BytesIO() tree = { 'nonstream': np.array([1, 2, 3, 4], np.int64), 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) # Since we're testing with small arrays, force this array to be stored in # an internal block rather than letting it be automatically put inline. ff.set_array_storage(ff['nonstream'], 'internal') ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.float64).tostring()) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) assert len(ff.blocks) == 2 for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_real_file(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') tree = { 'nonstream': np.array([1, 2, 3, 4], np.int64), 'stream': stream.Stream([6, 2], np.float64) } with open(path, 'wb') as fd: ff = asdf.AsdfFile(tree) # Since we're testing with small arrays, force this array to be stored # in an internal block rather than letting it be automatically put # inline. 
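        # (The storage override has to be recorded before write_to() runs,
        # since that is when asdf decides how each block is laid out.)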
ff.set_array_storage(ff['nonstream'], 'internal') ff.write_to(fd) for i in range(100): fd.write(np.array([i] * 12, np.float64).tostring()) with asdf.open(path) as ff: assert len(ff.blocks) == 1 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) assert len(ff.blocks) == 2 for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_to_stream(): tree = { 'nonstream': np.array([1, 2, 3, 4], np.int64), 'stream': stream.Stream([6, 2], np.float64) } buff = io.BytesIO() fd = generic_io.OutputStream(buff) ff = asdf.AsdfFile(tree) ff.write_to(fd) for i in range(100): fd.write(np.array([i] * 12, np.float64).tostring()) buff.seek(0) with asdf.open(generic_io.InputStream(buff, 'r')) as ff: assert len(ff.blocks) == 2 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_array_to_stream(tmpdir): tree = { 'stream': np.array([1, 2, 3, 4], np.int64), } buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['stream'], 'streamed') ff.write_to(buff) buff.write(np.array([5, 6, 7, 8], np.int64).tostring()) buff.seek(0) ff = asdf.open(generic_io.InputStream(buff)) assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8]) buff.seek(0) ff2 = asdf.AsdfFile(ff) ff2.write_to(buff) assert b"shape: ['*']" in buff.getvalue() with open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['stream'], 'streamed') ff.write_to(fd) fd.write(np.array([5, 6, 7, 8], np.int64).tostring()) with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff: assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8]) ff2 = asdf.AsdfFile(ff) ff2.write_to(buff) assert b"shape: ['*']" in buff.getvalue() def test_too_many_streams(): tree = { 'stream1': np.array([1, 2, 3, 4], np.int64), 'stream2': np.array([1, 2, 3, 4], np.int64) } ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['stream1'], 'streamed') with pytest.raises(ValueError): ff.set_array_storage(tree['stream2'], 'streamed') def test_stream_repr_and_str(): tree = { 'stream': stream.Stream([16], np.int64) } ff = asdf.AsdfFile(tree) repr(ff.tree['stream']) str(ff.tree['stream']) asdf-2.5.1/asdf/tests/test_versioning.py0000644000446400020070000001550013567314375022522 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import pytest from itertools import combinations from asdf.versioning import AsdfVersion, AsdfSpec def test_version_constructor(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion((1,0,0)) ver2 = AsdfVersion([1,0,0]) assert str(ver0) == '1.0.0' assert str(ver1) == '1.0.0' assert str(ver2) == '1.0.0' def test_version_and_version_equality(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion('1.0.0') assert ver0 is not ver1 assert ver0 == ver1 assert ver1 == ver0 assert not (ver0 != ver1) assert not (ver1 != ver0) def test_version_and_string_equality(): version = AsdfVersion('1.0.0') string_ver = '1.0.0' assert version == string_ver assert string_ver == version assert not (version != string_ver) assert not (string_ver != version) def test_version_and_tuple_equality(): version = AsdfVersion('1.0.0') tuple_ver = (1,0,0) assert version == tuple_ver assert tuple_ver == version assert not (version != tuple_ver) assert not (tuple_ver != version) def test_version_and_version_inequality(): ver0 = 
AsdfVersion('1.0.0') ver1 = AsdfVersion('1.0.1') ver2 = AsdfVersion('1.1.0') ver3 = AsdfVersion('1.1.1') ver4 = AsdfVersion('2.0.0') ver5 = AsdfVersion('2.0.1') ver6 = AsdfVersion('2.1.0') ver7 = AsdfVersion('2.1.1') versions = [ver0, ver1, ver2, ver3, ver4, ver5, ver6, ver7] for x,y in combinations(versions, 2): assert not (x == y) assert x != y assert ver0 < ver1 < ver2 < ver3 < ver4 < ver5 < ver6 < ver7 assert ver7 > ver6 > ver5 > ver4 > ver3 > ver2 > ver1 > ver0 assert (ver0 < ver1 < ver2 < ver4 < ver3 < ver5 < ver6 < ver7) == False assert (ver7 > ver6 > ver5 > ver3 > ver4 > ver2 > ver1 > ver0) == False assert ver0 <= ver1 <= ver2 <= ver3 <= ver4 <= ver5 <= ver6 <= ver7 assert ver7 >= ver6 >= ver5 >= ver4 >= ver3 >= ver2 >= ver1 >= ver0 def test_version_and_string_inequality(): version = AsdfVersion('2.0.0') assert version > '1.0.0' assert version > '1.0.1' assert version > '1.1.0' assert version > '1.1.1' assert (version > '2.0.0') == False assert (version < '2.0.0') == False assert version < '2.0.1' assert version < '2.1.0' assert version < '2.1.1' assert version >= '1.0.0' assert version >= '1.0.1' assert version >= '1.1.0' assert version >= '1.1.1' assert version >= '2.0.0' assert version <= '2.0.0' assert version <= '2.0.1' assert version <= '2.1.0' assert version <= '2.1.1' assert '1.0.0' < version assert '1.0.1' < version assert '1.1.0' < version assert '1.1.1' < version assert ('2.0.0' < version) == False assert ('2.0.0' > version) == False assert '2.0.1' > version assert '2.1.0' > version assert '2.1.1' > version assert '1.0.0' <= version assert '1.0.1' <= version assert '1.1.0' <= version assert '1.1.1' <= version assert '2.0.0' <= version assert '2.0.0' >= version assert '2.0.1' >= version assert '2.1.0' >= version assert '2.1.1' >= version def test_version_and_tuple_inequality(): version = AsdfVersion('2.0.0') assert version > (1,0,0) assert version > (1,0,1) assert version > (1,1,0) assert version > (1,1,1) assert (version > (2,0,0)) == False assert (version < (2,0,0)) == False assert version < (2,0,1) assert version < (2,1,0) assert version < (2,1,1) assert version >= (1,0,0) assert version >= (1,0,1) assert version >= (1,1,0) assert version >= (1,1,1) assert version >= (2,0,0) assert version <= (2,0,0) assert version <= (2,0,1) assert version <= (2,1,0) assert version <= (2,1,1) assert (1,0,0) < version assert (1,0,1) < version assert (1,1,0) < version assert (1,1,1) < version assert ((2,0,0) < version) == False assert ((2,0,0) > version) == False assert (2,0,1) > version assert (2,1,0) > version assert (2,1,1) > version assert (1,0,0) <= version assert (1,0,1) <= version assert (1,1,0) <= version assert (1,1,1) <= version assert (2,0,0) <= version assert (2,0,0) >= version assert (2,0,1) >= version assert (2,1,0) >= version assert (2,1,1) >= version def test_spec_version_match(): spec = AsdfSpec('>=1.1.0') assert spec.match(AsdfVersion('1.1.0')) assert spec.match(AsdfVersion('1.2.0')) assert not spec.match(AsdfVersion('1.0.0')) assert not spec.match(AsdfVersion('1.0.9')) def test_spec_version_select(): spec = AsdfSpec('>=1.1.0') versions = [AsdfVersion(x) for x in ['1.0.0', '1.0.9', '1.1.0', '1.2.0']] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_version_filter(): spec = AsdfSpec('>=1.1.0') versions = [AsdfVersion(x) for x in ['1.0.0', '1.0.9', '1.1.0', '1.2.0']] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_string_match(): spec = 
AsdfSpec('>=1.1.0') assert spec.match('1.1.0') assert spec.match('1.2.0') assert not spec.match('1.0.0') assert not spec.match('1.0.9') def test_spec_string_select(): spec = AsdfSpec('>=1.1.0') versions = ['1.0.0', '1.0.9', '1.1.0', '1.2.0'] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_string_filter(): spec = AsdfSpec('>=1.1.0') versions = ['1.0.0', '1.0.9', '1.1.0', '1.2.0'] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_tuple_match(): spec = AsdfSpec('>=1.1.0') assert spec.match((1,1,0)) assert spec.match((1,2,0)) assert not spec.match((1,0,0)) assert not spec.match((1,0,9)) def test_spec_tuple_select(): spec = AsdfSpec('>=1.1.0') versions = [(1,0,0), (1,0,9), (1,1,0), (1,2,0)] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_tuple_filter(): spec = AsdfSpec('>=1.1.0') versions = [(1,0,0), (1,0,9), (1,1,0), (1,2,0)] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_equal(): """Make sure that equality means match""" spec = AsdfSpec('>=1.2.0') version0 = AsdfVersion('1.1.0') version1 = AsdfVersion('1.3.0') assert spec != version0 assert version0 != spec assert spec == version1 assert version1 == spec assert spec != '1.1.0' assert '1.1.0' != spec assert spec == '1.3.0' assert '1.3.0' == spec assert spec != (1, 1, 0) assert (1, 1, 0) != spec assert spec == (1, 3, 0) assert (1, 3, 0) == spec asdf-2.5.1/asdf/tests/test_yaml.py0000644000446400020070000001615713567314375021312 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io from collections import namedtuple, OrderedDict from typing import NamedTuple import numpy as np import pytest import yaml import asdf from asdf import tagged from asdf import treeutil from . import helpers def test_ordered_dict(tmpdir): # Test that we can write out and read in ordered dicts. tree = { "ordered_dict": OrderedDict( [('first', 'foo'), ('second', 'bar'), ('third', 'baz')]), "unordered_dict": { 'first': 'foo', 'second': 'bar', 'third': 'baz' } } def check_asdf(asdf): tree = asdf.tree assert isinstance(tree['ordered_dict'], OrderedDict) assert list(tree['ordered_dict'].keys()) == ['first', 'second', 'third'] assert not isinstance(tree['unordered_dict'], OrderedDict) assert isinstance(tree['unordered_dict'], dict) def check_raw_yaml(content): assert b'OrderedDict' not in content helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, raw_yaml_check_func=check_raw_yaml) def test_unicode_write(tmpdir): # We want to write unicode out as regular utf-8-encoded # characters, not as escape sequences tree = { "ɐʇɐp‾ǝpoɔıun": 42, "ascii_only": "this is ascii" } def check_asdf(asdf): assert "ɐʇɐp‾ǝpoɔıun" in asdf.tree assert isinstance(asdf.tree['ascii_only'], str) def check_raw_yaml(content): # Ensure that unicode is written out as UTF-8 without escape # sequences assert "ɐʇɐp‾ǝpoɔıun".encode('utf-8') in content # Ensure that the unicode "tag" is not used assert b"unicode" not in content helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, raw_yaml_check_func=check_raw_yaml) def test_arbitrary_python_object(): # Putting "just any old" Python object in the tree should raise an # exception. 
class Foo: pass tree = {'object': Foo()} buff = io.BytesIO() ff = asdf.AsdfFile(tree) with pytest.raises(yaml.YAMLError): ff.write_to(buff) def run_tuple_test(tree, tmpdir): def check_asdf(asdf): assert isinstance(asdf.tree['val'], list) def check_raw_yaml(content): assert b'tuple' not in content # Ignore these warnings for the tests that don't actually test the warning init_options = dict(ignore_implicit_conversion=True) helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, raw_yaml_check_func=check_raw_yaml, init_options=init_options) def test_python_tuple(tmpdir): # We don't want to store tuples as tuples, because that's not a # built-in YAML data type. This test ensures that they are # converted to lists. tree = { "val": (1, 2, 3) } run_tuple_test(tree, tmpdir) def test_named_tuple_collections(tmpdir): # Ensure that we are able to serialize a collections.namedtuple. nt = namedtuple("TestNamedTuple1", ("one", "two", "three")) tree = { "val": nt(1, 2, 3) } run_tuple_test(tree, tmpdir) def test_named_tuple_typing(tmpdir): # Ensure that we are able to serialize a typing.NamedTuple. nt = NamedTuple("TestNamedTuple2", (("one", int), ("two", int), ("three", int))) tree = { "val": nt(1, 2, 3) } run_tuple_test(tree, tmpdir) def test_named_tuple_collections_recursive(tmpdir): nt = namedtuple("TestNamedTuple3", ("one", "two", "three")) tree = { "val": nt(1, 2, np.ones(3)) } def check_asdf(asdf): assert (asdf.tree['val'][2] == np.ones(3)).all() init_options = dict(ignore_implicit_conversion=True) helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, init_options=init_options) def test_named_tuple_typing_recursive(tmpdir): nt = NamedTuple("TestNamedTuple4", (("one", int), ("two", int), ("three", np.ndarray))) tree = { "val": nt(1, 2, np.ones(3)) } def check_asdf(asdf): assert (asdf.tree['val'][2] == np.ones(3)).all() init_options = dict(ignore_implicit_conversion=True) helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, init_options=init_options) def test_implicit_conversion_warning(): nt = namedtuple("TestTupleWarning", ("one", "two", "three")) tree = { "val": nt(1, 2, np.ones(3)) } with pytest.warns(UserWarning, match="Failed to serialize instance"): with asdf.AsdfFile(tree) as af: pass with pytest.warns(None) as w: with asdf.AsdfFile(tree, ignore_implicit_conversion=True) as af: assert len(w) == 0 @pytest.mark.xfail(reason='pyyaml has a bug and does not support tuple keys') def test_python_tuple_key(tmpdir): """ This tests whether tuple keys are round-tripped properly. As of this writing, this does not work in pyyaml but does work in ruamel.yaml. If/when we decide to switch to ruamel.yaml, this test should pass. """ tree = { (42, 1): 'foo' } helpers.assert_roundtrip_tree(tree, tmpdir) def test_tags_removed_after_load(tmpdir): tree = { "foo": ["bar", (1, 2, None)] } def check_asdf(asdf): for node in treeutil.iter_tree(asdf.tree): if node != asdf.tree: assert not isinstance(node, tagged.Tagged) helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf) def test_explicit_tags(): yaml = """#ASDF {} %YAML 1.1 --- !<tag:stsci.edu:asdf/core/asdf-1.1.0> foo: !<tag:stsci.edu:asdf/core/ndarray-1.0.0> [1, 2, 3] ... """.format(asdf.versioning.default_version) # Check that fully qualified explicit tags work buff = helpers.yaml_to_asdf(yaml, yaml_headers=False) with asdf.open(buff) as ff: assert all(ff.tree['foo'] == [1, 2, 3]) def test_yaml_internal_reference(tmpdir): # Test that YAML internal references (anchors and aliases) work, # as well as recursive data structures.
d = { 'foo': '2', } d['bar'] = d l = [] l.append(l) tree = { 'first': d, 'second': d, 'list': l } def check_yaml(content): assert b'list:--&id002-*id002' in b''.join(content.split()) helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_yaml) def test_yaml_nan_inf(): tree = { 'a': np.nan, 'b': np.inf, 'c': -np.inf } buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.open(buff) as ff: assert np.isnan(ff.tree['a']) assert np.isinf(ff.tree['b']) assert np.isinf(ff.tree['c']) def test_tag_object(): class SomeObject: pass tag = 'tag:nowhere.org:none/some/thing' instance = tagged.tag_object(tag, SomeObject()) assert instance._tag == tag asdf-2.5.1/asdf/treeutil.py0000644000446400020070000001337713567314375017775 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Utility functions for managing tree-like data structures. """ import inspect import warnings from .tagged import tag_object def walk(top, callback): """ Walk through a tree of objects, calling a given function at each node. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. callback : callable A function to call at each node in the tree. The callback is called on an instance after all of its children have been visited (depth-first order). Returns ------- None This function is called only for the side effects of ``callback``. """ for x in iter_tree(top): callback(x) def iter_tree(top): """ Iterate over all nodes in a tree, in depth-first order. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. Yields ------ node : object Each node in the tree. A node is yielded only after all of its children have been visited (depth-first order). """ seen = set() def recurse(tree): tree_id = id(tree) if tree_id in seen: return if isinstance(tree, (list, tuple)): seen.add(tree_id) for val in tree: for sub in recurse(val): yield sub seen.remove(tree_id) elif isinstance(tree, dict): seen.add(tree_id) for val in tree.values(): for sub in recurse(val): yield sub seen.remove(tree_id) yield tree return recurse(top) def walk_and_modify(top, callback, ignore_implicit_conversion=False): """Modify a tree by walking it with a callback function. It also has the effect of doing a deep copy. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. callback : callable A function to call at each node in the tree. It takes either one or two arguments: - an instance from the tree - a json id (optional) It may return a different instance in order to modify the tree. The json id is the context under which any relative URLs should be resolved. It may be `None` if no ids are in the file. The callback is called on an instance after all of its children have been visited (depth-first order). ignore_implicit_conversion : bool Controls whether warnings should be issued when implicitly converting a given type instance in the tree into a serializable object. The primary case for this is currently `namedtuple`. Defaults to `False`. Returns ------- tree : object The modified tree.
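    Examples
    --------
    A minimal sketch of a one-argument callback (the tree and the
    ``double_ints`` helper below are purely illustrative, not part of the
    test suite):

    >>> from asdf.treeutil import walk_and_modify
    >>> def double_ints(node):
    ...     # Double every integer leaf; leave containers and other
    ...     # nodes untouched.
    ...     return node * 2 if isinstance(node, int) else node
    >>> walk_and_modify({'a': 1, 'b': [2, 3]}, double_ints)
    {'a': 2, 'b': [4, 6]}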
""" # For speed reasons, there are two different versions of the inner # function seen = set() def recurse(tree): id_tree = id(tree) if id_tree in seen: return tree if isinstance(tree, dict): result = tree.__class__() seen.add(id_tree) for key, val in tree.items(): val = recurse(val) if val is not None: result[key] = val seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) elif isinstance(tree, (list, tuple)): seen.add(id_tree) contents = [recurse(val) for val in tree] try: result = tree.__class__(contents) except TypeError: # the derived class' signature is different # erase the type result = contents seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) else: result = tree result = callback(result) return result def recurse_with_json_ids(tree, json_id): id_tree = id(tree) if id_tree in seen: return tree if isinstance(tree, dict): if 'id' in tree: json_id = tree['id'] result = tree.__class__() seen.add(id_tree) for key, val in tree.items(): val = recurse_with_json_ids(val, json_id) if val is not None: result[key] = val seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) elif isinstance(tree, (list, tuple)): seen.add(id_tree) contents = [recurse_with_json_ids(val, json_id) for val in tree] try: result = tree.__class__(contents) except TypeError: # The derived class signature is different, so simply store the # list representing the contents. Currently this is primarly # intended to handle namedtuple and NamedTuple instances. if not ignore_implicit_conversion: msg = "Failed to serialize instance of {}, converting to list instead" warnings.warn(msg.format(type(tree))) result = contents seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) else: result = tree result = callback(result, json_id) return result if callback.__code__.co_argcount == 2: return recurse_with_json_ids(top, None) else: return recurse(top) asdf-2.5.1/asdf/type_index.py0000644000446400020070000003574013567314375020316 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import bisect import warnings from functools import lru_cache from collections import OrderedDict from . import util from .versioning import (AsdfVersion, get_version_map, default_version, split_tag_version, join_tag_version) __all__ = ['AsdfTypeIndex'] _BASIC_PYTHON_TYPES = [str, int, float, list, dict, tuple] class _AsdfWriteTypeIndex: """ The _AsdfWriteTypeIndex is a helper class for AsdfTypeIndex that manages an index of types for writing out ASDF files, i.e. from converting from custom types to tagged_types. It is not always the inverse of the mapping from tags to custom types, since there are likely multiple versions present for a given tag. This uses the `version_map.yaml` file that ships with the ASDF standard to figure out which schemas correspond to a particular version of the ASDF standard. An AsdfTypeIndex manages multiple _AsdfWriteTypeIndex instances for each version the user may want to write out, and they are instantiated on-demand. If version is ``'latest'``, it will just use the highest-numbered versions of each of the schemas. This is currently only used to aid in testing. In the future, this may be renamed to _ExtensionWriteTypeIndex since it is not specific to classes that inherit `AsdfType`. 
""" _version_map = None def __init__(self, version, index): self._version = version self._type_by_cls = {} self._type_by_name = {} self._type_by_subclasses = {} self._class_by_subclass = {} self._types_with_dynamic_subclasses = {} self._extension_by_cls = {} self._extensions_used = set() try: version_map = get_version_map(self._version) core_version_map = version_map['core'] standard_version_map = version_map['standard'] except ValueError: raise ValueError( "Don't know how to write out ASDF version {0}".format( self._version)) # Process all types defined in the ASDF version map. It is important to # make sure that tags that are associated with the core part of the # standard are processed first in order to handle subclasses properly. for name, _version in core_version_map.items(): self._add_by_tag(index, name, AsdfVersion(_version)) for name, _version in standard_version_map.items(): self._add_by_tag(index, name, AsdfVersion(_version)) # Now add any extension types that aren't known to the ASDF standard. # This expects that all types defined by ASDF will be encountered # before any types that are defined by external packages. This # allows external packages to override types that are also defined # by ASDF. The ordering is guaranteed due to the use of OrderedDict # for _versions_by_type_name, and due to the fact that the built-in # extension will always be processed first. for name, versions in index._versions_by_type_name.items(): if name not in self._type_by_name: self._add_by_tag(index, name, versions[-1]) for asdftype in index._unnamed_types: self._add_all_types(index, asdftype) def _should_overwrite(self, cls, new_type): existing_type = self._type_by_cls[cls] # Types that are provided by extensions from other packages should # only override the type index corresponding to the latest version # of ASDF. if existing_type.tag_base() != new_type.tag_base(): return self._version == default_version return True def _add_type_to_index(self, index, cls, typ): if cls in self._type_by_cls and not self._should_overwrite(cls, typ): return self._type_by_cls[cls] = typ self._extension_by_cls[cls] = index._extension_by_type[typ] def _add_subclasses(self, index, typ, asdftype): for subclass in util.iter_subclasses(typ): # Do not overwrite the tag type for an existing subclass if the # new tag serializes a class that is higher in the type # hierarchy than the existing subclass. if subclass in self._class_by_subclass: if issubclass(self._class_by_subclass[subclass], typ): # Allow for cases where a subclass tag is being # overridden by a tag from another extension. 
if (self._extension_by_cls[subclass] == index._extension_by_type[asdftype]): continue self._class_by_subclass[subclass] = typ self._type_by_subclasses[subclass] = asdftype self._extension_by_cls[subclass] = index._extension_by_type[asdftype] def _add_all_types(self, index, asdftype): self._add_type_to_index(index, asdftype, asdftype) for typ in asdftype.types: self._add_type_to_index(index, typ, asdftype) self._add_subclasses(index, typ, asdftype) if asdftype.handle_dynamic_subclasses: for typ in asdftype.types: self._types_with_dynamic_subclasses[typ] = asdftype def _add_by_tag(self, index, name, version): tag = join_tag_version(name, version) if tag in index._type_by_tag: asdftype = index._type_by_tag[tag] self._type_by_name[name] = asdftype self._add_all_types(index, asdftype) def _mark_used_extension(self, custom_type): self._extensions_used.add(self._extension_by_cls[custom_type]) def _process_dynamic_subclass(self, custom_type): for key, val in self._types_with_dynamic_subclasses.items(): if issubclass(custom_type, key): self._type_by_cls[custom_type] = val self._mark_used_extension(key) return val return None def from_custom_type(self, custom_type): """ Given a custom type, return the corresponding `ExtensionType` definition. """ asdftype = None # Try to find an exact class match first... try: asdftype = self._type_by_cls[custom_type] except KeyError: # ...failing that, match any subclasses try: asdftype = self._type_by_subclasses[custom_type] except KeyError: # ...failing that, try any subclasses that we couldn't # cache in _type_by_subclasses. This generally only # includes classes that are created dynamically post # Python-import, e.g. astropy.modeling._CompoundModel # subclasses. return self._process_dynamic_subclass(custom_type) if asdftype is not None: extension = self._extension_by_cls.get(custom_type) if extension is not None: self._mark_used_extension(custom_type) else: # Handle the case where the dynamic subclass was identified as # a proper subclass above, but it has not yet been registered # as such. self._process_dynamic_subclass(custom_type) return asdftype class AsdfTypeIndex: """ An index of the known `ExtensionType` classes. In the future this class may be renamed to ExtensionTypeIndex, since it is not specific to classes that inherit `AsdfType`. """ def __init__(self): self._write_type_indices = {} self._type_by_tag = {} # Use OrderedDict here to preserve the order in which types are added # to the type index. Since the ASDF built-in extension is always # processed first, this ensures that types defined by external packages # will always override corresponding types that are defined by ASDF # itself. However, if two different external packages define tags for # the same type, the result is currently undefined. self._versions_by_type_name = OrderedDict() self._best_matches = {} self._real_tag = {} self._unnamed_types = set() self._hooks_by_type = {} self._all_types = set() self._has_warned = {} self._extension_by_type = {} def add_type(self, asdftype, extension): """ Add a type to the index. 
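        Parameters
        ----------
        asdftype : subclass of `ExtensionType`
            The tag type class to add to the index.
        extension : object
            The extension that provided ``asdftype`` (typically an
            ``AsdfExtension`` instance). It is recorded so that
            `get_extensions_used` can later report which extensions were
            actually exercised when writing a file.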
""" self._all_types.add(asdftype) self._extension_by_type[asdftype] = extension if asdftype.yaml_tag is None and asdftype.name is None: return if isinstance(asdftype.name, list): yaml_tags = [asdftype.make_yaml_tag(name) for name in asdftype.name] elif isinstance(asdftype.name, str): yaml_tags = [asdftype.yaml_tag] elif asdftype.name is None: yaml_tags = [] else: raise TypeError("name must be a string, list or None") for yaml_tag in yaml_tags: self._type_by_tag[yaml_tag] = asdftype name, version = split_tag_version(yaml_tag) versions = self._versions_by_type_name.get(name) if versions is None: self._versions_by_type_name[name] = [version] else: idx = bisect.bisect_left(versions, version) if idx == len(versions) or versions[idx] != version: versions.insert(idx, version) if not len(yaml_tags): self._unnamed_types.add(asdftype) def from_custom_type(self, custom_type, version=default_version): """ Given a custom type, return the corresponding `ExtensionType` definition. """ # Basic Python types should not ever have an AsdfType associated with # them. if custom_type in _BASIC_PYTHON_TYPES: return None write_type_index = self._write_type_indices.get(str(version)) if write_type_index is None: write_type_index = _AsdfWriteTypeIndex(version, self) self._write_type_indices[version] = write_type_index return write_type_index.from_custom_type(custom_type) def _get_version_mismatch(self, name, version, latest_version): warning_string = None if (latest_version.major, latest_version.minor) != \ (version.major, version.minor): warning_string = \ "'{}' with version {} found in file{{}}, but latest " \ "supported version is {}".format( name, version, latest_version) return warning_string def _warn_version_mismatch(self, ctx, tag, warning_string, fname): if warning_string is not None: # Ensure that only a single warning occurs per tag per AsdfFile # TODO: If it is useful to only have a single warning per file on # disk, then use `fname` in the key instead of `ctx`. if not (ctx, tag) in self._has_warned: warnings.warn(warning_string.format(fname)) self._has_warned[(ctx, tag)] = True def fix_yaml_tag(self, ctx, tag, ignore_version_mismatch=True): """ Given a YAML tag, adjust it to the best supported version. If there is no exact match, this finds the newest version understood that is still less than the version in file. Or, the earliest understood version if none are less than the version in the file. If ``ignore_version_mismatch==False``, this function raises a warning if it could not find a match where the major and minor numbers are the same. """ warning_string = None name, version = split_tag_version(tag) fname = " '{}'".format(ctx._fname) if ctx._fname else '' if tag in self._type_by_tag: asdftype = self._type_by_tag[tag] # Issue warnings for the case where there exists a class for the # given tag due to the 'supported_versions' attribute being # defined, but this tag is not the latest version of the type. # This prevents 'supported_versions' from affecting the behavior of # warnings that are purely related to YAML validation. 
if not ignore_version_mismatch and hasattr(asdftype, '_latest_version'): warning_string = self._get_version_mismatch( name, version, asdftype._latest_version) self._warn_version_mismatch(ctx, tag, warning_string, fname) return tag if tag in self._best_matches: best_tag, warning_string = self._best_matches[tag] if not ignore_version_mismatch: self._warn_version_mismatch(ctx, tag, warning_string, fname) return best_tag versions = self._versions_by_type_name.get(name) if versions is None: return tag # The versions list is kept sorted, so bisect can be used to # quickly find the best option. i = bisect.bisect_left(versions, version) i = max(0, i - 1) if not ignore_version_mismatch: warning_string = self._get_version_mismatch( name, version, versions[-1]) self._warn_version_mismatch(ctx, tag, warning_string, fname) best_version = versions[i] best_tag = join_tag_version(name, best_version) self._best_matches[tag] = best_tag, warning_string if tag != best_tag: self._real_tag[best_tag] = tag return best_tag def get_real_tag(self, tag): if tag in self._real_tag: return self._real_tag[tag] elif tag in self._type_by_tag: return tag return None def from_yaml_tag(self, ctx, tag): """ From a given YAML tag string, return the corresponding AsdfType definition. """ tag = self.fix_yaml_tag(ctx, tag) return self._type_by_tag.get(tag) @lru_cache(5) def has_hook(self, hook_name): """ Returns `True` if the given hook name exists on any of the managed types. """ for cls in self._all_types: if hasattr(cls, hook_name): return True return False def get_hook_for_type(self, hookname, typ, version=default_version): """ Get the hook function for the given type, if it exists, else return None. """ hooks = self._hooks_by_type.setdefault(hookname, {}) hook = hooks.get(typ, None) if hook is not None: return hook tag = self.from_custom_type(typ, version) if tag is not None: hook = getattr(tag, hookname, None) if hook is not None: hooks[typ] = hook return hook hooks[typ] = None return None def get_extensions_used(self, version=default_version): write_type_index = self._write_type_indices.get(str(version)) if write_type_index is None: return [] return list(write_type_index._extensions_used) asdf-2.5.1/asdf/types.py0000644000446400020070000005025313605165746017305 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import re import inspect import warnings import importlib from collections import defaultdict import six from copy import copy from . import tagged from . import util from .versioning import AsdfVersion, AsdfSpec __all__ = ['format_tag', 'CustomType'] # regex used to parse module name from optional version string MODULE_RE = re.compile(r'([a-zA-Z]+)(-(\d+\.\d+\.\d+))?') class AsdfSubclassProperty(property): pass def format_tag(organization, standard, version, tag_name): """ Format a YAML tag. """ tag = 'tag:{0}:{1}/{2}'.format(organization, standard, tag_name) if version is None: return tag if isinstance(version, AsdfSpec): version = str(version.spec) return "{0}-{1}".format(tag, version) _all_asdftypes = set() def _from_tree_tagged_missing_requirements(cls, tree, ctx): # A special version of AsdfType.from_tree_tagged for when the # required dependencies for an AsdfType are missing. 
plural, verb = ('s', 'are') if len(cls.requires) > 1 else ('', 'is') message = "{0} package{1} {2} required to instantiate '{3}'".format( util.human_list(cls.requires), plural, verb, tree._tag) # This error will be handled by yamlutil.tagged_tree_to_custom_tree, which # will cause a warning to be issued indicating that the tree failed to be # converted. raise TypeError(message) class ExtensionTypeMeta(type): """ Custom class constructor for tag types. """ _import_cache = {} @classmethod def _has_required_modules(cls, requires): for string in requires: has_module = True match = MODULE_RE.match(string) modname, _, version = match.groups() if modname in cls._import_cache: if not cls._import_cache[modname]: return False try: module = importlib.import_module(modname) if version and hasattr(module, '__version__'): if module.__version__ < version: has_module = False except ImportError: has_module = False finally: cls._import_cache[modname] = has_module if not has_module: return False return True @classmethod def _find_in_bases(cls, attrs, bases, name, default=None): if name in attrs: return attrs[name] for base in bases: if hasattr(base, name): return getattr(base, name) return default @property def versioned_siblings(mcls): return getattr(mcls, '__versioned_siblings') or [] def __new__(mcls, name, bases, attrs): requires = mcls._find_in_bases(attrs, bases, 'requires', []) if not mcls._has_required_modules(requires): attrs['from_tree_tagged'] = classmethod( _from_tree_tagged_missing_requirements) attrs['types'] = [] attrs['has_required_modules'] = False else: attrs['has_required_modules'] = True types = mcls._find_in_bases(attrs, bases, 'types', []) new_types = [] for typ in types: if isinstance(typ, str): typ = util.resolve_name(typ) new_types.append(typ) attrs['types'] = new_types cls = super(ExtensionTypeMeta, mcls).__new__(mcls, name, bases, attrs) if hasattr(cls, 'version'): if not isinstance(cls.version, (AsdfVersion, AsdfSpec)): cls.version = AsdfVersion(cls.version) if hasattr(cls, 'name'): if isinstance(cls.name, str): if 'yaml_tag' not in attrs: cls.yaml_tag = cls.make_yaml_tag(cls.name) elif isinstance(cls.name, list): pass elif cls.name is not None: raise TypeError("name must be string or list") if hasattr(cls, 'supported_versions'): if not isinstance(cls.supported_versions, (list, set)): cls.supported_versions = [cls.supported_versions] supported_versions = set() for version in cls.supported_versions: if not isinstance(version, (AsdfVersion, AsdfSpec)): version = AsdfVersion(version) # This should cause an exception for invalid input supported_versions.add(version) # We need to convert back to a list here so that the 'in' operator # uses actual comparison instead of hash equality cls.supported_versions = list(supported_versions) siblings = list() for version in cls.supported_versions: if version != cls.version: new_attrs = copy(attrs) new_attrs['version'] = version new_attrs['supported_versions'] = set() new_attrs['_latest_version'] = cls.version siblings.append( ExtensionTypeMeta. __new__(mcls, name, bases, new_attrs)) setattr(cls, '__versioned_siblings', siblings) return cls class AsdfTypeMeta(ExtensionTypeMeta): """ Keeps track of `AsdfType` subclasses that are created, and stores them in `AsdfTypeIndex`.
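    For example (an illustrative sketch, not one of the real built-in
    types), simply defining ::

        class ExampleType(AsdfType):
            name = 'core/example'
            version = '1.0.0'

    is enough to register the new class, because this metaclass records
    every subclass at class-creation time.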
""" def __new__(mcls, name, bases, attrs): cls = super(AsdfTypeMeta, mcls).__new__(mcls, name, bases, attrs) # Classes using this metaclass get added to the list of built-in # extensions _all_asdftypes.add(cls) return cls class ExtensionType: """ The base class of all custom types in the tree. Besides the attributes defined below, most subclasses will also override `to_tree` and `from_tree`. """ name = None organization = 'stsci.edu' standard = 'asdf' version = (1, 0, 0) supported_versions = set() types = [] handle_dynamic_subclasses = False validators = {} requires = [] yaml_tag = None _subclass_map = {} _subclass_attr_map = defaultdict(lambda: list()) @classmethod def names(cls): """ Returns the name(s) represented by this tag type as a list. While some tag types represent only a single custom type, others represent multiple types. In the latter case, the `name` attribute of the extension is actually a list, not simply a string. This method normalizes the value of `name` by returning a list in all cases. Returns ------- `list` of names represented by this tag type """ if cls.name is None: return None return cls.name if isinstance(cls.name, list) else [cls.name] @classmethod def make_yaml_tag(cls, name, versioned=True): """ Given the name of a type, returns a string representing its YAML tag. Parameters ---------- name : str The name of the type. In most cases this will correspond to the `name` attribute of the tag type. However, it is passed as a parameter since some tag types represent multiple custom types. versioned : bool If `True`, the tag will be versioned. Otherwise, a YAML tag without a version will be returned. Returns ------- `str` representing the YAML tag """ return format_tag( cls.organization, cls.standard, cls.version if versioned else None, name) @classmethod def tag_base(cls): """ Returns the base of the YAML tag for types represented by this class. This method returns the portion of the tag that represents the standard and the organization of any type represented by this class. Returns ------- `str` representing the base of the YAML tag """ return cls.make_yaml_tag('', versioned=False) @classmethod def to_tree(cls, node, ctx): """ Converts instances of custom types into YAML representations. This method should be overridden by custom extension classes in order to define how custom types are serialized into YAML. The method must return a single Python object corresponding to one of the basic YAML types (dict, list, str, or number). However, the types can be nested and combined in order to represent more complex custom types. This method is called as part of the process of writing an `AsdfFile` object. Whenever a custom type (or a subclass of that type) that is listed in the `types` attribute of this class is encountered, this method will be used to serialize that type. The name `to_tree` refers to the act of converting a custom type into part of a YAML object tree. Parameters ---------- node : `object` Instance of a custom type to be serialized. Will be an instance (or an instance of a subclass) of one of the types listed in the `types` attribute of this class. ctx : `AsdfFile` An instance of the `AsdfFile` object that is being written out. Returns ------- A basic YAML type (`dict`, `list`, `str`, `int`, `float`, or `complex`) representing the properties of the custom type to be serialized. These types can be nested in order to represent more complex custom types. 
""" return node.__class__.__bases__[0](node) @classmethod def to_tree_tagged(cls, node, ctx): """ Converts instances of custom types into tagged objects. It is more common for custom tag types to override `to_tree` instead of this method. This method should only be overridden if it is necessary to modify the YAML tag that will be used to tag this object. Parameters ---------- node : `object` Instance of a custom type to be serialized. Will be an instance (or an instance of a subclass) of one of the types listed in the `types` attribute of this class. ctx : `AsdfFile` An instance of the `AsdfFile` object that is being written out. Returns ------- An instance of `asdf.tagged.Tagged`. """ obj = cls.to_tree(node, ctx) yaml_tag = cls.yaml_tag node_cls = type(node) cls_name = node_cls.__name__ if node_cls.__name__ in cls._subclass_map and isinstance(obj, dict): from .tags.core import SubclassMetadata from .yamlutil import custom_tree_to_tagged_tree attribute = cls._subclass_map[cls_name][0] subclass = SubclassMetadata(name=cls_name) obj[attribute] = custom_tree_to_tagged_tree(subclass, ctx) if node_cls in cls._subclass_attr_map: if isinstance(obj, dict): for name, member in cls._subclass_attr_map[node_cls]: obj[name] = member.fget(node) else: # TODO: should this be an exception? Should it be a custom warning type? warnings.warn( "Failed to add subclass attribute(s) to node that is " "not an object (is a {}). No subclass attributes are being " "added (tag={}, subclass={})".format( type(obj).__name__, cls, node_cls) ) return tagged.tag_object(cls.yaml_tag, obj, ctx=ctx) @classmethod def from_tree(cls, tree, ctx): """ Converts basic types representing YAML trees into custom types. This method should be overridden by custom extension classes in order to define how custom types are deserialized from the YAML representation back into their original types. The method will return an instance of the original custom type. This method is called as part of the process of reading an ASDF file in order to construct an `AsdfFile` object. Whenever a YAML subtree is encountered that has a tag that corresponds to the `yaml_tag` property of this class, this method will be used to deserialize that tree back into an instance of the original custom type. Parameters ---------- tree : `object` representing YAML tree An instance of a basic Python type (possibly nested) that corresponds to a YAML subtree. ctx : `AsdfFile` An instance of the `AsdfFile` object that is being constructed. Returns ------- An instance of the custom type represented by this extension class. """ return cls(tree) @classmethod def from_tree_tagged(cls, tree, ctx): """ Converts from tagged tree into custom type. It is more common for extension classes to override `from_tree` instead of this method. This method should only be overridden if it is necessary to access the `_tag` property of the `Tagged` object directly. Parameters ---------- tree : `asdf.tagged.Tagged` object representing YAML tree ctx : `AsdfFile` An instance of the `AsdfFile` object that is being constructed. Returns ------- An instance of the custom type represented by this extension class. """ from .tags.core import SubclassMetadata if isinstance(tree, dict): for k, v in tree.items(): if isinstance(v, SubclassMetadata): tree.pop(k) subclass_name = v['name'] return cls._subclass_map[subclass_name][1](**tree.data) return cls.from_tree(tree.data, ctx) @classmethod def incompatible_version(cls, version): """ Indicates if given version is known to be incompatible with this type. 
If this tag class explicitly identifies compatible versions then this checks whether a given version is compatible or not (see `supported_versions`). Otherwise, all versions are assumed to be compatible. Child classes can override this method to affect how version compatibility for this type is determined. Parameters ---------- version : `str` or `~asdf.versioning.AsdfVersion` The version to test for compatibility. """ if cls.supported_versions: if version not in cls.supported_versions: return True return False @classmethod def subclass(cls, *args, attribute='subclass'): """ Decorator to enable serialization of a subclass of an existing type. Use this method to decorate subclasses of custom types that are already handled by an existing ASDF tag class. This enables subclasses of known types to be properly serialized without having to write an entirely separate tag class for the subclass. This feature can only be used for tagged types where the underlying YAML representation of the type is an object (i.e. a Python `dict`). It will not work for nodes that are basic types. The subclass metadata is stored in a new attribute of the YAML node. By default the attribute name is "subclass", but it is customizable by using the optional `attribute` keyword argument of the decorator. The schema of the base custom type is used for validation. This feature will not work if the base schema disallows additional attributes. It is incumbent upon the user to avoid name conflicts with attributes that already exist in the representation of the base custom class. For example, a base class may use the attribute "subclass" for some other purpose, in which case it would be necessary to provide a different custom attribute name here. Parameters ---------- attribute : `str` Custom attribute name used to store subclass metadata in this node. """ def decorator(subclass): cls._subclass_map[subclass.__name__] = (attribute, subclass) for name, member in inspect.getmembers(subclass): if isinstance(member, AsdfSubclassProperty): cls._subclass_attr_map[subclass].append((name, member)) return subclass return decorator(args[0]) if args else decorator @classmethod def subclass_property(cls, attribute): """ Decorator to enable serialization of custom subclass attributes. Use this decorator to serialize attributes that are specific to a subclass of a custom type that is already handled by an existing ASDF tag class. This decorator will only work on subclasses that have been decorated with the `~asdf.CustomType.subclass` decorator. Methods that are decorated in this way are treated as properties (see `property`). The name of the property **must** correspond to a keyword argument of the subclass constructor. The property will be serialized as a YAML object attribute with the same name. Users are responsible for ensuring that any and all additional subclass properties conform to the schema of the base custom type and do not conflict with existing attributes. """ return AsdfSubclassProperty(attribute) @six.add_metaclass(AsdfTypeMeta) class AsdfType(ExtensionType): """ Base class for all built-in ASDF types. Types that inherit this class will be automatically added to the list of built-ins. This should *not* be used for user-defined extensions. """ @six.add_metaclass(ExtensionTypeMeta) class CustomType(ExtensionType): """ Base class for all user-defined types.
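    A complete minimal type pairs ``to_tree`` with ``from_tree``.
    Continuing the illustrative ``FractionType`` sketch from the
    ``to_tree`` documentation above, the deserialization half would be::

        @classmethod
        def from_tree(cls, tree, ctx):
            # Rebuild the Fraction from the two-element list written
            # by to_tree.
            return fractions.Fraction(tree[0], tree[1])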
""" # These attributes are duplicated here with docstrings since a bug in # sphinx prevents the docstrings of class attributes from being inherited # properly (see https://github.com/sphinx-doc/sphinx/issues/741). The # docstrings are not included anywhere else in the class hierarchy since # this class is the only one exposed in the public API. name = None """ `str` or `list`: The name of the type. """ organization = 'stsci.edu' """ `str`: The organization responsible for the type. """ standard = 'asdf' """ `str`: The standard the type is defined in. """ version = (1, 0, 0) """ `str`, `tuple`, `AsdfVersion`, or `AsdfSpec`: The version of the type. """ supported_versions = set() """ `set`: Versions that explicitly compatible with this extension class. If provided, indicates explicit compatibility with the given set of versions. Other versions of the same schema that are not included in this set will not be converted to custom types with this class. """ types = [] """ `list`: List of types that this extension class can convert to/from YAML. Custom Python types that, when found in the tree, will be converted into basic types for YAML output. Can be either strings referring to the types or the types themselves.""" handle_dynamic_subclasses = False """ `bool`: Indicates whether dynamically generated subclasses can be serialized Flag indicating whether this type is capable of serializing subclasses of any of the types listed in ``types`` that are generated dynamically. """ validators = {} """ `dict`: Mapping JSON Schema keywords to validation functions for jsonschema. Useful if the type defines extra types of validation that can be performed. """ requires = [] """ `list`: Python packages that are required to instantiate the object. """ yaml_tag = None """ `str`: The YAML tag to use for the type. If not provided, it will be automatically generated from name, organization, standard and version. """ has_required_modules = True """ `bool`: Indicates whether modules specified by `requires` are available. NOTE: This value is automatically generated. Do not set it in subclasses as it will be overwritten. """ asdf-2.5.1/asdf/util.py0000644000446400020070000002645313567314375017124 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import inspect import math import struct import types from urllib.parse import urljoin from urllib.request import pathname2url from urllib import parse as urlparse import numpy as np from .extern.decorators import add_common_docstring __all__ = ['human_list', 'get_array_base', 'get_base_uri', 'filepath_to_url', 'iter_subclasses', 'calculate_padding', 'resolve_name'] def human_list(l, separator="and"): """ Formats a list for human readability. Parameters ---------- l : sequence A sequence of strings separator : string, optional The word to use between the last two entries. Default: ``"and"``. Returns ------- formatted_list : string Examples -------- >>> human_list(["vanilla", "strawberry", "chocolate"], "or") 'vanilla, strawberry or chocolate' """ if len(l) == 1: return l[0] else: return ', '.join(l[:-1]) + ' ' + separator + ' ' + l[-1] def get_array_base(arr): """ For a given Numpy array, finds the base array that "owns" the actual data. """ base = arr while isinstance(base.base, np.ndarray): base = base.base return base def get_base_uri(uri): """ For a given URI, return the part without any fragment. 
""" parts = urlparse.urlparse(uri) return urlparse.urlunparse(list(parts[:5]) + ['']) def filepath_to_url(path): """ For a given local file path, return a file:// url. """ return urljoin('file:', pathname2url(path)) def iter_subclasses(cls): """ Returns all subclasses of a class. """ for x in cls.__subclasses__(): yield x for y in iter_subclasses(x): yield y def calculate_padding(content_size, pad_blocks, block_size): """ Calculates the amount of extra space to add to a block given the user's request for the amount of extra space. Care is given so that the total of size of the block with padding is evenly divisible by block size. Parameters ---------- content_size : int The size of the actual content pad_blocks : float or bool If `False`, add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. block_size : int The filesystem block size to use. Returns ------- nbytes : int The number of extra bytes to add for padding. """ if not pad_blocks: return 0 if pad_blocks is True: pad_blocks = 1.1 new_size = content_size * pad_blocks new_size = int((math.ceil( float(new_size) / block_size) + 1) * block_size) return max(new_size - content_size, 0) class BinaryStruct: """ A wrapper around the Python stdlib struct module to define a binary struct more like a dictionary than a tuple. """ def __init__(self, descr, endian='>'): """ Parameters ---------- descr : list of tuple Each entry is a pair ``(name, format)``, where ``format`` is one of the format types understood by `struct`. endian : str, optional The endianness of the struct. Must be ``>`` or ``<``. """ self._fmt = [endian] self._offsets = {} self._names = [] i = 0 for name, fmt in descr: self._fmt.append(fmt) self._offsets[name] = (i, (endian + fmt).encode('ascii')) self._names.append(name) i += struct.calcsize(fmt.encode('ascii')) self._fmt = ''.join(self._fmt).encode('ascii') self._size = struct.calcsize(self._fmt) @property def size(self): """ Return the size of the struct. """ return self._size def pack(self, **kwargs): """ Pack the given arguments, which are given as kwargs, and return the binary struct. """ fields = [0] * len(self._names) for key, val in kwargs.items(): if key not in self._offsets: raise KeyError("No header field '{0}'".format(key)) i = self._names.index(key) fields[i] = val return struct.pack(self._fmt, *fields) def unpack(self, buff): """ Unpack the given binary buffer into the fields. The result is a dictionary mapping field names to values. """ args = struct.unpack_from(self._fmt, buff[:self._size]) return dict(zip(self._names, args)) def update(self, fd, **kwargs): """ Update part of the struct in-place. Parameters ---------- fd : generic_io.GenericIO instance A writable, seekable file descriptor, currently seeked to the beginning of the struct. **kwargs : values The values to update on the struct. """ updates = [] for key, val in kwargs.items(): if key not in self._offsets: raise KeyError("No header field '{0}'".format(key)) updates.append((self._offsets[key], val)) updates.sort() start = fd.tell() for ((offset, datatype), val) in updates: fd.seek(start + offset) fd.write(struct.pack(datatype, val)) class HashableDict(dict): """ A simple wrapper around dict to make it hashable. This is sure to be slow, but for small dictionaries it shouldn't matter. """ def __hash__(self): return hash(frozenset(self.items())) def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. 
This ends up working like ``from module import object`` but is easier to deal with than the `__import__` builtin and supports digging into submodules. Parameters ---------- name : `str` A dotted path to a Python object--that is, the name of a function, class, or other object in a module with the full path to that module, including parent modules, separated by dots. Also known as the fully qualified name of the object. Examples -------- >>> resolve_name('asdf.util.resolve_name') Raises ------ `ImportError` If the module or named object is not found. """ # Note: On python 2 these must be str objects and not unicode parts = [str(part) for part in name.split('.')] if len(parts) == 1: # No dots in the name--just a straight up module import cursor = 1 attr_name = str('') # Must not be unicode on Python 2 else: cursor = len(parts) - 1 attr_name = parts[-1] module_name = parts[:cursor] while cursor > 0: try: ret = __import__(str('.'.join(module_name)), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret def get_class_name(obj, instance=True): """ Given a class or instance of a class, returns a string representing the fully specified path of the class. Parameters ---------- obj : object An instance of any object instance: bool Indicates whether given object is an instance of the class to be named """ typ = type(obj) if instance else obj return "{}.{}".format(typ.__module__, typ.__name__) def minversion(module, version, inclusive=True, version_path='__version__'): """ Returns `True` if the specified Python module satisfies a minimum version requirement, and `False` if not. By default this uses `pkg_resources.parse_version` to do the version comparison if available. Otherwise it falls back on `distutils.version.LooseVersion`. Parameters ---------- module : module or `str` An imported module of which to check the version, or the name of that module (in which case an import of that module is attempted-- if this fails `False` is returned). version : `str` The version as a string that this module must have at a minimum (e.g. ``'0.12'``). inclusive : `bool` The specified version meets the requirement inclusively (i.e. ``>=``) as opposed to strictly greater than (default: `True`). version_path : `str` A dotted attribute path to follow in the module for the version. Defaults to just ``'__version__'``, which should work for most Python modules. """ if isinstance(module, types.ModuleType): module_name = module.__name__ elif isinstance(module, str): module_name = module try: module = resolve_name(module_name) except ImportError: return False else: raise ValueError('module argument must be an actual imported ' 'module, or the import name of the module; ' 'got {0!r}'.format(module)) if '.' not in version_path: have_version = getattr(module, version_path) else: have_version = resolve_name('.'.join([module.__name__, version_path])) try: from pkg_resources import parse_version except ImportError: from distutils.version import LooseVersion as parse_version if inclusive: return parse_version(have_version) >= parse_version(version) else: return parse_version(have_version) > parse_version(version) class InheritDocstrings(type): """ This metaclass makes methods of a class automatically have their docstrings filled in from the methods they override in the base class. 
If the class uses multiple inheritance, the docstring will be chosen from the first class in the bases list, in the same way as methods are normally resolved in Python. If this results in selecting the wrong docstring, the docstring will need to be explicitly included on the method. For example:: >>> from asdf.util import InheritDocstrings >>> import six >>> @six.add_metaclass(InheritDocstrings) ... class A: ... def wiggle(self): ... "Wiggle the thingamajig" ... pass >>> class B(A): ... def wiggle(self): ... pass >>> B.wiggle.__doc__ u'Wiggle the thingamajig' """ def __init__(cls, name, bases, dct): def is_public_member(key): return ( (key.startswith('__') and key.endswith('__') and len(key) > 4) or not key.startswith('_')) for key, val in dct.items(): if (inspect.isfunction(val) and is_public_member(key) and val.__doc__ is None): for base in cls.__mro__[1:]: super_method = getattr(base, key, None) if super_method is not None: val.__doc__ = super_method.__doc__ break super(InheritDocstrings, cls).__init__(name, bases, dct) asdf-2.5.1/asdf/version.py0000644000446400020070000000031213605165746017615 0ustar eslavichSTSCI\science00000000000000from pkg_resources import get_distribution, DistributionNotFound try: version = get_distribution('asdf').version except DistributionNotFound: # package is not installed version = "unknown" asdf-2.5.1/asdf/versioning.py0000644000446400020070000001437413605165746020330 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ This module deals with things that change between different versions of the ASDF spec. """ from functools import total_ordering import yaml if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_loader = yaml.SafeLoader from semantic_version import Version, SimpleSpec from . import generic_io from . import resolver from . import util from .version import version as asdf_version __all__ = ['AsdfVersion', 'AsdfSpec', 'split_tag_version', 'join_tag_version'] def split_tag_version(tag): """ Split a tag into its base and version. """ name, version = tag.rsplit('-', 1) version = AsdfVersion(version) return name, version def join_tag_version(name, version): """ Join the root and version of a tag back together. """ return '{0}-{1}'.format(name, version) _version_map = {} def get_version_map(version): version_map = _version_map.get(version) if version_map is None: version_map_path = resolver.DEFAULT_URL_MAPPING[0][1].replace( '{url_suffix}', 'asdf/version_map-{0}'.format(version)) try: with generic_io.get_file(version_map_path, 'r') as fd: version_map = yaml.load( fd, Loader=_yaml_base_loader) except Exception: raise ValueError( "Could not load version map for version {0}".format(version)) # Separate the core tags from the rest of the standard for convenience version_map['core'] = {} version_map['standard'] = {} for name, version in version_map['tags'].items(): if name.startswith('tag:stsci.edu:asdf/core'): version_map['core'][name] = version else: version_map['standard'][name] = version _version_map[version] = version_map return version_map @total_ordering class AsdfVersionMixin: """This mix-in is required in order to impose the total ordering that we want for ``AsdfVersion``, rather than accepting the total ordering that is already provided by ``Version`` from ``semantic_version``. 
Defining these comparisons directly in ``AsdfVersion`` and applying ``total_ordering`` there will not work since ``total_ordering`` only defines comparison operations if they do not exist already and the base class ``Version`` already defines these operations. """ def __eq__(self, other): # Seems like a bit of a hack... if isinstance(other, SimpleSpec): return other == self if isinstance(other, (str, tuple, list)): other = AsdfVersion(other) return Version.__eq__(self, other) def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): if isinstance(other, (str, tuple, list)): other = AsdfVersion(other) return Version.__lt__(self, other) def __hash__(self): # To be honest, I'm not sure why I had to make this explicit return Version.__hash__(self) class AsdfVersion(AsdfVersionMixin, Version): """This class adds features to the existing ``Version`` class from the ``semantic_version`` module. Namely, it allows ``Version`` objects to be constructed from tuples and lists as well as strings, and it allows ``Version`` objects to be compared with tuples, lists, and strings, instead of just other ``Version`` objects. If any of these features are added to the ``Version`` class itself (as requested in https://github.com/rbarrois/python-semanticversion/issues/52), then this class will become obsolete. """ def __init__(self, version): # This is a dirty hack and you know it if isinstance(version, AsdfVersion): version = str(version) if isinstance(version, (tuple, list)): version = '.'.join([str(x) for x in version]) super(AsdfVersion, self).__init__(version) class AsdfSpec(SimpleSpec): def __init__(self, *args, **kwargs): super(AsdfSpec, self).__init__(*args, **kwargs) def match(self, version): if isinstance(version, (str, tuple, list)): version = AsdfVersion(version) return super(AsdfSpec, self).match(version) def __iterate_versions(self, versions): for v in versions: if isinstance(v, (str, tuple, list)): v = AsdfVersion(v) yield v def select(self, versions): return super(AsdfSpec, self).select(self.__iterate_versions(versions)) def filter(self, versions): return super(AsdfSpec, self).filter(self.__iterate_versions(versions)) def __eq__(self, other): """Equality between Spec and Version, string, or tuple, means match""" if isinstance(other, SimpleSpec): return super(AsdfSpec, self).__eq__(other) return self.match(other) def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return super(AsdfSpec, self).__hash__() supported_versions = [ AsdfVersion('1.0.0'), AsdfVersion('1.1.0'), AsdfVersion('1.2.0'), AsdfVersion('1.3.0'), AsdfVersion('1.4.0') ] default_version = supported_versions[-1] class VersionedMixin: _version = default_version @property def version(self): return self._version @version.setter def version(self, version): if version not in supported_versions: human_versions = util.human_list( [str(x) for x in supported_versions]) raise ValueError( "This version of the asdf package ({0}) only understands how " "to handle versions {1} of the ASDF Standard. 
Got " "'{2}'".format(asdf_version, human_versions, version)) self._version = version @property def version_string(self): return str(self._version) @property def version_map(self): try: version_map = get_version_map(self.version_string) except ValueError: raise ValueError( "Don't have information about version {0}".format( self.version_string)) return version_map asdf-2.5.1/asdf/yamlutil.py0000644000446400020070000002455313567314375020006 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import warnings from collections import OrderedDict import numpy as np import yaml from . import schema from . import tagged from . import treeutil from . import util from .constants import YAML_TAG_PREFIX from .versioning import split_tag_version from .exceptions import AsdfConversionWarning __all__ = ['custom_tree_to_tagged_tree', 'tagged_tree_to_custom_tree'] if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_dumper = yaml.CSafeDumper _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_dumper = yaml.SafeDumper _yaml_base_loader = yaml.SafeLoader # ---------------------------------------------------------------------- # Custom loader/dumpers _yaml_base_type_map = { yaml.MappingNode: lambda node, loader: loader.construct_mapping(node, deep=True), yaml.SequenceNode: lambda node, loader: loader.construct_sequence(node, deep=True), yaml.ScalarNode: lambda node, loader: loader.construct_scalar(node) } def _yaml_to_base_type(node, loader): """ Converts a PyYAML node type to a basic Python data type. Parameters ---------- node : yaml.Node The node is converted to a basic Python type using the following: - MappingNode -> dict - SequenceNode -> list - ScalarNode -> str, int, float etc. loader : yaml.Loader Returns ------- basic : object Basic Python data type. """ def unknown_type_exception(node, loader): raise TypeError("Don't know how to implicitly construct '{0}'".format( type(node))) return _yaml_base_type_map.get( type(node), unknown_type_exception)(node, loader) class AsdfDumper(_yaml_base_dumper): """ A specialized YAML dumper that understands "tagged basic Python data types" as implemented in the `tagged` module. 
""" def __init__(self, *args, **kwargs): kwargs['default_flow_style'] = None super().__init__(*args, **kwargs) def represent_data(self, data): node = super(AsdfDumper, self).represent_data(data) tag_name = getattr(data, '_tag', None) if tag_name is not None: node.tag = tag_name return node _flow_style_map = { 'flow': True, 'block': False } def represent_sequence(dumper, sequence): flow_style = _flow_style_map.get(sequence.flow_style, None) sequence = sequence.data return super(AsdfDumper, dumper).represent_sequence( None, sequence, flow_style) def represent_mapping(dumper, mapping): flow_style = _flow_style_map.get(mapping.flow_style, None) node = super(AsdfDumper, dumper).represent_mapping( None, mapping.data, flow_style) if mapping.property_order: values = node.value new_mapping = {} for key, val in values: new_mapping[key.value] = (key, val) new_values = [] for key in mapping.property_order: if key in mapping: new_values.append(new_mapping[key]) property_order = set(mapping.property_order) for key, val in values: if key.value not in property_order: new_values.append((key, val)) node.value = new_values return node _style_map = { 'inline': '"', 'folded': '>', 'literal': '|' } def represent_scalar(dumper, value): style = _style_map.get(value.style, None) return super(AsdfDumper, dumper).represent_scalar( None, value.data, style) AsdfDumper.add_representer(tagged.TaggedList, represent_sequence) AsdfDumper.add_representer(tagged.TaggedDict, represent_mapping) AsdfDumper.add_representer(tagged.TaggedString, represent_scalar) class AsdfLoader(_yaml_base_loader): """ A specialized YAML loader that can construct "tagged basic Python data types" as implemented in the `tagged` module. """ ignore_version_mismatch = False def construct_object(self, node, deep=False): tag = node.tag if node.tag in self.yaml_constructors: return super(AsdfLoader, self).construct_object(node, deep=False) data = _yaml_to_base_type(node, self) tag = self.ctx.type_index.fix_yaml_tag( self.ctx, tag, self.ignore_version_mismatch) data = tagged.tag_object(tag, data) return data # ---------------------------------------------------------------------- # Handle omap (ordered mappings) YAML_OMAP_TAG = YAML_TAG_PREFIX + 'omap' # Add support for loading YAML !!omap objects as OrderedDicts and dumping # OrderedDict in the omap format as well. def ordereddict_constructor(loader, node): try: omap = loader.construct_yaml_omap(node) return OrderedDict(*omap) except yaml.constructor.ConstructorError: return list(*loader.construct_yaml_seq(node)) def represent_ordered_mapping(dumper, tag, data): # TODO: Again, adjust for preferred flow style, and other stylistic details # NOTE: For block style this uses the compact omap notation, but for flow style # it does not. 
# TODO: Need to see if I can figure out a mechanism so that classes that # use this representer can specify which values should use flow style values = [] node = yaml.SequenceNode(tag, values, flow_style=dumper.default_flow_style) if dumper.alias_key is not None: dumper.represented_objects[dumper.alias_key] = node for key, value in data.items(): key_item = dumper.represent_data(key) value_item = dumper.represent_data(value) node_item = yaml.MappingNode(YAML_OMAP_TAG, [(key_item, value_item)], flow_style=False) values.append(node_item) return node def represent_ordereddict(dumper, data): return represent_ordered_mapping(dumper, YAML_OMAP_TAG, data) AsdfLoader.add_constructor(YAML_OMAP_TAG, ordereddict_constructor) AsdfDumper.add_representer(OrderedDict, represent_ordereddict) # ---------------------------------------------------------------------- # Handle numpy scalars for scalar_type in util.iter_subclasses(np.floating): AsdfDumper.add_representer(scalar_type, AsdfDumper.represent_float) for scalar_type in util.iter_subclasses(np.integer): AsdfDumper.add_representer(scalar_type, AsdfDumper.represent_int) def custom_tree_to_tagged_tree(tree, ctx): """ Convert a tree, possibly containing custom data types that aren't directly representable in YAML, to a tree of basic data types, annotated with tags. """ def walker(node): tag = ctx.type_index.from_custom_type(type(node), ctx.version_string) if tag is not None: return tag.to_tree_tagged(node, ctx) return node return treeutil.walk_and_modify(tree, walker) def tagged_tree_to_custom_tree(tree, ctx, force_raw_types=False): """ Convert a tree containing only basic data types, annotated with tags, to a tree containing custom data types. """ def walker(node): if force_raw_types: return node tag_name = getattr(node, '_tag', None) if tag_name is None: return node tag_type = ctx.type_index.from_yaml_tag(ctx, tag_name) # This means the tag did not correspond to any type in our type index. if tag_type is None: if not ctx._ignore_unrecognized_tag: warnings.warn("{} is not recognized, converting to raw Python " "data structure".format(tag_name), AsdfConversionWarning) return node real_tag = ctx.type_index.get_real_tag(tag_name) real_tag_name, real_tag_version = split_tag_version(real_tag) # This means that there is an explicit description of versions that are # compatible with the associated tag class implementation, but the # version we found does not fit that description. if tag_type.incompatible_version(real_tag_version): warnings.warn("Version {} of {} is not compatible with any " "existing tag implementations".format( real_tag_version, real_tag_name), AsdfConversionWarning) return node # If a tag class does not explicitly list compatible versions, then all # versions of the corresponding schema are assumed to be compatible. # Therefore we need to check to make sure whether the conversion is # actually successful, and just return a raw Python data type if it is # not. try: return tag_type.from_tree_tagged(node, ctx) except TypeError as err: warnings.warn("Failed to convert {} to custom type (detail: {}). " "Using raw Python data structure instead".format(real_tag, err), AsdfConversionWarning) return node return treeutil.walk_and_modify(tree, walker) def load_tree(stream, ctx, ignore_version_mismatch=False): """ Load YAML, returning a tree of objects. Parameters ---------- stream : readable file-like object Stream containing the raw YAML content. 
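ctx : AsdfFile The file context; it is attached to the loader class and used to resolve and fix up YAML tags via its type index. ignore_version_mismatch : bool, optional If `True`, do not warn when tag versions in the file do not match the versions known to the type index. Defaults to `False`. 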
""" class AsdfLoaderTmp(AsdfLoader): pass AsdfLoaderTmp.ctx = ctx AsdfLoaderTmp.ignore_version_mismatch = ignore_version_mismatch return yaml.load(stream, Loader=AsdfLoaderTmp) def dump_tree(tree, fd, ctx): """ Dump a tree of objects, possibly containing custom types, to YAML. Parameters ---------- tree : object Tree of objects, possibly containing custom data types. fd : asdf.generic_io.GenericFile A file object to dump the serialized YAML to. ctx : Context The writing context. """ class AsdfDumperTmp(AsdfDumper): pass AsdfDumperTmp.ctx = ctx tags = None if hasattr(tree, 'yaml_tag'): tag = tree.yaml_tag tag = tag[:tag.index('/core/asdf') + 1] if tag.strip(): tags = {'!': tag} tree = custom_tree_to_tagged_tree(tree, ctx) schema.validate(tree, ctx) schema.remove_defaults(tree, ctx) yaml_version = tuple( int(x) for x in ctx.version_map['YAML_VERSION'].split('.')) yaml.dump_all( [tree], stream=fd, Dumper=AsdfDumperTmp, explicit_start=True, explicit_end=True, version=yaml_version, allow_unicode=True, encoding='utf-8', tags=tags) asdf-2.5.1/asdf-standard/0000755000446400020070000000000013605166132017346 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/reference_files/0000755000446400020070000000000013605166132022466 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/reference_files/1.0.0/0000755000446400020070000000000013605166132023122 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/reference_files/1.0.0/ascii.asdf0000644000446400020070000000070413567314601025055 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: [ascii, 5] byteorder: big shape: [2] ... BLK0 ` o본Rڥ+*ascii#ASDF BLOCK INDEX %YAML 1.1 --- [350] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/ascii.yaml0000644000446400020070000000052113567314601025077 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: ['', ascii] datatype: [ascii, 5] shape: [2] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/basic.asdf0000644000446400020070000000077013567314601025051 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [8] ... BLK0@@@5YL_A&L#ASDF BLOCK INDEX %YAML 1.1 --- [348] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/basic.yaml0000644000446400020070000000053113567314601025071 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] ... 
asdf-2.5.1/asdf-standard/reference_files/1.0.0/complex.asdf0000644000446400020070000001315113567314601025434 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypec16: !core/ndarray-1.0.0 source: 1 datatype: complex128 byteorder: big shape: [100] datatype>c8: !core/ndarray-1.0.0 source: 0 datatype: complex64 byteorder: big shape: [100] ... BLK0   &^len冄4343434343434344444443433333433343BLK0@@@nA[eWv<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<BLK0   Mzj4ߝoA{`4343434343434344444443433333433343BLK0@@@e1$ǜ90m3<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<#ASDF BLOCK INDEX %YAML 1.1 --- [665, 1519, 3173, 4027] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/complex.yaml0000644000446400020070000004412113567314601025462 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypec16: !core/ndarray-1.0.0 data: [!core/complex-1.0.0 0j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 -1.79769313486e+308j, !core/complex-1.0.0 1.79769313486e+308j, !core/complex-1.0.0 2.22044604925e-16j, !core/complex-1.0.0 1.11022302463e-16j, !core/complex-1.0.0 2.22507385851e-308j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (-0+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-0-1.79769313486e+308j), !core/complex-1.0.0 1.79769313486e+308j, !core/complex-1.0.0 2.22044604925e-16j, !core/complex-1.0.0 1.11022302463e-16j, !core/complex-1.0.0 2.22507385851e-308j, !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (nan-1.79769313486e+308j), !core/complex-1.0.0 (nan+1.79769313486e+308j), !core/complex-1.0.0 (nan+2.22044604925e-16j), !core/complex-1.0.0 (nan+1.11022302463e-16j), !core/complex-1.0.0 (nan+2.22507385851e-308j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (inf-1.79769313486e+308j), !core/complex-1.0.0 (inf+1.79769313486e+308j), !core/complex-1.0.0 (inf+2.22044604925e-16j), !core/complex-1.0.0 (inf+1.11022302463e-16j), !core/complex-1.0.0 (inf+2.22507385851e-308j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-inf-1.79769313486e+308j), !core/complex-1.0.0 (-inf+1.79769313486e+308j), !core/complex-1.0.0 (-inf+2.22044604925e-16j), !core/complex-1.0.0 (-inf+1.11022302463e-16j), !core/complex-1.0.0 (-inf+2.22507385851e-308j), !core/complex-1.0.0 (-1.79769313486e+308+0j), !core/complex-1.0.0 (-1.79769313486e+308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-1.79769313486e+308-1.79769313486e+308j), !core/complex-1.0.0 (-1.79769313486e+308+1.79769313486e+308j), !core/complex-1.0.0 (-1.79769313486e+308+2.22044604925e-16j), !core/complex-1.0.0 
(-1.79769313486e+308+1.11022302463e-16j), !core/complex-1.0.0 (-1.79769313486e+308+2.22507385851e-308j), !core/complex-1.0.0 (1.79769313486e+308+0j), !core/complex-1.0.0 (1.79769313486e+308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.79769313486e+308-1.79769313486e+308j), !core/complex-1.0.0 (1.79769313486e+308+1.79769313486e+308j), !core/complex-1.0.0 (1.79769313486e+308+2.22044604925e-16j), !core/complex-1.0.0 (1.79769313486e+308+1.11022302463e-16j), !core/complex-1.0.0 (1.79769313486e+308+2.22507385851e-308j), !core/complex-1.0.0 (2.22044604925e-16+0j), !core/complex-1.0.0 (2.22044604925e-16+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (2.22044604925e-16-1.79769313486e+308j), !core/complex-1.0.0 (2.22044604925e-16+1.79769313486e+308j), !core/complex-1.0.0 (2.22044604925e-16+2.22044604925e-16j), !core/complex-1.0.0 (2.22044604925e-16+1.11022302463e-16j), !core/complex-1.0.0 (2.22044604925e-16+2.22507385851e-308j), !core/complex-1.0.0 (1.11022302463e-16+0j), !core/complex-1.0.0 (1.11022302463e-16+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.11022302463e-16-1.79769313486e+308j), !core/complex-1.0.0 (1.11022302463e-16+1.79769313486e+308j), !core/complex-1.0.0 (1.11022302463e-16+2.22044604925e-16j), !core/complex-1.0.0 (1.11022302463e-16+1.11022302463e-16j), !core/complex-1.0.0 (1.11022302463e-16+2.22507385851e-308j), !core/complex-1.0.0 (2.22507385851e-308+0j), !core/complex-1.0.0 (2.22507385851e-308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (2.22507385851e-308-1.79769313486e+308j), !core/complex-1.0.0 (2.22507385851e-308+1.79769313486e+308j), !core/complex-1.0.0 (2.22507385851e-308+2.22044604925e-16j), !core/complex-1.0.0 (2.22507385851e-308+1.11022302463e-16j), !core/complex-1.0.0 (2.22507385851e-308+2.22507385851e-308j)] datatype: complex128 shape: [100] datatype>c8: !core/ndarray-1.0.0 data: [!core/complex-1.0.0 0j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 -3.40282346639e+38j, !core/complex-1.0.0 3.40282346639e+38j, !core/complex-1.0.0 1.19209289551e-07j, !core/complex-1.0.0 5.96046447754e-08j, !core/complex-1.0.0 1.17549435082e-38j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (-0+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-0-3.40282346639e+38j), !core/complex-1.0.0 3.40282346639e+38j, !core/complex-1.0.0 1.19209289551e-07j, !core/complex-1.0.0 5.96046447754e-08j, !core/complex-1.0.0 1.17549435082e-38j, !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (nan-3.40282346639e+38j), !core/complex-1.0.0 (nan+3.40282346639e+38j), !core/complex-1.0.0 (nan+1.19209289551e-07j), !core/complex-1.0.0 (nan+5.96046447754e-08j), !core/complex-1.0.0 (nan+1.17549435082e-38j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (inf-3.40282346639e+38j), !core/complex-1.0.0 (inf+3.40282346639e+38j), !core/complex-1.0.0 (inf+1.19209289551e-07j), !core/complex-1.0.0 
(inf+5.96046447754e-08j), !core/complex-1.0.0 (inf+1.17549435082e-38j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-inf-3.40282346639e+38j), !core/complex-1.0.0 (-inf+3.40282346639e+38j), !core/complex-1.0.0 (-inf+1.19209289551e-07j), !core/complex-1.0.0 (-inf+5.96046447754e-08j), !core/complex-1.0.0 (-inf+1.17549435082e-38j), !core/complex-1.0.0 (-3.40282346639e+38+0j), !core/complex-1.0.0 (-3.40282346639e+38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-3.40282346639e+38-3.40282346639e+38j), !core/complex-1.0.0 (-3.40282346639e+38+3.40282346639e+38j), !core/complex-1.0.0 (-3.40282346639e+38+1.19209289551e-07j), !core/complex-1.0.0 (-3.40282346639e+38+5.96046447754e-08j), !core/complex-1.0.0 (-3.40282346639e+38+1.17549435082e-38j), !core/complex-1.0.0 (3.40282346639e+38+0j), !core/complex-1.0.0 (3.40282346639e+38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (3.40282346639e+38-3.40282346639e+38j), !core/complex-1.0.0 (3.40282346639e+38+3.40282346639e+38j), !core/complex-1.0.0 (3.40282346639e+38+1.19209289551e-07j), !core/complex-1.0.0 (3.40282346639e+38+5.96046447754e-08j), !core/complex-1.0.0 (3.40282346639e+38+1.17549435082e-38j), !core/complex-1.0.0 (1.19209289551e-07+0j), !core/complex-1.0.0 (1.19209289551e-07+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.19209289551e-07-3.40282346639e+38j), !core/complex-1.0.0 (1.19209289551e-07+3.40282346639e+38j), !core/complex-1.0.0 (1.19209289551e-07+1.19209289551e-07j), !core/complex-1.0.0 (1.19209289551e-07+5.96046447754e-08j), !core/complex-1.0.0 (1.19209289551e-07+1.17549435082e-38j), !core/complex-1.0.0 (5.96046447754e-08+0j), !core/complex-1.0.0 (5.96046447754e-08+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (5.96046447754e-08-3.40282346639e+38j), !core/complex-1.0.0 (5.96046447754e-08+3.40282346639e+38j), !core/complex-1.0.0 (5.96046447754e-08+1.19209289551e-07j), !core/complex-1.0.0 (5.96046447754e-08+5.96046447754e-08j), !core/complex-1.0.0 (5.96046447754e-08+1.17549435082e-38j), !core/complex-1.0.0 (1.17549435082e-38+0j), !core/complex-1.0.0 (1.17549435082e-38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.17549435082e-38-3.40282346639e+38j), !core/complex-1.0.0 (1.17549435082e-38+3.40282346639e+38j), !core/complex-1.0.0 (1.17549435082e-38+1.19209289551e-07j), !core/complex-1.0.0 (1.17549435082e-38+5.96046447754e-08j), !core/complex-1.0.0 (1.17549435082e-38+1.17549435082e-38j)] datatype: complex64 shape: [100] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/compressed.asdf0000644000446400020070000000200513567314601026125 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} bzp2: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [128] zlib: !core/ndarray-1.0.0 source: 1 datatype: int64 byteorder: little shape: [128] ... 
BLK0bzp2m@]ZBZh91AY&SY\(W?&b4Ʉ0 C4`4` <  a Hc?@"e .`ɣ> $iNJk^&mn˧o (q"Ə"Ls&ΟB*u*֯bͫw.޿Ջ6ܻz8?˛?EӨ@%A ":ܑN$8BLK0zlibm@]Zx-Blܲms-۶{_rC0;‘rc8;NrS4;Ùrs<.rK\2•rk\:׻nr[6Ýr{>{G<1'ߓg<9{^W5{Ûw=>O|3—o|;~_7ßv#ASDF BLOCK INDEX %YAML 1.1 --- [441, 721] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/compressed.yaml0000644000446400020070000000273313567314601026162 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} bzp2: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127] datatype: int64 shape: [128] zlib: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127] datatype: int64 shape: [128] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/exploded.asdf0000644000446400020070000000055013567314601025570 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: exploded0000.asdf datatype: int64 byteorder: little shape: [8] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/exploded.yaml0000644000446400020070000000053113567314601025614 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/exploded0000.asdf0000644000446400020070000000063713567314601026076 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} ... BLK0@@@5YL_A&L#ASDF BLOCK INDEX %YAML 1.1 --- [259] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/float.asdf0000644000446400020070000000220613567314601025071 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypef4: !core/ndarray-1.0.0 source: 1 datatype: float32 byteorder: big shape: [10] datatype>f8: !core/ndarray-1.0.0 source: 3 datatype: float64 byteorder: big shape: [10] ... BLK0(((1[\Z143BLK0(((*!SeGľ{$nT43BLK0PPPeջ };b<<BLK0PPPl~ArQq<<#ASDF BLOCK INDEX %YAML 1.1 --- [649, 743, 837, 971] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/float.yaml0000644000446400020070000000223513567314601025120 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypef4: !core/ndarray-1.0.0 data: [0.0, -0.0, .nan, .inf, -.inf, -3.4028234663852886e+38, 3.4028234663852886e+38, 1.1920928955078125e-07, 5.960464477539063e-08, 1.1754943508222875e-38] datatype: float32 shape: [10] datatype>f8: !core/ndarray-1.0.0 data: [0.0, -0.0, .nan, .inf, -.inf, -1.7976931348623157e+308, 1.7976931348623157e+308, 2.220446049250313e-16, 1.1102230246251565e-16, 2.2250738585072014e-308] datatype: float64 shape: [10] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/int.asdf0000644000446400020070000000424613567314601024564 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypei1: !core/ndarray-1.0.0 source: 1 datatype: int8 byteorder: big shape: [3] datatype>i2: !core/ndarray-1.0.0 source: 0 datatype: int16 byteorder: big shape: [3] datatype>i4: !core/ndarray-1.0.0 source: 2 datatype: int32 byteorder: big shape: [3] datatype>u1: !core/ndarray-1.0.0 source: 8 datatype: uint8 byteorder: big shape: [2] datatype>u2: !core/ndarray-1.0.0 source: 7 datatype: uint16 byteorder: big shape: [2] datatype>u4: !core/ndarray-1.0.0 source: 6 datatype: uint32 byteorder: big shape: [2] ... BLK0ebb-R_fBLK0ztuOI+NUBLK0 I R4jBLK0 ҒkS(iZJ<BLK0ztuOI+NUBLK0qɭӜr][s2BLK0ĭ+돩:vBLK0]^w-t<BLK0迯V;/xBLK0迯V;/xBLK0]^w-t<BLK0ĭ+돩:v#ASDF BLOCK INDEX %YAML 1.1 --- [1391, 1451, 1508, 1574, 1640, 1697, 1757, 1819, 1877, 1933, 1989, 2047] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/int.yaml0000644000446400020070000000244713567314601024612 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypei1: !core/ndarray-1.0.0 data: [127, -128, 0] datatype: int8 shape: [3] datatype>i2: !core/ndarray-1.0.0 data: [32767, -32768, 0] datatype: int16 shape: [3] datatype>i4: !core/ndarray-1.0.0 data: [2147483647, -2147483648, 0] datatype: int32 shape: [3] datatype>u1: !core/ndarray-1.0.0 data: [255, 0] datatype: uint8 shape: [2] datatype>u2: !core/ndarray-1.0.0 data: [65535, 0] datatype: uint16 shape: [2] datatype>u4: !core/ndarray-1.0.0 data: [4294967295, 0] datatype: uint32 shape: [2] ... 
asdf-2.5.1/asdf-standard/reference_files/1.0.0/shared.asdf0000644000446400020070000000115713567314601025236 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [8] subset: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [4] offset: 8 strides: [16] ... BLK0@@@5YL_A&L#ASDF BLOCK INDEX %YAML 1.1 --- [467] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/shared.yaml0000644000446400020070000000065113567314601025261 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] subset: !core/ndarray-1.0.0 data: [1, 3, 5, 7] datatype: int64 shape: [4] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/stream.asdf0000644000446400020070000000163313567314601025262 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} my_stream: !core/ndarray-1.0.0 source: -1 datatype: float64 byteorder: little shape: ['*', 8] ... BLK0????????@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@asdf-2.5.1/asdf-standard/reference_files/1.0.0/stream.yaml0000644000446400020070000000126213567314601025305 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} my_stream: !core/ndarray-1.0.0 data: - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] - [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0] - [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0] - [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0] - [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0] - [5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0] - [6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0] - [7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0] datatype: float64 shape: [8, 8] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/unicode_bmp.asdf0000644000446400020070000000117513567314601026254 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 source: 1 datatype: [ucs4, 2] byteorder: big shape: [2] ... BLK0IS6.cܪMKqBLK0Mt_ZQ#ASDF BLOCK INDEX %YAML 1.1 --- [454, 524] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/unicode_bmp.yaml0000644000446400020070000000065313567314601026301 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 data: ['', Æʩ] datatype: [ucs4, 2] shape: [2] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/unicode_spp.asdf0000644000446400020070000000117513567314601026300 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 source: 1 datatype: [ucs4, 2] byteorder: big shape: [2] ... BLK0'0B9e< BLK0dluэ#` #ASDF BLOCK INDEX %YAML 1.1 --- [454, 524] ... asdf-2.5.1/asdf-standard/reference_files/1.0.0/unicode_spp.yaml0000644000446400020070000000067313567314601026327 0ustar eslavichSTSCI\science00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 data: ['', "\U00010020"] datatype: [ucs4, 2] shape: [2] ... asdf-2.5.1/asdf-standard/reference_files/README.rst0000644000446400020070000000133013567314601024155 0ustar eslavichSTSCI\science00000000000000This directory contains reference ASDF files. ASDF parser implementations are encouraged to use these files as part of their test suite. There is a directory here for each version of the ASDF standard. Each directory contains pairs of files: one ``.asdf`` file and one ``.yaml`` file. To use the reference file suite, load the ``.asdf`` file and perform the following transformations: - Convert all ``core/ndarray`` tags to in-line YAML data. - Resolve all ``JSON Pointer`` references and store the results inline. - Dereference all YAML aliases to their anchors. Compare the result to the matching ``.yaml`` file. For compliance, the files do not need to be byte-for-byte identical, but should represent the same values at the YAML level. asdf-2.5.1/asdf-standard/reference_files/generate/0000755000446400020070000000000013605166132024260 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/reference_files/generate/README.rst0000644000446400020070000000036513567314601025756 0ustar eslavichSTSCI\science00000000000000To regenerate the reference files: ./generate $version where $version is the version of the ASDF standard to generate. The resulting reference files should be inspected for correctness by hand and then committed to the git repository. 
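For example, assuming the asdf Python package is installed, the 1.0.0 reference files can be regenerated with: ./generate 1.0.0 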
asdf-2.5.1/asdf-standard/reference_files/generate/generate0000755000446400020070000001043713567314601026010 0ustar eslavichSTSCI\science00000000000000#!/usr/bin/env python #-*- coding: utf-8 -*- from __future__ import unicode_literals import os try: import asdf except ImportError: raise ImportError( "asdf must be installed to regenerate the reference files.") import numpy as np def ref_basic(fd): tree = { 'data': np.arange(8) } asdf.AsdfFile(tree).write_to(fd) def ref_int(fd): tree = {} for size in (1, 2, 4): bits = size * 8 for endian in ['>', '<']: values = [(1 << (bits - 1)) - 1, -(1 << (bits - 1)), 0] datatype = '%si%d' % (endian, size) arr = np.array(values, datatype) tree['datatype' + datatype] = arr values = [(1 << bits) - 1, 0] datatype = '%su%d' % (endian, size) arr = np.array(values, datatype) tree['datatype' + datatype] = arr asdf.AsdfFile(tree).write_to(fd) def ref_float(fd): tree = {} for size in (4, 8): for endian in ['>', '<']: datatype = '%sf%d' % (endian, size) finfo = np.finfo(np.dtype(datatype)) values = [0.0, -0.0, np.nan, np.inf, -np.inf, finfo.min, finfo.max, finfo.eps, finfo.epsneg, finfo.tiny] arr = np.array(values, datatype) tree['datatype' + datatype] = arr asdf.AsdfFile(tree).write_to(fd) def ref_complex(fd): tree = {} for size in (4, 8): for endian in ['>', '<']: datatype = '%sf%d' % (endian, size) finfo = np.finfo(np.dtype(datatype)) values = [0.0, -0.0, np.nan, np.inf, -np.inf, finfo.min, finfo.max, finfo.eps, finfo.epsneg, finfo.tiny] complex_values = [] for x in values: for y in values: complex_values.append(x + 1j * y) datatype = '%sc%d' % (endian, size * 2) arr = np.array(complex_values, datatype) tree['datatype' + datatype] = arr asdf.AsdfFile(tree).write_to(fd) def ref_ascii(fd): arr = np.array([b'', b'ascii'], dtype='S') tree = {'data': arr} asdf.AsdfFile(tree).write_to(fd) def ref_unicode_bmp(fd): tree = {} for endian in ['>', '<']: arr = np.array(['', 'Æʩ'], dtype=endian + 'U') tree['datatype' + endian + 'U'] = arr asdf.AsdfFile(tree).write_to(fd) def ref_unicode_spp(fd): tree = {} for endian in ['>', '<']: arr = np.array(['', '𐀠'], dtype=endian + 'U') tree['datatype' + endian + 'U'] = arr asdf.AsdfFile(tree).write_to(fd) def ref_shared(fd): data = np.arange(8) tree = { 'data': data, 'subset': data[1::2] } asdf.AsdfFile(tree).write_to(fd) def ref_stream(fd): tree = { # Each "row" of data will have 8 entries. 'my_stream': asdf.Stream([8], np.float64) } ff = asdf.AsdfFile(tree) with open(fd, 'wb') as fd: ff.write_to(fd) # Write 8 rows of data, one row at a time. The stream expects the # raw binary bytes, not an array, so we use # ``tostring()``. 
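# (Each row written below is 8 float64 values, i.e. 64 bytes of raw data.) 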
for i in range(8): fd.write(np.array([i] * 8, np.float64).tostring()) def ref_exploded(fd): tree = { 'data': np.arange(8) } asdf.AsdfFile(tree).write_to(fd, all_array_storage='external') def ref_compressed(fd): tree = { 'zlib': np.arange(128), 'bzp2': np.arange(128) } ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['zlib'], 'zlib') ff.set_array_compression(tree['bzp2'], 'bzp2') ff.write_to(fd) def generate(version): outdir = os.path.join(os.path.dirname(__file__), '..', version) for name, func in globals().items(): if not name.startswith("ref_"): continue name = name[4:] filename = os.path.join(outdir, name) func(filename + ".asdf") # Bind to a name other than ``asdf`` so that the asdf module itself # is not shadowed on subsequent loop iterations. with asdf.open(filename + ".asdf") as af: af.resolve_and_inline() af.write_to(filename + ".yaml") if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( "generate", description="Regenerate the ASDF reference files") parser.add_argument( "version", type=str, nargs=1, help="The ASDF version") args = parser.parse_args() generate(args.version[0]) asdf-2.5.1/asdf-standard/schemas/0000755000446400020070000000000013605166132020771 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/0000755000446400020070000000000013605166132022672 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/0000755000446400020070000000000013605166132023607 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/asdf-schema-1.0.0.yaml0000644000446400020070000000272513567314601027311 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://json-schema.org/draft-04/schema" id: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" title: ASDF Schema description: | Extending YAML Schema and JSON Schema to add support for some ASDF-specific checks, related to [ndarrays](ref:core/ndarray-1.0.0). allOf: - $ref: "http://stsci.edu/schemas/yaml-schema/draft-01" - type: object properties: max_ndim: description: | Specifies that the corresponding **ndarray** is at most the given number of dimensions. If the array has fewer dimensions, it should be logically treated as if it were "broadcast" to the expected dimensions by adding 1's to the front of the shape list. type: integer minimum: 0 ndim: description: | Specifies that the matching **ndarray** is exactly the given number of dimensions. type: integer minimum: 0 datatype: description: | Specifies the datatype of the **ndarray**. By default, an array is considered "matching" if the array can be cast to the given datatype without data loss. For exact datatype matching, set `exact_datatype` to `true`. allOf: - $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0#/definitions/datatype" exact_datatype: description: | If `true`, the datatype must match exactly. type: boolean default: false ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/0000755000446400020070000000000013605166132024537 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.0.0.yaml0000644000446400020070000000260613567314601027001 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/asdf-1.0.0" title: | Top-level schema for every ASDF file. description: | This schema contains the top-level attributes for every ASDF file. tag: "tag:stsci.edu:asdf/core/asdf-1.0.0" type: object properties: asdf_library: description: | Describes the ASDF library that produced the file. 
$ref: "software-1.0.0" history: description: | A log of transformations that have happened to the file. May include such things as data collection, data calibration pipelines, data analysis etc. type: array items: $ref: "history_entry-1.0.0" data: description: | The data array corresponds to the main science data array in the file. Oftentimes, the data model will be much more complex than a single array, but this array will be used by applications that just want to convert to a display an image or preview of the file. It is recommended, but not required, that it is a 2-dimensional image array. $ref: "ndarray-1.0.0" fits: description: | A way to specify exactly how this ASDF file should be converted to FITS. $ref: "../fits/fits-1.0.0" wcs: description: | The location of the main WCS for the main data. $ref: "../wcs/wcs-1.0.0" additionalProperties: trueasdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.1.0.yaml0000644000446400020070000000270613567314601027003 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/asdf-1.1.0" title: | Top-level schema for every ASDF file. description: | This schema contains the top-level attributes for every ASDF file. tag: "tag:stsci.edu:asdf/core/asdf-1.1.0" type: object properties: asdf_library: description: | Describes the ASDF library that produced the file. $ref: "software-1.0.0" history: description: | A log of transformations that have happened to the file. May include such things as data collection, data calibration pipelines, data analysis etc. anyOf: # This is to support backwards compatibility with older history formats - type: array items: - $ref: "history_entry-1.0.0" # This is the new, richer history implementation that includes # extension metadata. - $ref: "#/definitions/history-1.1.0" additionalProperties: true # Make sure that these two metadata fields are always at the top of the file propertyOrder: [asdf_library, history] # This contains the definition of the new history format, which includes # metadata about the extensions used to create the file. definitions: history-1.1.0: type: object properties: extensions: type: array items: - $ref: "extension_metadata-1.0.0" entries: type: array items: - $ref: "history_entry-1.0.0" ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/column-1.0.0.yaml0000644000446400020070000000201713567314601027355 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/column-1.0.0" tag: "tag:stsci.edu:asdf/core/column-1.0.0" title: > A column in a table. description: | Each column contains a name and an array of data, and an optional description and unit. type: object properties: name: description: | The name of the column. Each name in a [table](http://stsci.edu/schemas/asdf/core/table-1.0.0) must be unique. type: string pattern: "[A-Za-z_][A-Za-z0-9_]*" data: description: | The array data for the column. allOf: - $ref: ndarray-1.0.0 description: description: | An optional description of the column. type: string default: '' unit: description: An optional unit for the column. allOf: - $ref: ../unit/unit-1.0.0 meta: description: Additional free-form metadata about the column. 
type: object default: {} required: [name, data] additionalProperties: false asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/complex-1.0.0.yaml0000644000446400020070000000546513567314601027537 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/complex-1.0.0" title: Complex number value. description: | Represents a complex number matching the following EBNF grammar ``` dot = "." plus-or-minus = "+" | "-" digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" sign = "" | plus-or-minus suffix = "J" | "j" | "I" | "i" inf = "inf" | "INF" nan = "nan" | "NAN" number = digits | dot digits | digits dot digits sci-suffix = "e" | "E" scientific = number sci-suffix sign digits real = sign number | sign scientific imag = number suffix | scientific suffix complex = real | sign imag | real plus-or-minus imag ``` Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. For historical reasons, it is necessary to accept as valid complex numbers that are surrounded by parentheses. examples: - - 1 real, -1 imaginary - "!core/complex-1.0.0 1-1j" - - 0 real, 1 imaginary - "!core/complex-1.0.0 1J" - - -1 real, 0 imaginary - "!core/complex-1.0.0 -1" tag: "tag:stsci.edu:asdf/core/complex-1.0.0" type: string # This regex was automatically generated from a description of a grammar pattern: "^(((((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+)))|([+-]?(((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ])))|((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+))[+-](((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ])))))|(\\((((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+)))|([+-]?(((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ])))|((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+))[+-](((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ]))))\\)))$" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/constant-1.0.0.yaml0000644000446400020070000000045113567314601027711 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/constant-1.0.0" tag: "tag:stsci.edu:asdf/core/constant-1.0.0" title: Specify that a value is a constant. description: | Used as a utility to indicate that a value is a literal constant. 
asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/extension_metadata-1.0.0.yaml0000644000446400020070000000107613567314601031740 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0" title: | Metadata about specific ASDF extensions that were used to create this file. tag: "tag:stsci.edu:asdf/core/extension_metadata-1.0.0" type: object properties: extension_class: description: | The fully-specified name of the extension class. type: string package: description: | The name and version of the package that contains the extension. $ref: "software-1.0.0" required: [extension_class] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/externalarray-1.0.0.yaml0000644000446400020070000000177213567314601030750 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/externalarray-1.0.0" tag: "tag:stsci.edu:asdf/core/externalarray-1.0.0" title: Point to an array-like object in an external file. description: | Allow referencing of array-like objects in external files. These files may be of any type, and may be located at any absolute or relative path with respect to the ASDF file. Loading of these files into arrays is not handled by asdf. examples: - - Example external reference - | !core/externalarray-1.0.0 datatype: int16 fileuri: aia.lev1_euv_12s.2017-09-06T120001Z.94.image_lev1.fits shape: [4096, 4096] target: 1 type: object properties: fileuri: type: string target: anyOf: - type: integer - type: string datatype: type: string shape: type: array items: anyOf: - type: integer minimum: 0 required: [fileuri, target, datatype, shape] additionalProperties: true ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/history_entry-1.0.0.yaml0000644000446400020070000000134713567314601031007 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/history_entry-1.0.0" title: | An entry in the file history. tag: "tag:stsci.edu:asdf/core/history_entry-1.0.0" type: object properties: description: description: | A description of the transformation performed. type: string time: description: | A timestamp for the operation, in UTC. type: string format: date-time software: description: | One or more descriptions of the software that performed the operation. anyOf: - $ref: "software-1.0.0" - type: array items: $ref: "software-1.0.0" required: [description] additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/integer-1.0.0.yaml0000644000446400020070000000312713567314601027520 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/integer-1.0.0" title: Arbitrary precision integer value. description: | Represents an arbitrarily large integer value. 
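That is, the value represented is ``sum(words[i] * 2**(32 * i))`` over the entries of the ``words`` array, negated when ``sign`` is ``-``. 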
examples: - - An integer value that is stored using an internal array - | !core/integer-1.0.0 sign: + string: '1193942770599561143856918438330' words: !core/ndarray-1.0.0 source: 0 datatype: uint32 byteorder: little shape: [4] - - The same integer value is stored using an inline array - | !core/integer-1.0.0 sign: + string: '1193942770599561143856918438330' words: !core/ndarray-1.0.0 data: [1103110586, 1590521629, 299257845, 15] datatype: uint32 shape: [4] tag: "tag:stsci.edu:asdf/core/integer-1.0.0" type: object properties: words: $ref: "ndarray-1.0.0" description: | An array of unsigned 32-bit words representing the integer value, stored as little endian (i.e. the first word of the array represents the least significant bits of the integer value). sign: type: string pattern: "^[+-]$" description: | String indicating whether the integer value is positive or negative. string: type: string description: | Optional string representation of the integer value. This field is only intended to improve readability for humans, and therefore no assumptions about format should be made by ASDF readers. required: [words, sign] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml0000644000446400020070000002667113567314601027534 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0" tag: "tag:stsci.edu:asdf/core/ndarray-1.0.0" title: > An *n*-dimensional array. description: | There are two ways to store the data in an ndarray. - Inline in the tree: This is recommended only for small arrays. In this case, the entire ``ndarray`` tag may be a nested list, in which case the type of the array is inferred from the content. (See the rules for type inference in the ``inline-data`` definition below.) The inline data may also be given in the ``data`` property, in which case it is possible to explicitly specify the ``datatype`` and other properties. - External to the tree: The data comes from a [block](ref:block) within the same ASDF file or an external ASDF file referenced by a URI. 
examples: - - An inline array, with implicit data type - | !core/ndarray-1.0.0 [[1, 0, 0], [0, 1, 0], [0, 0, 1]] - - An inline array, with an explicit data type - | !core/ndarray-1.0.0 datatype: float64 data: [[1, 0, 0], [0, 1, 0], [0, 0, 1]] - - An inline structured array, where the types of each column are automatically detected - | !core/ndarray-1.0.0 [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]] - - An inline structured array, where the types of each column are explicitly specified - | !core/ndarray-1.0.0 datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]] data: [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]] - - A double-precision array, in contiguous memory in a block within the same file - | !core/ndarray-1.0.0 source: 0 shape: [1024, 1024] datatype: float64 byteorder: little - - A view of a tile in that image - | !core/ndarray-1.0.0 source: 0 shape: [256, 256] datatype: float64 byteorder: little strides: [8192, 8] offset: 2099200 - - A structured datatype, with nested columns for a coordinate in (*ra*, *dec*), and a 3x3 convolution kernel - | !core/ndarray-1.0.0 source: 0 shape: [64] datatype: - name: coordinate datatype: - name: ra datatype: float64 - name: dec datatype: float64 - name: kernel datatype: float32 shape: [3, 3] byteorder: little - - An array in Fortran order - | !core/ndarray-1.0.0 source: 0 shape: [1024, 1024] datatype: float64 byteorder: little strides: [8192, 8] - - An array where values of -999 are treated as missing - | !core/ndarray-1.0.0 source: 0 shape: [256, 256] datatype: float64 byteorder: little mask: -999 - - An array where another array is used as a mask - | !core/ndarray-1.0.0 source: 0 shape: [256, 256] datatype: float64 byteorder: little mask: !core/ndarray-1.0.0 source: 1 shape: [256, 256] datatype: bool8 byteorder: little - - An array where the data is stored in the first block in another ASDF file. - | !core/ndarray-1.0.0 source: external.asdf shape: [256, 256] datatype: float64 byteorder: little definitions: scalar-datatype: description: | Describes the type of a single element. There is a set of numeric types, each with a single identifier: - `int8`, `int16`, `int32`, `int64`: Signed integer types, with the given bit size. - `uint8`, `uint16`, `uint32`, `uint64`: Unsigned integer types, with the given bit size. - `float32`: Single-precision floating-point type or "binary32", as defined in IEEE 754. - `float64`: Double-precision floating-point type or "binary64", as defined in IEEE 754. - `complex64`: Complex number where the real and imaginary parts are each single-precision floating-point ("binary32") numbers, as defined in IEEE 754. - `complex128`: Complex number where the real and imaginary parts are each double-precision floating-point ("binary64") numbers, as defined in IEEE 754. There are two distinct fixed-length string types, which must be indicated with a 2-element array where the first element is an identifier for the string type, and the second is a length: - `ascii`: A string containing ASCII text (all codepoints < 128), where each character is 1 byte. - `ucs4`: A string containing unicode text in the UCS-4 encoding, where each character is always 4 bytes long. Here the number of bytes used is 4 times the given length. 
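For example, ``[ascii, 12]`` describes a 12-character ASCII string field occupying 12 bytes, while ``[ucs4, 12]`` describes a 12-character UCS-4 string field occupying 48 bytes. 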
anyOf: - type: string enum: [int8, uint8, int16, uint16, int32, uint32, int64, uint64, float32, float64, complex64, complex128, bool8] - type: array items: - type: string enum: [ascii, ucs4] - type: integer minimum: 0 minLength: 2 maxLength: 2 datatype: description: | The data format of the array elements. May be a single scalar datatype, or may be a nested list of datatypes. When a list, each field may have a name. anyOf: - $ref: "#/definitions/scalar-datatype" - type: array items: anyOf: - $ref: "#/definitions/scalar-datatype" - type: object properties: name: type: string pattern: "[A-Za-z_][A-Za-z0-9_]*" description: The name of the field datatype: $ref: "#/definitions/datatype" byteorder: type: string enum: [big, little] description: | The byteorder for the field. If not provided, the byteorder of the datatype as a whole will be used. shape: type: array items: type: integer minimum: 0 required: [datatype] inline-data: description: | Inline data is stored in YAML format directly in the tree, rather than referencing a binary block. It is made out of nested lists. If the datatype of the array is not specified, it is inferred from the array contents. Type inference is supported only for homogeneous arrays, not tables. - If any of the elements in the array are YAML strings, the `datatype` of the entire array is `ucs4`, with the width of the largest string in the column, otherwise... - If any of the elements in the array are complex numbers, the `datatype` of the entire column is `complex128`, otherwise... - If any of the types in the column are numbers with a decimal point, the `datatype` of the entire column is `float64`, otherwise... - If any of the types in the column are integers, the `datatype` of the entire column is `int64`, otherwise... - The `datatype` of the entire column is `bool8`. Masked values may be included in the array using `null`. If an explicit mask array is also provided, it takes precedence. type: array items: anyOf: - type: number - type: string - type: "null" - $ref: "complex-1.0.0" - $ref: "#/definitions/inline-data" - type: boolean anyOf: - $ref: "#/definitions/inline-data" - type: object properties: source: description: | The source of the data. - If an integer: If positive, the zero-based index of the block within the same file. If negative, the index from the last block within the same file. For example, a source of `-1` corresponds to the last block in the same file. - If a string, a URI to an external ASDF file containing the block data. Relative URIs and ``file:`` and ``http:`` protocols must be supported. Other protocols may be supported by specific library implementations. The ability to reference block data in an external ASDF file is intentionally limited to the first block in the external ASDF file, and is intended only to support the needs of [exploded](ref:exploded). For the more general case of referencing data in an external ASDF file, use tree [references](ref:references). anyOf: - type: integer - type: string format: uri data: description: | The data for the array inline. If `datatype` and/or `shape` are also provided, they must match the data here and can be used as a consistency check. `strides`, `offset` and `byteorder` are meaningless when `data` is provided. $ref: "#/definitions/inline-data" shape: description: | The shape of the array. The first entry may be the string `*`, indicating that the length of the first index of the array will be automatically determined from the size of the block. This is used for streaming support. 
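For example, a `shape` of `['*', 8]` describes a streamed array whose rows each contain 8 elements (as in the `stream.asdf` reference file above). 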
type: array items: anyOf: - type: integer minimum: 0 - enum: ['*'] datatype: description: | The data format of the array elements. $ref: "#/definitions/datatype" byteorder: description: > The byte order (big- or little-endian) of the array data. type: string enum: [big, little] offset: description: > The offset, in bytes, within the data for the start of this view. type: integer minimum: 0 default: 0 strides: description: > The number of bytes to skip in each dimension. If not provided, the array is assumed to be contiguous and in C order. If provided, must be the same length as the shape property. type: array items: anyOf: - type: integer minimum: 1 - type: integer maximum: -1 mask: description: > Describes how missing values in the array are stored. If a scalar number, that number is used to represent missing values. If an ndarray, the given array provides a mask, where non-zero values represent missing values in this array. The mask array must be broadcastable to the dimensions of this array. anyOf: - type: number - $ref: "complex-1.0.0" - allOf: - $ref: "ndarray-1.0.0" - datatype: bool8 dependencies: source: [shape, datatype, byteorder] propertyOrder: [source, data, mask, datatype, byteorder, shape, offset, strides]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/software-1.0.0.yaml0000644000446400020070000000151413567314601027713 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/software-1.0.0" title: | Describes a software package. tag: "tag:stsci.edu:asdf/core/software-1.0.0" type: object properties: name: description: | The name of the application or library. type: string author: description: | The author (or institution) that produced the software package. type: string homepage: description: | A URI to the homepage of the software. type: string format: uri version: description: | The version of the software used. It is recommended, but not required, that this follows the [Semantic Versioning Specification](http://semver.org/spec/v2.0.0.html). type: string required: [name, version] additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/core/table-1.0.0.yaml0000644000446400020070000000550513567314601027154 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/table-1.0.0" tag: "tag:stsci.edu:asdf/core/table-1.0.0" title: > A table. description: | A table is represented as a list of columns, where each entry is a [column](ref:core/column-1.0.0) object, containing the data and some additional information. The data itself may be stored inline as text, or in binary in either row- or column-major order by use of the `strides` property on the individual column arrays. Each column in the table must have the same first (slowest moving) dimension.
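# Editorial note on the row-major example below: each packed row holds two
# float64 columns (8 bytes each) followed by one 1-byte ascii column, so a
# row occupies 8 + 8 + 1 = 17 bytes. Every column view therefore uses
# strides: [17], and the three columns start at byte offsets 0, 8 and 16
# within the row.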
examples: - - A table stored in column-major order, with each column in a separate block - | !core/table-1.0.0 columns: - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: float64 byteorder: little shape: [3] description: RA meta: {foo: bar} name: a unit: !unit/unit-1.0.0 deg - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 1 datatype: float64 byteorder: little shape: [3] description: DEC name: b - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 2 datatype: [ascii, 1] byteorder: big shape: [3] description: The target name name: c - - A table stored in row-major order, all stored in the same block - | !core/table-1.0.0 columns: - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: float64 byteorder: little shape: [3] strides: [17] description: RA meta: {foo: bar} name: a unit: !unit/unit-1.0.0 deg - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: float64 byteorder: little shape: [3] offset: 8 strides: [17] description: DEC name: b - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: [ascii, 1] byteorder: big shape: [3] offset: 16 strides: [17] description: The target name name: c type: object properties: columns: description: | A list of columns in the table. type: array items: $ref: column-1.0.0 meta: description: | Additional free-form metadata about the table. type: object default: {} additionalProperties: false required: [columns] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/fits/0000755000446400020070000000000013605166132024554 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/fits/fits-1.0.0.yaml0000644000446400020070000000753713567314601027056 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/fits/fits-1.0.0" title: > A FITS file inside of an ASDF file. description: | This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF. One example is data that has an offset and scale applied through the `BZERO` and `BSCALE` keywords. In these cases, it will not be possible to store the data in the native format from FITS and also be accessible in its proper form in the ASDF file. Only image and binary table extensions are supported.
examples: - - A simple FITS file with a primary header and two extensions - | !fits/fits-1.0.0 - header: - [SIMPLE, true, conforms to FITS standard] - [BITPIX, 8, array data type] - [NAXIS, 0, number of array dimensions] - [EXTEND, true] - [] - ['', Top Level MIRI Metadata] - [] - [DATE, '2013-08-30T10:49:55.070373', The date this file was created (UTC)] - [FILENAME, MiriDarkReferenceModel_test.fits, The name of the file] - [TELESCOP, JWST, The telescope used to acquire the data] - [] - ['', Information about the observation] - [] - [DATE-OBS, '2013-08-30T10:49:55.000000', The date the observation was made (UTC)] - data: !core/ndarray-1.0.0 datatype: float32 shape: [2, 3, 3, 4] source: 0 byteorder: big header: - [XTENSION, IMAGE, Image extension] - [BITPIX, -32, array data type] - [NAXIS, 4, number of array dimensions] - [NAXIS1, 4] - [NAXIS2, 3] - [NAXIS3, 3] - [NAXIS4, 2] - [PCOUNT, 0, number of parameters] - [GCOUNT, 1, number of groups] - [EXTNAME, SCI, extension name] - [BUNIT, DN, Units of the data array] - data: !core/ndarray-1.0.0 datatype: float32 shape: [2, 3, 3, 4] source: 1 byteorder: big header: - [XTENSION, IMAGE, Image extension] - [BITPIX, -32, array data type] - [NAXIS, 4, number of array dimensions] - [NAXIS1, 4] - [NAXIS2, 3] - [NAXIS3, 3] - [NAXIS4, 2] - [PCOUNT, 0, number of parameters] - [GCOUNT, 1, number of groups] - [EXTNAME, ERR, extension name] - [BUNIT, DN, Units of the error array] tag: "tag:stsci.edu:asdf/fits/fits-1.0.0" type: array items: description: > Each item represents a single header/data unit (HDU). type: object properties: header: description: > A list of the keyword/value/comment triples from the header, in the order they appear in the FITS file. type: array items: type: array minItems: 0 maxItems: 3 items: - description: "The keyword." type: string maxLength: 8 pattern: "[A-Z0-9]*" - description: "The value." anyOf: - type: string maxLength: 60 - type: number - type: boolean - description: "The comment." type: string maxLength: 60 data: description: "The data part of the HDU." anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../core/table-1.0.0" - type: "null" default: null required: [header] additionalProperties: false asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/time/0000755000446400020070000000000013605166132024545 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/time/time-1.0.0.yaml0000644000446400020070000002015513567314601027027 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/time/time-1.0.0" tag: "tag:stsci.edu:asdf/time/time-1.0.0" title: Represents an instant in time. description: | A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from among the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`.
In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher precision is required, the `iso` or `yday` format should be used. examples: - - Example ISO time - | !time/time-1.0.0 "2000-12-31T13:05:27.737" - - Example year, day-of-year and time format time - | !time/time-1.0.0 "2001:003:04:05:06.789" - - Example Besselian Epoch time - | !time/time-1.0.0 B2000.0 - - Example Besselian Epoch time, equivalent to above - | !time/time-1.0.0 value: 2000.0 format: byear - - Example list of times - | !time/time-1.0.0 ["2000-12-31T13:05:27.737", "2000-12-31T13:06:38.444"] - - Example of an array of times - | !time/time-1.0.0 value: !core/ndarray-1.0.0 data: [2000, 2001] datatype: float64 format: jyear - - Example with a location - | !time/time-1.0.0 value: 2000.0 format: jyear scale: tdb location: x: 6378100 y: 0 z: 0 definitions: iso_time: type: string pattern: "[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])[T ]([0-1][0-9]|2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" byear: type: string pattern: "B[0-9]+(.[0-9]+)?" jyear: type: string pattern: "J[0-9]+(.[0-9]+)?" yday: type: string pattern: "[0-9]{4}:(00[1-9]|0[1-9][0-9]|[1-2][0-9][0-9]|3[0-5][0-9]|36[0-5]):([0-1][0-9]|2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" string_formats: anyOf: - $ref: "#/definitions/iso_time" - $ref: "#/definitions/byear" - $ref: "#/definitions/jyear" - $ref: "#/definitions/yday" array_of_strings: type: array items: anyOf: - $ref: "#/definitions/array_of_strings" - $ref: "#/definitions/string_formats" anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0#/anyOf/1" - type: object properties: value: description: | The value(s) of the time. anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0" - type: number format: description: | The format of the time. If not provided, the format should be guessed from the string from among the following unambiguous options: `iso`, `byear`, `jyear` and `yday`. The supported formats are: - `iso`: ISO 8601 compliant date-time format `YYYY-MM-DDTHH:MM:SS.sss...`. For example, `2000-01-01 00:00:00.000` is midnight on January 1, 2000. The `T` separating the date from the time section is optional. - `yday`: Year, day-of-year and time as `YYYY:DOY:HH:MM:SS.sss...`. The day-of-year (DOY) goes from 001 to 365 (366 in leap years). For example, `2000:001:00:00:00.000` is midnight on January 1, 2000. - `byear`: Besselian Epoch year, e.g. `B1950.0`. The `B` is optional if the `byear` format is explicitly specified. - `jyear`: Julian Epoch year, e.g. `J2000.0`. The `J` is optional if the `jyear` format is explicitly specified. - `decimalyear`: Time as a decimal year, with integer values corresponding to midnight of the first day of each year. For example, 2000.5 corresponds to the ISO time `2000-07-02 00:00:00`. - `jd`: Julian Date time format. This represents the number of days since the beginning of the Julian Period. For example, 2451544.5 in `jd` is midnight on January 1, 2000. - `mjd`: Modified Julian Date time format. This represents the number of days since midnight on November 17, 1858. For example, 51544.0 in MJD is midnight on January 1, 2000.
- `gps`: GPS time: seconds from 1980-01-06 00:00:00 UTC. For example, 630720013.0 is midnight on January 1, 2000. - `unix`: Unix time: seconds from 1970-01-01 00:00:00 UTC. For example, 946684800.0 in Unix time is midnight on January 1, 2000. [TODO: Astropy's definition of UNIX time doesn't match POSIX's here. What should we do for the purposes of ASDF?] enum: - iso - yday - byear - jyear - decimalyear - jd - mjd - gps - unix - cxcsec scale: description: | The time scale (or time standard) is a specification for measuring time: either the rate at which time passes; or points in time; or both. See also [3] and [4]. These scales are defined in detail in [SOFA Time Scale and Calendar Tools](http://www.iausofa.org/sofa_ts_c.pdf). The supported time scales are: - `utc`: Coordinated Universal Time (UTC). This is the default time scale, except for the `gps` and `unix` formats. - `tai`: International Atomic Time (TAI). - `tcb`: Barycentric Coordinate Time (TCB). - `tcg`: Geocentric Coordinate Time (TCG). - `tdb`: Barycentric Dynamical Time (TDB). - `tt`: Terrestrial Time (TT). - `ut1`: Universal Time (UT1). enum: - utc - tai - tcb - tcg - tdb - tt - ut1 location: description: | Specifies the observer location for scales that are sensitive to observer location, currently only `tdb`. May be specified either with geocentric coordinates (X, Y, Z) with an optional unit or geodetic coordinates: - `long`: longitude in degrees - `lat`: latitude in degrees - `h`: optional height anyOf: - type: object properties: x: type: number y: type: number z: type: number unit: allOf: - $ref: "../unit/unit-1.0.0" - default: m required: [x, y, z] - type: object properties: long: type: number minimum: -180 maximum: 180 lat: type: number minimum: -90 maximum: 90 h: type: number default: 0 unit: allOf: - $ref: "../unit/unit-1.0.0" - default: m required: [long, lat] required: [value] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/time/time-1.1.0.yaml0000644000446400020070000001701513567314601027031 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/time/time-1.1.0" tag: "tag:stsci.edu:asdf/time/time-1.1.0" title: Represents an instant in time. description: | A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from among the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher precision is required, the `iso` or `yday` format should be used.
examples: - - Example ISO time - | !time/time-1.1.0 "2000-12-31T13:05:27.737" - - Example year, day-of-year and time format time - | !time/time-1.1.0 "2001:003:04:05:06.789" - - Example Besselian Epoch time - | !time/time-1.1.0 B2000.0 - - Example Besselian Epoch time, equivalent to above - | !time/time-1.1.0 value: 2000.0 format: byear - - Example list of times - | !time/time-1.1.0 ["2000-12-31T13:05:27.737", "2000-12-31T13:06:38.444"] - - Example of an array of times - | !time/time-1.1.0 value: !core/ndarray-1.0.0 data: [2000, 2001] datatype: float64 format: jyear - - Example with a location - | !time/time-1.1.0 value: 2000.0 format: jyear scale: tdb location: x: 6378100 y: 0 z: 0 definitions: iso_time: type: string pattern: "[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])[T ]([0-1][0-9]|2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" byear: type: string pattern: "B[0-9]+(.[0-9]+)?" jyear: type: string pattern: "J[0-9]+(.[0-9]+)?" yday: type: string pattern: "[0-9]{4}:(00[1-9]|0[1-9][0-9]|[1-2][0-9][0-9]|3[0-5][0-9]|36[0-5]):([0-1][0-9]|2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" string_formats: anyOf: - $ref: "#/definitions/iso_time" - $ref: "#/definitions/byear" - $ref: "#/definitions/jyear" - $ref: "#/definitions/yday" array_of_strings: type: array items: anyOf: - $ref: "#/definitions/array_of_strings" - $ref: "#/definitions/string_formats" anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0#/anyOf/1" - type: object properties: value: description: | The value(s) of the time. anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0" - type: number format: description: | The format of the time. If not provided, the format should be guessed from the string from among the following unambiguous options: `iso`, `byear`, `jyear` and `yday`. The supported formats are: - `iso`: ISO 8601 compliant date-time format `YYYY-MM-DDTHH:MM:SS.sss...`. For example, `2000-01-01 00:00:00.000` is midnight on January 1, 2000. The `T` separating the date from the time section is optional. - `yday`: Year, day-of-year and time as `YYYY:DOY:HH:MM:SS.sss...`. The day-of-year (DOY) goes from 001 to 365 (366 in leap years). For example, `2000:001:00:00:00.000` is midnight on January 1, 2000. - `byear`: Besselian Epoch year, e.g. `B1950.0`. The `B` is optional if the `byear` format is explicitly specified. - `jyear`: Julian Epoch year, e.g. `J2000.0`. The `J` is optional if the `jyear` format is explicitly specified. - `decimalyear`: Time as a decimal year, with integer values corresponding to midnight of the first day of each year. For example, 2000.5 corresponds to the ISO time `2000-07-02 00:00:00`. - `jd`: Julian Date time format. This represents the number of days since the beginning of the Julian Period. For example, 2451544.5 in `jd` is midnight on January 1, 2000. - `mjd`: Modified Julian Date time format. This represents the number of days since midnight on November 17, 1858. For example, 51544.0 in MJD is midnight on January 1, 2000. - `gps`: GPS time: seconds from 1980-01-06 00:00:00 UTC. For example, 630720013.0 is midnight on January 1, 2000. - `unix`: Unix time: seconds from 1970-01-01 00:00:00 UTC. For example, 946684800.0 in Unix time is midnight on January 1, 2000. [TODO: Astropy's definition of UNIX time doesn't match POSIX's here. What should we do for the purposes of ASDF?]
enum: - iso - yday - byear - jyear - decimalyear - jd - mjd - gps - unix - cxcsec scale: description: | The time scale (or time standard) is a specification for measuring time: either the rate at which time passes; or points in time; or both. See also [3] and [4]. These scales are defined in detail in [SOFA Time Scale and Calendar Tools](http://www.iausofa.org/sofa_ts_c.pdf). The supported time scales are: - `utc`: Coordinated Universal Time (UTC). This is the default time scale, except for the `gps` and `unix` formats. - `tai`: International Atomic Time (TAI). - `tcb`: Barycentric Coordinate Time (TCB). - `tcg`: Geocentric Coordinate Time (TCG). - `tdb`: Barycentric Dynamical Time (TDB). - `tt`: Terrestrial Time (TT). - `ut1`: Universal Time (UT1). enum: - utc - tai - tcb - tcg - tdb - tt - ut1 location: description: | Specifies the observer location for scales that are sensitive to observer location, currently only `tdb`. May be specified either with geocentric coordinates (X, Y, Z) with an optional unit or geodetic coordinates: - `long`: longitude in degrees - `lat`: latitude in degrees - `h`: optional height type: object properties: x: $ref: "../unit/quantity-1.1.0" y: $ref: "../unit/quantity-1.1.0" z: $ref: "../unit/quantity-1.1.0" required: [x, y, z] required: [value] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/0000755000446400020070000000000013605166132025622 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.0.0.yaml0000644000446400020070000000147513567314601027702 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.0.0" tag: "tag:stsci.edu:asdf/transform/add-1.0.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.1.0.yaml0000644000446400020070000000147513567314601027703 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.1.0" tag: "tag:stsci.edu:asdf/transform/add-1.1.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.2.0.yaml0000644000446400020070000000147513567314601027704 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.2.0" tag: "tag:stsci.edu:asdf/transform/add-1.2.0" title: > Perform a list of subtransforms in parallel and then add their results together.
description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.2.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.0.0.yaml0000644000446400020070000000232413567314601030374 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.0.0" tag: "tag:stsci.edu:asdf/transform/affine-1.0.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.1.0.yaml0000644000446400020070000000232413567314601030375 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.1.0" tag: "tag:stsci.edu:asdf/transform/affine-1.1.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.2.0.yaml0000644000446400020070000000245213567314601030400 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.2.0" tag: "tag:stsci.edu:asdf/transform/affine-1.2.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. 
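# A hedged sketch (editorial comment, not part of the schema): using the
# plain-list alternatives defined just below, a 2-D affine-1.2.0 transform
# might be written as:
#
#   !transform/affine-1.2.0
#     matrix: [[0.8, -0.6], [0.6, 0.8]]
#     translation: [10.0, 20.0]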
anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.3.0.yaml0000644000446400020070000000245213567314601030401 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.3.0" tag: "tag:stsci.edu:asdf/transform/affine-1.3.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.0.0.yaml0000644000446400020070000000121313567314601030104 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.0.0" tag: "tag:stsci.edu:asdf/transform/airy-1.0.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.0.0" - type: object properties: theta_b: type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.1.0.yaml0000644000446400020070000000115513567314601030112 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.1.0" tag: "tag:stsci.edu:asdf/transform/airy-1.1.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.1.0" - type: object properties: theta_b: type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.2.0.yaml0000644000446400020070000000125313567314601030112 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.2.0" tag: "tag:stsci.edu:asdf/transform/airy-1.2.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. 
See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.2.0" - type: object properties: theta_b: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.0.0.yaml0000644000446400020070000000230313567314601032421 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.0.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.0.0" - type: object properties: theta1: type: number description: | Bonne conformal latitude, in degrees. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.1.0.yaml0000644000446400020070000000230313567314601032422 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.1.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.1.0" - type: object properties: theta1: type: number description: | Bonne conformal latitude, in degrees. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.2.0.yaml0000644000446400020070000000240113567314601032422 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.2.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.1.0" - type: object properties: theta1: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Bonne conformal latitude, in degrees. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.0.0.yaml0000644000446400020070000000072613567314601034122 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.0.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.1.0.yaml0000644000446400020070000000072613567314601034123 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.1.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.0.0.yaml0000644000446400020070000000223213567314601030607 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.0.0" tag: "tag:stsci.edu:asdf/transform/compose-1.0.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in the list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each.
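# Editorial note: if the forward list is $t_1, t_2, \ldots, t_n$ (applied in
# that order), the analytic inverse required above is the reversed list of
# inverses, since $(t_n \circ \dots \circ t_1)^{-1} = t_1^{-1} \circ \dots \circ t_n^{-1}$.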
examples: - - A series of transforms - | !transform/compose-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.1.0.yaml0000644000446400020070000000223213567314601030610 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.1.0" tag: "tag:stsci.edu:asdf/transform/compose-1.1.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in the list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. examples: - - A series of transforms - | !transform/compose-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.2.0.yaml0000644000446400020070000000223213567314601030611 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.2.0" tag: "tag:stsci.edu:asdf/transform/compose-1.2.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in the list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. examples: - - A series of transforms - | !transform/compose-1.2.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.0.0.yaml0000644000446400020070000000377013567314601031436 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.0.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.0.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case.
The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 2 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.1.0.yaml0000644000446400020070000000377113567314601031440 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.1.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.1.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.2.0.yaml0000644000446400020070000000377013567314601031440 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.2.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.2.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. 
The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.2.0 forward: - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.0.0.yaml0000644000446400020070000000245213567314601030241 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.0.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.1.0.yaml0000644000446400020070000000245213567314601030242 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.1.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. 
In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.2.0.yaml0000644000446400020070000000264713567314601030251 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.2.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.3.0.yaml0000644000446400020070000000264713567314601030252 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.3.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. 
default: 0 delta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.0.0.yaml0000644000446400020070000000163613567314601032423 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.0.0" title: | Albers' conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.1.0.yaml0000644000446400020070000000163613567314601032424 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.1.0" title: | Albers' conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.2.0.yaml0000644000446400020070000000160013567314601032416 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.2.0" title: | Albers' conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform.
$ref: "conic-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.0.0.yaml0000644000446400020070000000137313567314601032654 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.0.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.1.0.yaml0000644000446400020070000000137313567314601032655 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.1.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.2.0.yaml0000644000446400020070000000133513567314601032654 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.2.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.0.0.yaml0000644000446400020070000000217213567314601033035 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.0.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. 
The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.1.0.yaml0000644000446400020070000000217213567314601033036 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.1.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.2.0.yaml0000644000446400020070000000213413567314601033035 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.2.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.0.0.yaml0000644000446400020070000000141513567314601032650 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.0.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard.
See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.1.0.yaml0000644000446400020070000000141513567314601032651 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.1.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.2.0.yaml0000644000446400020070000000135713567314601032657 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.2.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.0.0.yaml0000644000446400020070000000104313567314601030772 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.0.0" tag: "tag:stsci.edu:asdf/transform/constant-1.0.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.0.0" - type: object properties: value: type: number required: [value]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.1.0.yaml0000644000446400020070000000104313567314601030773 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.1.0" tag: "tag:stsci.edu:asdf/transform/constant-1.1.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values.
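# Editorial addition, not part of the published constant-1.1.0 file: a
# hypothetical instance illustrating the single required `value` property,
# written in the examples style used by other schemas in this archive.
examples:
  -
    - A transform that always outputs the constant 42.0.
    - |
        !transform/constant-1.1.0
          value: 42.0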
allOf: - $ref: "transform-1.1.0" - type: object properties: value: type: number required: [value]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.2.0.yaml0000644000446400020070000000114213567314601030774 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.2.0" tag: "tag:stsci.edu:asdf/transform/constant-1.2.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.1.0" - type: object properties: value: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number required: [value] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.3.0.yaml0000644000446400020070000000114213567314601030775 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.3.0" tag: "tag:stsci.edu:asdf/transform/constant-1.3.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.2.0" - type: object properties: value: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number required: [value] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.0.0.yaml0000644000446400020070000000062513567314601031443 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.0.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.1.0.yaml0000644000446400020070000000062513567314601031444 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.1.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.2.0.yaml0000644000446400020070000000062513567314601031445 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.2.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. 
allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.0.0.yaml0000644000446400020070000000162213567314601033620 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.0.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.0.0" - type: object properties: lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.1.0.yaml0000644000446400020070000000162213567314601033621 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.1.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.2.0.yaml0000644000446400020070000000172113567314601033622 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.2.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: lambda: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Radius of the cylinder in spherical radii, default is 0. 
default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.0.0.yaml0000644000446400020070000000224413567314601034053 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.0.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.0.0" - type: object properties: mu: type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.1.0.yaml0000644000446400020070000000224413567314601034054 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.1.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: mu: type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.2.0.yaml0000644000446400020070000000244113567314601034054 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.2.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform.
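# Editorial addition, not part of the published file: a hypothetical instance
# with illustrative parameter values (any numbers, or quantities, satisfying
# the properties declared below would do equally well).
examples:
  -
    - A pixel-to-sky CYP projection with explicit parameters.
    - |
        !transform/cylindrical_perspective-1.2.0
          mu: 1.35
          lambda: 1.0
          direction: pix2sky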
allOf: - $ref: "cylindrical-1.1.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.0.0.yaml0000644000446400020070000000163113567314601030410 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.0.0" tag: "tag:stsci.edu:asdf/transform/divide-1.0.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. - | !transform/divide-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.1.0.yaml0000644000446400020070000000163113567314601030411 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.1.0" tag: "tag:stsci.edu:asdf/transform/divide-1.1.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. - | !transform/divide-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.2.0.yaml0000644000446400020070000000163113567314601030412 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.2.0" tag: "tag:stsci.edu:asdf/transform/divide-1.2.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. 
- | !transform/divide-1.2.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/domain-1.0.0.yaml0000644000446400020070000000203113567314601030410 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/domain-1.0.0" tag: "tag:stsci.edu:asdf/transform/domain-1.0.0" title: > Defines the domain of an input axis. (deprecated since 1.1.0) description: > Describes the range of acceptable input values to a particular axis of a transform. examples: - - The domain `[0, 1)`. - | !transform/domain-1.0.0 lower: 0 upper: 1 includes_lower: true properties: lower: description: > The lower value of the domain. If not provided, the domain has no lower limit. type: number default: -.inf upper: description: > The upper value of the domain. If not provided, the domain has no upper limit. type: number default: .inf includes_lower: description: If `true`, the domain includes `lower`. type: boolean default: false includes_upper: description: If `true`, the domain includes `upper`. type: boolean default: falseasdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/fix_inputs-1.1.0.yaml0000644000446400020070000000341313567314601031335 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/fix_inputs-1.1.0" tag: "tag:stsci.edu:asdf/transform/fix_inputs-1.1.0" title: > Set selected input arguments of a model to constant values. description: | This operation takes as the right hand side a dict equivalent that consists of key:value pairs where the key identifies the input argument to be set, either by position number (0 based) or name, and the value is the floating point value that should be assigned to that input. The result is a compound model with n fewer input arguments where n is the number of input values to be set (i.e., the number of keys in the dict). examples: - - Fix the 0-th coordinate. - | !transform/fix_inputs-1.1.0 forward: - !transform/compose-1.1.0 forward: - !transform/gnomonic-1.1.0 {direction: pix2sky} - !transform/rotate2d-1.2.0 {angle: 23.0} - keys: [0] values: [2] - - Fix the "x" coordinate. - | !transform/fix_inputs-1.1.0 forward: - !transform/compose-1.1.0 forward: - !transform/gnomonic-1.1.0 {direction: pix2sky} - !transform/rotate2d-1.2.0 {angle: 23.0} - keys: [x] values: [2] allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: - $ref: "transform-1.1.0" - type: object properties: keys: type: array items: type: [string, integer] values: type: array items: - type: number minItems: 2 maxItems: 2 required: [forward] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.0.0.yaml0000644000446400020070000000114213567314601030555 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/generic-1.0.0" tag: "tag:stsci.edu:asdf/transform/generic-1.0.0" title: > A generic transform. description: > This is used **entirely** for bootstrapping purposes so one can create composite models including transforms that haven't yet been written. **IT WILL NOT BE IN THE FINAL VERSION OF THE SPEC**.
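# Editorial addition, not part of the published file: a hypothetical
# placeholder transform consuming two inputs and producing one output,
# matching the two required integer properties declared below.
examples:
  -
    - A generic transform with two inputs and one output.
    - |
        !transform/generic-1.0.0
          n_inputs: 2
          n_outputs: 1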
allOf: - $ref: "transform-1.0.0" - type: object properties: n_inputs: type: integer n_outputs: type: integer required: [n_inputs, n_outputs] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.1.0.yaml0000644000446400020070000000114213567314601030556 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/generic-1.1.0" tag: "tag:stsci.edu:asdf/transform/generic-1.1.0" title: > A generic transform. description: > This is used **entirely** for bootstrapping purposes so one can create composite models including transforms that haven't yet been written. **IT WILL NOT BE IN THE FINAL VERSION OF THE SPEC**. allOf: - $ref: "transform-1.1.0" - type: object properties: n_inputs: type: integer n_outputs: type: integer required: [n_inputs, n_outputs] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.2.0.yaml0000644000446400020070000000114213567314601030557 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/generic-1.2.0" tag: "tag:stsci.edu:asdf/transform/generic-1.2.0" title: > A generic transform. description: > This is used **entirely** for bootstrapping purposes so one can create composite models including transforms that haven't yet been written. **IT WILL NOT BE IN THE FINAL VERSION OF THE SPEC**. allOf: - $ref: "transform-1.2.0" - type: object properties: n_inputs: type: integer n_outputs: type: integer required: [n_inputs, n_outputs] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.0.0.yaml0000644000446400020070000000140213567314601030751 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.0.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.0.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.1.0.yaml0000644000446400020070000000134413567314601030757 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.1.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.1.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
$ref: "zenithal-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.2.0.yaml0000644000446400020070000000134413567314601030760 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.2.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.2.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.0.0.yaml0000644000446400020070000000153413567314601031747 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.0.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.0.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.1.0.yaml0000644000446400020070000000153413567314601031750 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.1.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.1.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.0.0.yaml0000644000446400020070000000143113567314601030574 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.0.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.0.0" title: | HEALPix projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. 
allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.1.0.yaml0000644000446400020070000000143113567314601030575 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.1.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.1.0" title: | HEALPix projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.2.0.yaml0000644000446400020070000000143113567314601030576 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.2.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.2.0" title: | HEALPix projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.0.0.yaml0000644000446400020070000000106213567314601031771 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.0.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.0.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.1.0.yaml0000644000446400020070000000106213567314601031772 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.1.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.1.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. 
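# Editorial addition, not part of the published file: a hypothetical
# sky-to-pixel instance using the `direction` property declared below.
examples:
  -
    - A sky-to-pixel HEALPix polar ("butterfly") projection.
    - |
        !transform/healpix_polar-1.1.0
          direction: sky2pix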
allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.2.0.yaml0000644000446400020070000000106213567314601031773 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.2.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.2.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.0.0.yaml0000644000446400020070000000076013567314601030777 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.0.0" tag: "tag:stsci.edu:asdf/transform/identity-1.0.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.0.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.1.0.yaml0000644000446400020070000000076013567314601031000 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.1.0" tag: "tag:stsci.edu:asdf/transform/identity-1.1.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.1.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.2.0.yaml0000644000446400020070000000076013567314601031001 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.2.0" tag: "tag:stsci.edu:asdf/transform/identity-1.2.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.2.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.0.0.yaml0000644000446400020070000001020613567314601031565 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.0.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0). The [label_mapper](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0) returns the label corresponding to given inputs. The [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0) returns the transform corresponding to this label. This maps inputs (e.g. 
pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.0.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels. - | !transform/label_mapper-1.0.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 6.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 2.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.0.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels. - | !transform/label_mapper-1.0.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 6.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 2.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.0.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.0.0" - type: object properties: mapper: description: | An array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value 0 or " ". anyOf: - $ref: "../core/ndarray-1.0.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.0.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.0.0" description: | [mapping](ref:http://stsci.edu/schemas/asdf/transform/remap_axes-1.0.0) atol: type: number description: | absolute tolerance to compare keys in mapper. required: [mapper] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.1.0.yaml0000644000446400020070000001065013567314601031571 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.1.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.1.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:transform/regions_selector-1.1.0). The [label_mapper](ref:transform/label_mapper-1.1.0) returns the label corresponding to given inputs. The [regions_selector](ref:transform/regions_selector-1.1.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.1.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels.
- | !transform/label_mapper-1.1.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 6.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 2.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.1.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels. - | !transform/label_mapper-1.1.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 6.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 2.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.1.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.1.0" - type: object properties: mapper: description: | A mapping of inputs to labels. In the general case this is an `astropy.modeling.core.Model`. It could be a numpy array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value ``no_label``. It could be a dictionary which maps tuples to labels or floating point numbers to labels. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "transform-1.1.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.1.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.1.0" description: | [mapping](ref:transform/remap_axes-1.1.0) atol: type: number description: | absolute tolerance to compare keys in mapper. no_label: description: | Fill value for missing output. anyOf: - type: number - type: string required: [mapper] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.2.0.yaml0000644000446400020070000001065013567314601031572 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.2.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.2.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:transform/regions_selector-1.2.0). The [label_mapper](ref:transform/label_mapper-1.2.0) returns the label corresponding to given inputs. The [regions_selector](ref:transform/regions_selector-1.2.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.2.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels.
- | !transform/label_mapper-1.2.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 6.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 2.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.2.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels. - | !transform/label_mapper-1.2.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 6.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 2.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.2.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.2.0" - type: object properties: mapper: description: | A mapping of inputs to labels. In the general case this is an `astropy.modeling.core.Model`. It could be a numpy array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value ``no_label``. It could be a dictionary which maps tuples to labels or floating point numbers to labels. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "transform-1.2.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.2.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.2.0" description: | [mapping](ref:transform/remap_axes-1.2.0) atol: type: number description: | absolute tolerance to compare keys in mapper. no_label: description: | Fill value for missing output. anyOf: - type: number - type: string required: [mapper] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/linear1d-1.0.0.yaml0000644000446400020070000000105113567314601030637 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/linear1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/linear1d-1.0.0" title: > A one dimensional line model description: > A one dimensional line model type: object properties: slope: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Slope of the straight line. intercept: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Intercept of the straight line. asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/math_functions-1.0.0.yaml0000644000446400020070000000103013567314601032160 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/math_functions-1.0.0" tag: "tag:stsci.edu:asdf/transform/math_functions-1.0.0" title: > Math functions. description: | Commonly used math functions.
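# Editorial note (assumption, not in the published file): `func_name` is
# expected to name a numpy universal function (ufunc), as the property
# description below states; e.g. a single-argument instance might be
#
#   !transform/math_functions-1.0.0
#     func_name: sqrt
#
# in addition to the two-argument arctan2 example that follows.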
examples: - - Atan2 - | !transform/math_functions-1.0.0 func_name: arctan2 allOf: - $ref: "transform-1.2.0" - type: object properties: func_name: type: string description: | The name of a numpy ufunc. asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.0.0.yaml0000644000446400020070000000131113567314601030753 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.0.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.0.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.1.0.yaml0000644000446400020070000000131113567314601030754 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.1.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.1.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.0.0.yaml0000644000446400020070000000161213567314601031271 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.0.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.0.0" title: | Molleweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.1.0.yaml0000644000446400020070000000161213567314601031272 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.1.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.1.0" title: | Molleweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.0.0.yaml0000644000446400020070000000164713567314601031032 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.0.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.0.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.1.0.yaml0000644000446400020070000000164713567314601031033 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.1.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.1.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.2.0.yaml0000644000446400020070000000164713567314601031034 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.2.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.2.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. 
- | !transform/multiply-1.2.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/multiplyscale-1.0.0.yaml0000644000446400020070000000121613567314601032032 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiplyscale-1.0.0" tag: "tag:stsci.edu:asdf/transform/multiplyscale-1.0.0" title: > A Multiply model. description: > Multiply the input by a factor. examples: - - Multiply the input by a pixel scale factor. - | !transform/multiplyscale-1.0.0 factor: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 arcsec pixel-1, value: 0.06} type: object properties: factor: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Multiplication factor. required: [factor] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/ortho_polynomial-1.0.0.yaml0000644000446400020070000000310113567314601032536 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/ortho_polynomial-1.0.0" tag: "tag:stsci.edu:asdf/transform/ortho_polynomial-1.0.0" title: > Represents various orthogonal polynomial models. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. The property polynomial_type defines which kind of polynomial is represented. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/ortho_polynomial-1.0.0 polynomial_type: hermite coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/ortho_polynomial-1.0.0 polynomial_type: chebyshev coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] type: object properties: polynomial_type: description: | One of a selected set of polynomial types. type: string enum: [chebyshev, legendre, hermite] coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [polynomial_type, coefficients] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.0.0.yaml0000644000446400020070000000136513567314601031104 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.0.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.0.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform.
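# Editorial addition, not part of the published file: a hypothetical
# pixel-to-sky instance; the `direction` property comes from the
# pseudocylindrical-1.0.0 base schema referenced below.
examples:
  -
    - A pixel-to-sky parabolic (PAR) projection.
    - |
        !transform/parabolic-1.0.0
          direction: pix2sky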
$ref: "pseudocylindrical-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.1.0.yaml0000644000446400020070000000136513567314601031105 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.1.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.1.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.0.0.yaml0000644000446400020070000000122613567314601031572 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.0.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.0.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.1.0.yaml0000644000446400020070000000122613567314601031573 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.1.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.1.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.0.0.yaml0000644000446400020070000000063713567314601031150 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.0.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.0.0" title: | Polyconic projection. description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.1.0.yaml0000644000446400020070000000063713567314601031151 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.1.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.1.0" title: | Polyconic projection. 
description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.0.0.yaml0000644000446400020070000000232613567314601031331 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.0.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.0.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.0.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.0.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [coefficients] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.1.0.yaml0000644000446400020070000000232613567314601031332 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.1.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.1.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.1.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.1.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [coefficients] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.2.0.yaml0000644000446400020070000000237513567314601031337 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.2.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.2.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. 
examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.2.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.2.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array required: [coefficients] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.0.0.yaml0000644000446400020070000000113713567314601030301 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.0.0" tag: "tag:stsci.edu:asdf/transform/power-1.0.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.1.0.yaml0000644000446400020070000000113713567314601030302 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.1.0" tag: "tag:stsci.edu:asdf/transform/power-1.1.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.2.0.yaml0000644000446400020070000000113713567314601030303 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.2.0" tag: "tag:stsci.edu:asdf/transform/power-1.2.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.0.0.yaml0000644000446400020070000000064013567314601031456 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.0.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. 
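A hedged sketch (editor's addition, not part of the published schema): concrete subclasses such as polyconic-1.0.0, shown earlier in this file set, inherit the `direction` property defined below, e.g. ``` !transform/polyconic-1.0.0 {direction: sky2pix} ```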
allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.1.0.yaml0000644000446400020070000000064013567314601031457 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.1.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.2.0.yaml0000644000446400020070000000064013567314601031460 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.2.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.0.0.yaml0000644000446400020070000000114513567314601032661 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.0.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.1.0.yaml0000644000446400020070000000114513567314601032662 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.1.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.2.0.yaml0000644000446400020070000000114513567314601032663 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.2.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. 
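A hedged sketch (editor's addition): concrete pseudocylindrical projections inherit the `direction` property defined below, which defaults to `pix2sky` when omitted; for instance sanson_flamsteed-1.1.0, which appears later in this file set and derives from the corresponding 1.1.0 base, may be written as ``` !transform/sanson_flamsteed-1.1.0 {direction: pix2sky} ```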
allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.0.0.yaml0000644000446400020070000000070713567314601033131 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.0.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.1.0.yaml0000644000446400020070000000070713567314601033132 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.1.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.0.0.yaml0000644000446400020070000000124313567314601030734 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.0.0" title: | Base class of all quadcube projections. description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.1.0.yaml0000644000446400020070000000124313567314601030735 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.1.0" title: | Base class of all quadcube projections. description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.2.0.yaml0000644000446400020070000000124313567314601030736 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.2.0" title: | Base class of all quadcube projections. description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. 
The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.0.0.yaml0000644000446400020070000000607313567314601032517 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.0.0" title: > Represents a discontinuous transform. description: | Maps regions to transforms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2". - | !transform/regions_selector-1.0.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.0.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0, 1, 1] - !transform/concatenate-1.0.0 forward: - !transform/concatenate-1.0.0 forward: - !transform/shift-1.0.0 {offset: 1.0} - !transform/shift-1.0.0 {offset: 2.0} - !transform/shift-1.0.0 {offset: 3.0} 2: !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0, 1, 1] - !transform/concatenate-1.0.0 forward: - !transform/concatenate-1.0.0 forward: - !transform/scale-1.0.0 {factor: 2.0} - !transform/scale-1.0.0 {factor: 3.0} - !transform/scale-1.0.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.0.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.0.0](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0) $ref: "./label_mapper-1.0.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to transforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.0.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.1.0.yaml0000644000446400020070000000603513567314601032516 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.1.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.1.0" title: > Represents a discontinuous transform. description: | Maps regions to transforms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2".
- | !transform/regions_selector-1.1.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.1.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0, 1, 1] - !transform/concatenate-1.1.0 forward: - !transform/concatenate-1.1.0 forward: - !transform/shift-1.1.0 {offset: 1.0} - !transform/shift-1.1.0 {offset: 2.0} - !transform/shift-1.1.0 {offset: 3.0} 2: !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0, 1, 1] - !transform/concatenate-1.1.0 forward: - !transform/concatenate-1.1.0 forward: - !transform/scale-1.1.0 {factor: 2.0} - !transform/scale-1.1.0 {factor: 3.0} - !transform/scale-1.1.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.1.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.1.0](ref:transform/label_mapper-1.1.0) $ref: "./label_mapper-1.1.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to transforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.1.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.2.0.yaml0000644000446400020070000000603513567314601032517 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.2.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.2.0" title: > Represents a discontinuous transform. description: | Maps regions to transforms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2". - | !transform/regions_selector-1.2.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.2.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [0, 1, 1] - !transform/concatenate-1.2.0 forward: - !transform/concatenate-1.2.0 forward: - !transform/shift-1.2.0 {offset: 1.0} - !transform/shift-1.2.0 {offset: 2.0} - !transform/shift-1.2.0 {offset: 3.0} 2: !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [0, 1, 1] - !transform/concatenate-1.2.0 forward: - !transform/concatenate-1.2.0 forward: - !transform/scale-1.2.0 {factor: 2.0} - !transform/scale-1.2.0 {factor: 3.0} - !transform/scale-1.2.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.2.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.2.0](ref:transform/label_mapper-1.2.0) $ref: "./label_mapper-1.2.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs.
type: array items: type: string selector: description: | A mapping of regions to transforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.2.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.0.0.yaml0000644000446400020070000000501713567314601031272 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.0.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.0.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers or "constant markers". Each item in the list corresponds to an output axis. For each item: - If an integer, it is the index of the input axis to send to the output axis. - If a constant, it must be a single item which is a constant value to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.0.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.0.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.0.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/remap_axes-1.0.0 mapping: [0, 1, !core/constant-1.0.0 42] - - | The above example is equivalent to the following, and ASDF implementations are free to normalize it thusly: - | !transform/concatenate-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0] - !transform/remap_axes-1.0.0 mapping: [1] - !transform/constant-1.0.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.0.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.0.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.1.0.yaml0000644000446400020070000000501713567314601031273 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.1.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.1.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers or "constant markers". Each item in the list corresponds to an output axis.
For each item: - If an integer, it is the index of the input axis to send to the output axis. - If a constant, it must be a single item which is a constant value to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.1.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.1.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.1.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/remap_axes-1.1.0 mapping: [0, 1, !core/constant-1.0.0 42] - - | The above example is equivalent to the following, and ASDF implementations are free to normalize it thusly: - | !transform/concatenate-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0] - !transform/remap_axes-1.1.0 mapping: [1] - !transform/constant-1.0.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.1.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.1.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.2.0.yaml0000644000446400020070000000501713567314601031274 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.2.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.2.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers or "constant markers". Each item in the list corresponds to an output axis. For each item: - If an integer, it is the index of the input axis to send to the output axis. - If a constant, it must be a single item which is a constant value to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. 
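As a hedged side-by-side sketch of the two accepted forms (editor's addition, consistent with the examples below): the list form ``` !transform/remap_axes-1.2.0 {mapping: [1, 0]} ``` infers two inputs from the largest index, while the object form ``` !transform/remap_axes-1.2.0 {mapping: [1, 0], n_inputs: 3} ``` declares three inputs explicitly and drops the third.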
Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.2.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.2.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.2.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/remap_axes-1.2.0 mapping: [0, 1, !core/constant-1.0.0 42] - - | The above example is equivalent to the following, and ASDF implementations are free to normalize it thusly: - | !transform/concatenate-1.1.0 forward: - !transform/remap_axes-1.2.0 mapping: [0] - !transform/remap_axes-1.2.0 mapping: [1] - !transform/constant-1.0.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.2.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.2.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.0.0.yaml0000644000446400020070000000104013567314601030662 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.0.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: angle: type: number description: Angle, in degrees. required: [angle]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.1.0.yaml0000644000446400020070000000104013567314601030663 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.1.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.1.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: angle: type: number description: Angle, in degrees. required: [angle]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.2.0.yaml0000644000446400020070000000113713567314601030673 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.2.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.2.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: angle: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. 
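A hedged illustration (editor's addition, not part of the published schema): the quantity alternative above permits an angle with an explicit unit; assuming the value/unit layout of `unit/quantity-1.1.0` defined elsewhere in this package, an instance might look like ``` !transform/rotate2d-1.2.0 angle: !unit/quantity-1.1.0 {value: 30.0, unit: !unit/unit-1.0.0 deg} ```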
required: [angle] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.3.0.yaml0000644000446400020070000000113713567314601030674 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.3.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.3.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: angle: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. required: [angle] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.0.0.yaml0000644000446400020070000000266313567314601030677 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.0.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.0.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.0.0" - type: object properties: phi: type: number description: Angle, in degrees. theta: type: number description: Angle, in degrees. psi: type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.1.0.yaml0000644000446400020070000000266313567314601030700 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.1.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.1.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.1.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.1.0" - type: object properties: phi: type: number description: Angle, in degrees. theta: type: number description: Angle, in degrees. psi: type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. 
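A hedged sketch of the celestial case (editor's addition; the angle values are arbitrary): ``` !transform/rotate3d-1.1.0 {phi: 5.6, theta: -72.05, psi: 180.0, direction: native2celestial} ```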
enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.2.0.yaml0000644000446400020070000000315613567314601030677 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.2.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.2.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.2.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.1.0" - type: object properties: phi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. psi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.3.0.yaml0000644000446400020070000000315613567314601030700 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.3.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.3.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.3.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.2.0" - type: object properties: phi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. psi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. 
enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/rotate_sequence_3d-1.0.0.yaml0000644000446400020070000000244013567314601032717 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate_sequence_3d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate_sequence_3d-1.0.0" title: > Rotation in 3D space. description: | Rotation in 3D space by an arbitrary number of angles about an arbitrary order of "x", "y", "z" axes. examples: - - A sequence of rotations around 5 axes. - | !transform/rotate_sequence_3d-1.0.0 angles: [-0.0193, -0.1432, -0.04, -65.60, 273.089] axes_order: zyxyz rotation_type: cartesian allOf: - $ref: "transform-1.2.0" - type: object properties: angles: type: array items: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The angles of rotation in units of deg. axes_order: description: | A sequence of "x", "y" or "z" characters representing an axis of rotation. The number of characters must equal the number of angles. For the JWST V23 to sky transform the axes are zyxyz. type: string rotation_type: description: | The type of rotation class to initialize. type: string enum: [spherical, cartesian] required: [angles, axes_order, rotation_type] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.0.0.yaml0000644000446400020070000000121113567314601032463 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.0.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.0.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.1.0.yaml0000644000446400020070000000121113567314601032464 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.1.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.1.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.0.0.yaml0000644000446400020070000000055313567314601030235 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.0.0" tag: "tag:stsci.edu:asdf/transform/scale-1.0.0" title: > A Scale model. description: > Multiply the input by a factor. type: object properties: factor: type: number description: Multiplication factor.
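Editor's sketch (not part of the published schema), mirroring the use of this tag in the regions_selector examples above: ``` !transform/scale-1.0.0 {factor: 2.0} ```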
required: [factor]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.1.0.yaml0000644000446400020070000000055313567314601030236 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.1.0" tag: "tag:stsci.edu:asdf/transform/scale-1.1.0" title: > A Scale model. description: > Multiply the input by a factor. type: object properties: factor: type: number description: Multiplication factor. required: [factor]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.2.0.yaml0000644000446400020070000000064413567314601030240 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.2.0" tag: "tag:stsci.edu:asdf/transform/scale-1.2.0" title: > A Scale model. description: > Scale the input by a dimensionless factor. type: object properties: factor: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Scale factor. required: [factor] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.0.0.yaml0000644000446400020070000000056313567314601030264 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.0.0" tag: "tag:stsci.edu:asdf/transform/shift-1.0.0" title: > A Shift operation. description: > Apply an offset in one direction. type: object properties: offset: type: number description: Offset in one direction. required: [offset]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.1.0.yaml0000644000446400020070000000056313567314601030265 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.1.0" tag: "tag:stsci.edu:asdf/transform/shift-1.1.0" title: > A Shift operation. description: > Apply an offset in one direction. type: object properties: offset: type: number description: Offset in one direction. required: [offset]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.2.0.yaml0000644000446400020070000000065213567314601030265 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.2.0" tag: "tag:stsci.edu:asdf/transform/shift-1.2.0" title: > A Shift operation. description: > Apply an offset in one direction. type: object properties: offset: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Offset in one direction. required: [offset] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.0.0.yaml0000644000446400020070000000143713567314601033042 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.0.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.0.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation.
The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.1.0.yaml0000644000446400020070000000140113567314601033032 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.1.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.1.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.2.0.yaml0000644000446400020070000000140113567314601033033 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.2.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.2.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.0.0.yaml0000644000446400020070000000246013567314601034575 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.0.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.0.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: type: number description: | The latitude $\theta_0$ of the reference point, in degrees. 
default: 90asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.1.0.yaml0000644000446400020070000000246013567314601034576 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.1.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.2.0.yaml0000644000446400020070000000271513567314601034602 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.2.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.2.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.0.0.yaml0000644000446400020070000000147213567314601032006 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.0.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.0.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. 
The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.1.0.yaml0000644000446400020070000000143413567314601032005 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.1.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.1.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.2.0.yaml0000644000446400020070000000143413567314601032006 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.2.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.2.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.0.0.yaml0000644000446400020070000000164413567314601030777 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.0.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.0.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. 
- | !transform/subtract-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.1.0.yaml0000644000446400020070000000164413567314601031000 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.1.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.1.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. - | !transform/subtract-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.2.0.yaml0000644000446400020070000000164413567314601031001 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.2.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.2.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. - | !transform/subtract-1.2.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.0.0.yaml0000644000446400020070000000323213567314601030575 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.0.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.0.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. type: object properties: lookup_table: description: > Table values. anyOf: - $ref: ../core/ndarray-1.0.0 - type: array points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear", "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised.
If False, then "fill_value" is used. type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.1.0.yaml0000644000446400020070000000323213567314601030576 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.1.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.1.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. type: object properties: lookup_table: description: > Table values. anyOf: - $ref: ../core/ndarray-1.0.0 - type: array points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear", "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used. type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.2.0.yaml0000644000446400020070000000334713567314601030606 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.2.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.2.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. type: object properties: lookup_table: description: > Table values. anyOf: - type: array - $ref: ../core/ndarray-1.0.0 - $ref: ../unit/quantity-1.1.0 points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 - $ref: ../unit/quantity-1.1.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear", "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used.
type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.0.0.yaml0000644000446400020070000000071413567314601034323 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.0.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.1.0.yaml0000644000446400020070000000071413567314601034324 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.1.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.0.0.yaml0000644000446400020070000000200413567314601031152 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.0.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string domain: description: | The domain (range of valid inputs) to the transform. Each entry in the list corresponds to an input dimension. type: array items: $ref: "domain-1.0.0" inverse: description: | Explicitly sets the inverse transform of this transform. If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.0.0" additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.1.0.yaml0000644000446400020070000000200413567314601031153 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.1.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string domain: description: | The domain (range of valid inputs) to the transform. Each entry in the list corresponds to an input dimension. type: array items: $ref: "domain-1.0.0" inverse: description: | Explicitly sets the inverse transform of this transform. 
If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.1.0" additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.2.0.yaml0000644000446400020070000000146513567314601031166 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.2.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string inverse: description: | Explicitly sets the inverse transform of this transform. If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.2.0" additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.0.0.yaml0000644000446400020070000000127113567314601030762 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0" title: | Base class of all zenithal (or azimuthal) projections. description: | Zenithal projections are completely specified by defining the radius as a function of native latitude, $R_\theta$. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y, x) \\ R_\theta &= \sqrt{x^2 + y^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin \phi \\ y &= R_\theta \cos \phi$$ allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.1.0.yaml0000644000446400020070000000127113567314601030763 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0" title: | Base class of all zenithal (or azimuthal) projections. description: | Zenithal projections are completely specified by defining the radius as a function of native latitude, $R_\theta$. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y, x) \\ R_\theta &= \sqrt{x^2 + y^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin \phi \\ y &= R_\theta \cos \phi$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.2.0.yaml0000644000446400020070000000127113567314601030764 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.2.0" title: | Base class of all zenithal (or azimuthal) projections. description: | Zenithal projections are completely specified by defining the radius as a function of native latitude, $R_\theta$. 
The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y, x) \\ R_\theta &= \sqrt{x^2 + y^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin \phi \\ y &= R_\theta \cos \phi$$ allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.0.0.yaml0000644000446400020070000000162213567314601033141 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.0.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.1.0.yaml0000644000446400020070000000156413567314601033147 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.1.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.2.0.yaml0000644000446400020070000000156413567314601033150 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.2.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
$ref: "zenithal-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.0.0.yaml0000644000446400020070000000136213567314601033375 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.0.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.1.0.yaml0000644000446400020070000000132413567314601033374 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.1.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.2.0.yaml0000644000446400020070000000132413567314601033375 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.2.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.2.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.0.0.yaml0000644000446400020070000000261613567314601033377 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.0.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.0.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: type: number description: | Look angle, in degrees. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.1.0.yaml0000644000446400020070000000261613567314601033400 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.1.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: type: number description: | Look angle, in degrees. default: 0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.2.0.yaml0000644000446400020070000000301313567314601033371 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.2.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Look angle, in degrees. default: 0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/unit/0000755000446400020070000000000013605166132024566 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/unit/defunit-1.0.0.yaml0000644000446400020070000000147013567314601027547 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/defunit-1.0.0" title: Define a new physical unit. description: | Defines a new unit. It can be used to either: - Define a new base unit. - Create a new unit name that is equivalent to a given unit. The new unit must be defined before any unit tags that use it. tag: "tag:stsci.edu:asdf/unit/defunit-1.0.0" type: object properties: name: description: The name of the new unit. type: string pattern: "[A-Za-z_][A-Za-z0-9_]+" unit: description: | The unit that the new name is equivalent to. It is optional, and if not provided, or ``null``, this ``defunit`` defines a new base unit. anyOf: - $ref: "unit-1.0.0" - type: "null" required: [name]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/unit/quantity-1.1.0.yaml0000644000446400020070000000230213567314601027763 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/quantity-1.1.0" tag: "tag:stsci.edu:asdf/unit/quantity-1.1.0" title: > Represents a Quantity object from astropy examples: - - A quantity consisting of a scalar value and unit - | !unit/quantity-1.1.0 value: 3.14159 unit: km - - A quantity consisting of a single value in an array - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 [2.71828] unit: A - - A quantity with an array of values - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 [1, 2, 3, 4] unit: s - - A quantity with an n-dimensional array of values - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 datatype: float64 data: [[1, 2, 3], [4, 5, 6]] unit: pc type: object properties: value: description: | A vector of one or more values anyOf: - type: number - $ref: "../core/ndarray-1.0.0" unit: description: | The unit corresponding to the values $ref: unit-1.0.0 required: [value, unit] ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/unit/unit-1.0.0.yaml0000644000446400020070000000105313567314601027065 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/unit-1.0.0" title: Physical unit.
description: > This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax. examples: - - Example unit - | !unit/unit-1.0.0 "2.1798721 10-18kg m2 s-2" anyOf: - tag: "tag:stsci.edu:asdf/unit/unit-1.0.0" - {} type: string pattern: "[\x00-\x7f]*" asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/version_map-1.0.0.yaml0000644000446400020070000000707113567314601027457 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.0.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.0.0 tag:stsci.edu:asdf/transform/add: 1.0.0 tag:stsci.edu:asdf/transform/affine: 1.0.0 tag:stsci.edu:asdf/transform/airy: 1.0.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/compose: 1.0.0 tag:stsci.edu:asdf/transform/concatenate: 1.0.0 tag:stsci.edu:asdf/transform/conic: 1.0.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.0.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.0.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.0.0 tag:stsci.edu:asdf/transform/constant: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.0.0 tag:stsci.edu:asdf/transform/divide: 1.0.0 tag:stsci.edu:asdf/transform/domain: 1.0.0 tag:stsci.edu:asdf/transform/generic: 1.0.0 tag:stsci.edu:asdf/transform/gnomonic: 1.0.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.0.0 tag:stsci.edu:asdf/transform/healpix: 1.0.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.0.0 tag:stsci.edu:asdf/transform/identity: 1.0.0 tag:stsci.edu:asdf/transform/label_mapper: 1.0.0 tag:stsci.edu:asdf/transform/mercator: 1.0.0 tag:stsci.edu:asdf/transform/molleweide: 1.0.0 tag:stsci.edu:asdf/transform/multiply: 1.0.0 tag:stsci.edu:asdf/transform/parabolic: 1.0.0 tag:stsci.edu:asdf/transform/plate_carree: 1.0.0 tag:stsci.edu:asdf/transform/polyconic: 1.0.0 tag:stsci.edu:asdf/transform/polynomial: 1.0.0 tag:stsci.edu:asdf/transform/power: 1.0.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.0.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.0.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/quadcube: 1.0.0 tag:stsci.edu:asdf/transform/regions_selector: 1.0.0 tag:stsci.edu:asdf/transform/remap_axes: 1.0.0 tag:stsci.edu:asdf/transform/rotate2d: 1.0.0 tag:stsci.edu:asdf/transform/rotate3d: 1.0.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.0.0 tag:stsci.edu:asdf/transform/scale: 1.0.0 tag:stsci.edu:asdf/transform/shift: 1.0.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.0.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.0.0 tag:stsci.edu:asdf/transform/stereographic: 1.0.0 tag:stsci.edu:asdf/transform/subtract: 1.0.0 tag:stsci.edu:asdf/transform/tabular: 1.0.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/transform: 1.0.0 tag:stsci.edu:asdf/transform/zenithal: 1.0.0 
tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.0.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.0.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.0.0 tag:stsci.edu:asdf/wcs/frame: 1.0.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.0.0 tag:stsci.edu:asdf/wcs/step: 1.0.0 tag:stsci.edu:asdf/wcs/wcs: 1.0.0 ...asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/version_map-1.1.0.yaml0000644000446400020070000000714213567314601027457 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.0.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.1.0 tag:stsci.edu:asdf/transform/airy: 1.1.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic: 1.1.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.1.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.1.0 tag:stsci.edu:asdf/transform/constant: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.1.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/generic: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.1.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.1.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/quadcube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.1.0 tag:stsci.edu:asdf/transform/rotate3d: 1.1.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 tag:stsci.edu:asdf/transform/scale: 1.1.0 tag:stsci.edu:asdf/transform/shift: 1.1.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.1.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.1.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 
tag:stsci.edu:asdf/transform/transform: 1.1.0 tag:stsci.edu:asdf/transform/zenithal: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.1.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/frame: 1.1.0 tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.1.0 tag:stsci.edu:asdf/wcs/wcs: 1.0.0 ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/version_map-1.2.0.yaml0000644000446400020070000000714213567314601027460 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.2.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic: 1.2.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.2.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.2.0 tag:stsci.edu:asdf/transform/constant: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.2.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/generic: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.1.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/quadcube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.2.0 tag:stsci.edu:asdf/transform/rotate3d: 1.2.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 
tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/transform: 1.2.0 tag:stsci.edu:asdf/transform/zenithal: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/frame: 1.1.0 tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.1.0 tag:stsci.edu:asdf/wcs/wcs: 1.1.0 ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/version_map-1.3.0.yaml0000644000446400020070000000662213567314601027463 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/integer: 1.0.0 tag:stsci.edu:asdf/core/external_array: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.2.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic: 1.2.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.2.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.2.0 tag:stsci.edu:asdf/transform/constant: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.2.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/generic: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.1.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/quadcube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.2.0 tag:stsci.edu:asdf/transform/rotate3d: 1.2.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 
tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/transform: 1.1.0 tag:stsci.edu:asdf/transform/zenithal: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/version_map-1.4.0.yaml0000644000446400020070000000705713567314601027467 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/integer: 1.0.0 tag:stsci.edu:asdf/core/external_array: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.2.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic: 1.2.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.2.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.2.0 tag:stsci.edu:asdf/transform/constant: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.2.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/fix_inputs: 1.1.0 tag:stsci.edu:asdf/transform/generic: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/math_functions: 1.0.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/ortho_polynomial: 1.0.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.1.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/quadcube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.2.0 
tag:stsci.edu:asdf/transform/rotate3d: 1.2.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/transform: 1.1.0 tag:stsci.edu:asdf/transform/zenithal: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/0000755000446400020070000000000013605166132024403 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.0.0.yaml0000644000446400020070000000071413567314601031045 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/celestial_frame-1.0.0" title: > Represents a celestial frame. allOf: - type: object properties: axes_names: minItems: 2 maxItems: 3 axes_order: minItems: 2 maxItems: 3 unit: minItems: 2 maxItems: 3 - $ref: frame-1.0.0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.1.0.yaml0000644000446400020070000000071513567314601031047 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0" title: > Represents a celestial frame. allOf: - type: object properties: axes_names: minItems: 2 maxItems: 3 axes_order: minItems: 2 maxItems: 3 unit: minItems: 2 maxItems: 3 - $ref: frame-1.1.0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.0.0.yaml0000644000446400020070000000074113567314601031102 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/composite_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/composite_frame-1.0.0" title: > Represents a set of frames. allOf: - type: object properties: name: description: Name of composite frame. type: string frames: description: List of frames in the composite frame. type: array - $ref: frame-1.0.0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.1.0.yaml0000644000446400020070000000071313567314601031102 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/composite_frame-1.1.0" title: > Represents a set of frames. allOf: - type: object properties: name: description: Name of composite frame. type: string frames: description: List of frames in the composite frame. 
type: array asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.0.0.yaml0000644000446400020070000001316713567314601027026 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/frame-1.0.0" title: | The base class of all coordinate frames. description: | These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. Most of these coordinate frames are defined in [IERS conventions](http://www.iers.org/IERS/EN/Publications/TechnicalNotes/tn36.html). examples: - - | A celestial frame in the FK4 reference frame. - | !wcs/celestial_frame-1.0.0 axes_names: [ra, dec] name: CelestialFrame reference_frame: type: FK4 equinox: !time/time-1.0.0 '2010-01-01 00:00:00.000' obstime: !time/time-1.0.0 '2015-01-01 00:00:00.000' unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] type: object properties: name: description: | A user-friendly name for the frame. type: string axes_order: description: | The order of the axes. type: array items: type: integer axes_names: description: | The name of each axis in this frame. type: array items: anyOf: - type: string - type: 'null' reference_frame: description: | The reference frame. type: object properties: type: description: | The reference frame type. Some reference frame types require additional properties, listed next to each reference frame type below. The reference frame types are: - `ICRS` - `FK5`: `equinox`. - `FK4`: `equinox` and optionally `obstime`. - `FK4_noeterms`: `equinox` and optionally `obstime`. - `galactic` - `galactocentric`: `galcen_distance`, `galcen_ra`, `galcen_dec`, `z_sun` and `roll`. - `GCRS`: `obstime`, `obsgeoloc`, and `obsgeovel`. - `CIRS`: `obstime`. - `ITRS`: `obstime`. - `precessed_geocentric`: `obstime`, `obsgeoloc`, and `obsgeovel`. enum: [ICRS, FK5, FK4, FK4_noeterms, galactic, galactocentric, GCRS, CIRS, ITRS, precessed_geocentric] default: ICRS equinox: description: | The equinox of the reference frame. Required when `reference_frame` is one of: `FK5`, `FK4`, `FK4_noeterms` $ref: ../time/time-1.0.0 obstime: description: | The observation time of the reference frame, used to determine the location of the Earth. Required when `reference_frame` is one of: `FK4`, `FK4_noeterms`, `GCRS`, `CIRS`, `ITRS` If not provided, it defaults to the same value as `equinox`. $ref: ../time/time-1.0.0 galcen_distance: description: | The distance from the Sun to the Galactic center. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: pc galcen_ra: description: | The Right Ascension (RA) of the Galactic center in the ICRS frame. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: deg galcen_dec: description: | The Declination (DEC) of the Galactic center in the ICRS frame. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: deg z_sun: description: | The distance from the sun to the galactic midplane. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: pc roll: description: | The angle to rotate about the final x-axis, relative to the orientation for `galactic`. Required when `reference_frame` is `galactocentric`.
type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: deg obsgeoloc: description: | 3-vector giving the position of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: - type: array items: type: number minItems: 3 maxItems: 3 - $ref: ../unit/unit-1.0.0 default: m default: - [0, 0, 0] obsgeovel: description: | 3-vector giving the velocity of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: - type: array items: type: number minItems: 3 maxItems: 3 - $ref: ../unit/unit-1.0.0 default: m/s default: - [0, 0, 0] required: [type] unit: description: | Units for each axis. type: array items: $ref: ../unit/unit-1.0.0 required: [name] additionalProperties: trueasdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.1.0.yaml0000644000446400020070000001250513567314601027022 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/frame-1.1.0" title: | The base class of all coordinate frames. description: | These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. Most of these coordinate frames are defined in [IERS conventions](http://www.iers.org/IERS/EN/Publications/TechnicalNotes/tn36.html). examples: - - | A celestial frame in the FK4 reference frame. - | !wcs/celestial_frame-1.1.0 axes_names: [ra, dec] name: CelestialFrame reference_frame: type: FK4 equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000' obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000' unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] type: object properties: name: description: | A user-friendly name for the frame. type: string axes_order: description: | The order of the axes. type: array items: type: integer axes_names: description: | The name of each axis in this frame. type: array items: anyOf: - type: string - type: 'null' reference_frame: description: | The reference frame. type: object properties: type: description: | The reference frame type. Some reference frame types require additional properties, listed next to each reference frame type below. The reference frame types are: - `ICRS` - `FK5`: `equinox`. - `FK4`: `equinox` and optionally `obstime`. - `FK4_noeterms`: `equinox` and optionally `obstime`. - `galactic` - `galactocentric`: `galcen_distance`, `galcen_coord`, `z_sun` and `roll`. - `GCRS`: `obstime`, `obsgeoloc`, and `obsgeovel`. - `CIRS`: `obstime`. - `ITRS`: `obstime`. - `precessed_geocentric`: `obstime`, `obsgeoloc`, and `obsgeovel`. enum: [ICRS, FK5, FK4, FK4_noeterms, galactic, galactocentric, GCRS, CIRS, ITRS, precessed_geocentric] default: ICRS equinox: description: | The equinox of the reference frame. Required when `reference_frame` is one of: `FK5`, `FK4`, `FK4_noeterms` $ref: ../time/time-1.1.0 obstime: description: | The observation time of the reference frame, used to determine the location of the Earth. Required when `reference_frame` is one of: `FK4`, `FK4_noeterms`, `GCRS`, `CIRS`, `ITRS` If not provided, it defaults to the same value as `equinox`. $ref: ../time/time-1.1.0 galcen_distance: description: | The distance from the Sun to the Galactic center.
Required when `reference_frame` is `galactocentric`. $ref: ../unit/quantity-1.1.0 galcen_coord: description: | The ICRS coordinates of the Galactic center. Required when `reference_frame` is `galactocentric`. $ref: icrs_coord-1.1.0 galcen_v_sun: description: | The velocity of the sun in the galactocentric frame as Cartesian velocity components. type: array items: $ref: ../unit/quantity-1.1.0 minItems: 3 maxItems: 3 default: - { value: [0], unit: km/s } - { value: [0], unit: km/s } - { value: [0], unit: km/s } z_sun: description: | The distance from the sun to the galactic midplane. Required when `reference_frame` is `galactocentric`. $ref: ../unit/quantity-1.1.0 roll: description: | The angle to rotate about the final x-axis, relative to the orientation for `galactic`. Required when `reference_frame` is `galactocentric`. $ref: ../unit/quantity-1.1.0 obsgeoloc: description: | 3-vector giving the position of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: $ref: ../unit/quantity-1.1.0 minItems: 3 maxItems: 3 default: - { value: [0], unit: m } - { value: [0], unit: m } - { value: [0], unit: m } obsgeovel: description: | 3-vector giving the velocity of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: $ref: ../unit/quantity-1.1.0 minItems: 3 maxItems: 3 default: - { value: [0], unit: m/s } - { value: [0], unit: m/s } - { value: [0], unit: m/s } required: [type] unit: description: | Units for each axis. type: array items: $ref: ../unit/unit-1.0.0 required: [name] additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/icrs_coord-1.1.0.yaml0000644000446400020070000000202213567314601030047 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0" tag: "tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0" title: | Represents an ICRS coordinate object from astropy description: This object represents the right ascension (RA) and declination of an ICRS coordinate or frame. The astropy ICRS class contains additional fields that may be useful to add here in the future. type: object properties: ra: type: object description: | A longitude representing the right ascension of the ICRS coordinate properties: value: type: number unit: $ref: ../unit/unit-1.0.0 default: deg wrap_angle: $ref: ../unit/quantity-1.1.0 default: "360 deg" dec: type: object description: | A latitude representing the declination of the ICRS coordinate properties: value: type: number unit: $ref: ../unit/unit-1.0.0 default: deg required: [ra, dec] ... asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.0.0.yaml0000644000446400020070000000116713567314601030720 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/spectral_frame-1.0.0" title: > Represents a spectral frame. allOf: - type: object properties: reference_position: description: | The position of the reference frame.
enum: [geocenter, barycenter, heliocenter] default: geocenter axes_names: minItems: 1 maxItems: 1 axes_order: minItems: 1 maxItems: 1 unit: minItems: 1 maxItems: 1 - $ref: frame-1.0.0asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.1.0.yaml0000644000446400020070000000117013567314601030713 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0" title: > Represents a spectral frame. allOf: - type: object properties: reference_position: description: | The position of the reference frame. enum: [geocenter, barycenter, heliocenter] default: geocenter axes_names: minItems: 1 maxItems: 1 axes_order: minItems: 1 maxItems: 1 unit: minItems: 1 maxItems: 1 - $ref: frame-1.1.0 asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.0.0.yaml0000644000446400020070000000136513567314601026704 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/step-1.0.0" tag: "tag:stsci.edu:asdf/wcs/step-1.0.0" title: > Describes a single step of a WCS transform pipeline. description: > examples: [] type: object properties: frame: description: | The frame of the inputs to the transform. anyOf: - type: string - $ref: frame-1.0.0 transform: description: | The transform from this step to the next one. The last step in a WCS should not have a transform, but exists only to describe the frames and units of the final output axes. anyOf: - $ref: ../transform/transform-1.0.0 - type: 'null' default: null required: [frame]asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.1.0.yaml0000644000446400020070000000136613567314601026706 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/step-1.1.0" tag: "tag:stsci.edu:asdf/wcs/step-1.1.0" title: > Describes a single step of a WCS transform pipeline. description: > examples: [] type: object properties: frame: description: | The frame of the inputs to the transform. anyOf: - type: string - $ref: frame-1.1.0 transform: description: | The transform from this step to the next one. The last step in a WCS should not have a transform, but exists only to describe the frames and units of the final output axes. anyOf: - $ref: ../transform/transform-1.1.0 - type: 'null' default: null required: [frame] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.2.0.yaml0000644000446400020070000000136613567314601026707 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/step-1.2.0" tag: "tag:stsci.edu:asdf/wcs/step-1.2.0" title: > Describes a single step of a WCS transform pipeline. description: > examples: [] type: object properties: frame: description: | The frame of the inputs to the transform. anyOf: - type: string - $ref: frame-1.1.0 transform: description: | The transform from this step to the next one. The last step in a WCS should not have a transform, but exists only to describe the frames and units of the final output axes. 
anyOf: - $ref: ../transform/transform-1.2.0 - type: 'null' default: null required: [frame] asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.0.0.yaml0000644000446400020070000000203113567314601026514 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.0.0" tag: "tag:stsci.edu:asdf/wcs/wcs-1.0.0" title: > A system for describing generalized world coordinate transformations. description: > ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module. type: object properties: name: description: | A descriptive name for this WCS. type: string steps: description: | A list of steps in the forward transformation from detector to world coordinates. The inverse transformation is determined automatically by reversing this list, and inverting each of the individual transforms according to the rules described in [inverse](ref:http://stsci.edu/schemas/asdf/transform/transform-1.0.0/properties/inverse). type: array items: $ref: step-1.0.0 required: [name, steps] additionalProperties: trueasdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.1.0.yaml0000644000446400020070000000176113567314601026526 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.1.0" tag: "tag:stsci.edu:asdf/wcs/wcs-1.1.0" title: > A system for describing generalized world coordinate transformations. description: > ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module. type: object properties: name: description: | A descriptive name for this WCS. type: string steps: description: | A list of steps in the forward transformation from detector to world coordinates. The inverse transformation is determined automatically by reversing this list, and inverting each of the individual transforms according to the rules described in [inverse](ref:transform/transform-1.1.0:inverse). type: array items: $ref: step-1.1.0 required: [name, steps] additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.2.0.yaml0000644000446400020070000000176113567314601026527 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.2.0" tag: "tag:stsci.edu:asdf/wcs/wcs-1.2.0" title: > A system for describing generalized world coordinate transformations. description: > ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module. type: object properties: name: description: | A descriptive name for this WCS. type: string steps: description: | A list of steps in the forward transformation from detector to world coordinates. The inverse transformation is determined automatically by reversing this list, and inverting each of the individual transforms according to the rules described in [inverse](ref:transform/transform-1.2.0:inverse). 
type: array items: $ref: step-1.1.0 required: [name, steps] additionalProperties: true asdf-2.5.1/asdf-standard/schemas/stsci.edu/yaml-schema/0000755000446400020070000000000013605166132025072 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf-standard/schemas/stsci.edu/yaml-schema/draft-01.yaml0000644000446400020070000000652513567314601027307 0ustar eslavichSTSCI\science00000000000000%YAML 1.1 --- $schema: "http://json-schema.org/draft-04/schema" id: "http://stsci.edu/schemas/yaml-schema/draft-01" title: YAML Schema description: | A metaschema extending JSON Schema's metaschema to add support for some YAML-specific constructions. allOf: - $ref: "http://json-schema.org/draft-04/schema" - type: object properties: tag: description: | A fully-qualified YAML tag name that should be associated with the object type returned by the YAML parser; for example, the object must be an instance of the class registered with the parser to create instances of objects with this tag. Implementation of this validator is optional and depends on details of the YAML parser. type: string minLength: 6 propertyOrder: description: | Specifies the default order of the properties when writing out. Any keys not listed in **propertyOrder** will be in arbitrary order at the end. This field applies only to nodes with **object** type. type: array items: type: string flowStyle: description: | Specifies the default serialization style to use for an array or object. YAML supports multiple styles for arrays/sequences and objects/maps, called "block style" and "flow style". For example:: Block style: !!map Clark : Evans Ingy : döt Net Oren : Ben-Kiki Flow style: !!map { Clark: Evans, Ingy: döt Net, Oren: Ben-Kiki } This property gives a hint to the tool outputting the YAML which style to use. If not provided, the library is free to use whatever heuristics it wishes to determine the output style. This property does not enforce any particular style on YAML being parsed. type: string enum: [block, flow] style: description: | Specifies the default serialization style to use for a string. YAML supports multiple styles for strings: ```yaml Inline style: "First line\nSecond line" Literal style: | First line Second line Folded style: > First line Second line ``` This property gives a hint to the tool outputting the YAML which style to use. If not provided, the library is free to use whatever heuristics it wishes to determine the output style. This property does not enforce any particular style on YAML being parsed. type: string enum: [inline, literal, folded] examples: description: | A list of examples to help document the schema. Each pair is a prose description followed by a string containing YAML content. For example: ```yaml examples: - - Complex number: 1 real, -1 imaginary - "!complex 1-1j" type: array items: ``` type: array items: type: array items: - type: string - anyOf: - type: string - type: object ... asdf-2.5.1/asdf.egg-info/0000755000446400020070000000000013605166132017242 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/asdf.egg-info/PKG-INFO0000644000446400020070000003125513605166131020344 0ustar eslavichSTSCI\science00000000000000Metadata-Version: 2.1 Name: asdf Version: 2.5.1 Summary: Python tools to handle ASDF files Home-page: http://github.com/spacetelescope/asdf Author: Erik Bray, Dan D'Avella, Michael Droettboom Author-email: mdroe@stsci.edu License: BSD Description: ASDF - Advanced Scientific Data Format ====================================== .. 
_begin-summary-text: The **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat (ASDF) is a next-generation interchange format for scientific data. This package contains the Python implementation of the ASDF Standard. More information on the ASDF Standard itself can be found `here `__. The ASDF format has the following features: * A hierarchical, human-readable metadata format (implemented using `YAML `__) * Numerical arrays are stored as binary data blocks which can be memory mapped. Data blocks can optionally be compressed. * The structure of the data can be automatically validated using schemas (implemented using `JSON Schema `__) * Native Python data types (numerical types, strings, dicts, lists) are serialized automatically * ASDF can be extended to serialize custom data types .. _end-summary-text: ASDF is under active development `on github `__. More information on contributing can be found `below <#contributing>`__. Overview -------- This section outlines basic use cases of the ASDF package for creating and reading ASDF files. Creating a file ~~~~~~~~~~~~~~~ .. _begin-create-file-text: We're going to store several `numpy` arrays and other data to an ASDF file. We do this by creating a "tree", which is simply a `dict`, and we provide it as input to the constructor of `AsdfFile`: .. code:: python import asdf import numpy as np # Create some data sequence = np.array([x for x in range(100)]) squares = np.array([x**2 for x in range(100)]) random = np.random.random(100) # Store the data in an arbitrarily nested dictionary tree = { 'foo': 42, 'name': 'Monty', 'sequence': sequence, 'powers': { 'squares' : squares }, 'random': random } # Create the ASDF file object from our data tree af = asdf.AsdfFile(tree) # Write the data to a new file af.write_to('example.asdf') If we open the newly created file, we can see some of the key features of ASDF on display: :: #ASDF 1.0.0 #ASDF_STANDARD 1.2.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.0.0} history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: {name: asdf, version: 2.0.0} foo: 42 name: Monty powers: squares: !core/ndarray-1.0.0 source: 1 datatype: int64 byteorder: little shape: [100] random: !core/ndarray-1.0.0 source: 2 datatype: float64 byteorder: little shape: [100] sequence: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [100] ... The metadata in the file mirrors the structure of the tree that was stored. It is hierarchical and human-readable. Notice that metadata has been added to the tree that was not explicitly given by the user. Notice also that the numerical array data is not stored in the metadata tree itself. Instead, it is stored as binary data blocks below the metadata section (not shown here). It is possible to compress the array data when writing the file: .. code:: python af.write_to('compressed.asdf', all_array_compression='zlib') Available compression algorithms are ``'zlib'``, ``'bzp2'``, and ``'lz4'``. .. _end-create-file-text: Reading a file ~~~~~~~~~~~~~~ .. _begin-read-file-text: To read an existing ASDF file, we simply use the top-level `open` function of the `asdf` package: .. code:: python import asdf af = asdf.open('example.asdf') The `open` function also works as a context handler: .. code:: python with asdf.open('example.asdf') as af: ... 
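The context-handler form guarantees that the file is closed as soon as the block exits, so any work with the file should happen inside it. A minimal sketch (the output filename ``copy.asdf`` is arbitrary):

.. code:: python

    import asdf

    with asdf.open('example.asdf') as af:
        # The file and its data blocks are only available inside this
        # block, so any re-serialization must happen here
        af.write_to('copy.asdf')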
To access the data stored in the file, use the top-level `AsdfFile.tree` attribute: .. code:: python >>> import asdf >>> af = asdf.open('example.asdf') >>> af.tree {'asdf_library': {'author': 'Space Telescope Science Institute', 'homepage': 'http://github.com/spacetelescope/asdf', 'name': 'asdf', 'version': '1.3.1'}, 'foo': 42, 'name': 'Monty', 'powers': {'squares': }, 'random': , 'sequence': } The tree is simply a Python `dict`, and nodes are accessed like any other dictionary entry: .. code:: python >>> af.tree['name'] 'Monty' >>> af.tree['powers'] {'squares': } Array data remains unloaded until it is explicitly accessed: .. code:: python >>> af.tree['powers']['squares'] array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100, 121, 144, 169, 196, 225, 256, 289, 324, 361, 400, 441, 484, 529, 576, 625, 676, 729, 784, 841, 900, 961, 1024, 1089, 1156, 1225, 1296, 1369, 1444, 1521, 1600, 1681, 1764, 1849, 1936, 2025, 2116, 2209, 2304, 2401, 2500, 2601, 2704, 2809, 2916, 3025, 3136, 3249, 3364, 3481, 3600, 3721, 3844, 3969, 4096, 4225, 4356, 4489, 4624, 4761, 4900, 5041, 5184, 5329, 5476, 5625, 5776, 5929, 6084, 6241, 6400, 6561, 6724, 6889, 7056, 7225, 7396, 7569, 7744, 7921, 8100, 8281, 8464, 8649, 8836, 9025, 9216, 9409, 9604, 9801]) >>> import numpy as np >>> expected = [x**2 for x in range(100)] >>> np.equal(af.tree['powers']['squares'], expected).all() True By default, uncompressed data blocks are memory mapped for efficient access. Memory mapping can be disabled by using the ``copy_arrays`` option of `open` when reading: .. code:: python af = asdf.open('example.asdf', copy_arrays=True) .. _end-read-file-text: For more information and for advanced usage examples, see the `documentation <#documentation>`__. Extending ASDF ~~~~~~~~~~~~~~ Out of the box, the ``asdf`` package automatically serializes and deserializes native Python types. It is possible to extend ``asdf`` by implementing custom tag types that correspond to custom user types. More information on extending ASDF can be found in the `official documentation `__. Installation ------------ .. _begin-pip-install-text: Stable releases of the ASDF Python package are registered `at PyPi `__. The latest stable version can be installed using ``pip``: :: $ pip install asdf .. _begin-source-install-text: The latest development version of ASDF is available from the ``master`` branch `on github `__. To clone the project: :: $ git clone https://github.com/spacetelescope/asdf To install: :: $ cd asdf $ git submodule update --init $ pip install . To install in `development mode `__:: $ pip install -e . .. note:: The source repository makes use of a git submodule for referencing the schemas provided by the ASDF standard. While this submodule is automatically initialized when installing the package (including in development mode), it may be necessary for developers to manually update the submodule if changes are made upstream. See the `documentation on git submodules `__ for more information. .. _end-source-install-text: Testing ------- .. _begin-testing-text: To install the test dependencies from a source checkout of the repository: :: $ pip install -e .[tests] To run the unit tests from a source checkout of the repository: :: $ pytest It is also possible to run the test suite from an installed version of the package. In a Python interpreter: .. code:: python import asdf asdf.test() Please note that the `astropy `__ package must be installed to run the tests. It is also possible to run the tests using `tox `__. 
It is first necessary to install ``tox`` and `tox-conda `__: :: $ pip install tox tox-conda To list all available environments: :: $ tox -va To run a specific environment: :: $ tox -e .. _end-testing-text: Documentation ------------- More detailed documentation on this software package can be found `here `__. More information on the ASDF Standard itself can be found `here `__. If you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, information can be found `here `__. Contributing ------------ We welcome feedback and contributions to the project. Contributions of code, documentation, or general feedback are all appreciated. Please follow the `contributing guidelines `__ to submit an issue or a pull request. We strive to provide a welcoming community to all of our users by abiding to the `Code of Conduct `__. Platform: UNKNOWN Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.3 Description-Content-Type: text/x-rst Provides-Extra: docs Provides-Extra: tests Provides-Extra: all asdf-2.5.1/asdf.egg-info/SOURCES.txt0000644000446400020070000004627313605166132021142 0ustar eslavichSTSCI\science00000000000000.gitignore .gitmodules .rtd-environment.yml .travis.yml CHANGES.rst CODE_OF_CONDUCT.md CONTRIBUTING.md MANIFEST.in README.rst conftest.py environment.yml pyproject.toml readthedocs.yml setup.cfg setup.py tox.ini asdf/__init__.py asdf/_internal_init.py asdf/asdf.py asdf/asdftypes.py asdf/block.py asdf/compression.py asdf/conftest.py asdf/constants.py asdf/exceptions.py asdf/extension.py asdf/fits_embed.py asdf/generic_io.py asdf/reference.py asdf/resolver.py asdf/schema.py asdf/stream.py asdf/tagged.py asdf/treeutil.py asdf/type_index.py asdf/types.py asdf/util.py asdf/version.py asdf/versioning.py asdf/yamlutil.py asdf-standard/reference_files/README.rst asdf-standard/reference_files/1.0.0/ascii.asdf asdf-standard/reference_files/1.0.0/ascii.yaml asdf-standard/reference_files/1.0.0/basic.asdf asdf-standard/reference_files/1.0.0/basic.yaml asdf-standard/reference_files/1.0.0/complex.asdf asdf-standard/reference_files/1.0.0/complex.yaml asdf-standard/reference_files/1.0.0/compressed.asdf asdf-standard/reference_files/1.0.0/compressed.yaml asdf-standard/reference_files/1.0.0/exploded.asdf asdf-standard/reference_files/1.0.0/exploded.yaml asdf-standard/reference_files/1.0.0/exploded0000.asdf asdf-standard/reference_files/1.0.0/float.asdf asdf-standard/reference_files/1.0.0/float.yaml asdf-standard/reference_files/1.0.0/int.asdf asdf-standard/reference_files/1.0.0/int.yaml asdf-standard/reference_files/1.0.0/shared.asdf asdf-standard/reference_files/1.0.0/shared.yaml asdf-standard/reference_files/1.0.0/stream.asdf asdf-standard/reference_files/1.0.0/stream.yaml asdf-standard/reference_files/1.0.0/unicode_bmp.asdf asdf-standard/reference_files/1.0.0/unicode_bmp.yaml asdf-standard/reference_files/1.0.0/unicode_spp.asdf asdf-standard/reference_files/1.0.0/unicode_spp.yaml asdf-standard/reference_files/generate/README.rst asdf-standard/reference_files/generate/generate asdf-standard/schemas/stsci.edu/asdf/asdf-schema-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.0.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/version_map-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.4.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/column-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/complex-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/constant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/extension_metadata-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/externalarray-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/history_entry-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/integer-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/software-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/table-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/fits/fits-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/time/time-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/time/time-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/add-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/add-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/add-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.2.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/domain-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/fix_inputs-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/linear1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/math_functions-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.2.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/multiplyscale-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/ortho_polynomial-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate_sequence_3d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.1.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/unit/defunit-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/unit/quantity-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/unit/unit-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/icrs_coord-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.2.0.yaml asdf-standard/schemas/stsci.edu/yaml-schema/draft-01.yaml asdf.egg-info/PKG-INFO asdf.egg-info/SOURCES.txt asdf.egg-info/dependency_links.txt asdf.egg-info/entry_points.txt asdf.egg-info/requires.txt 
asdf.egg-info/top_level.txt asdf/commands/__init__.py asdf/commands/defragment.py asdf/commands/diff.py asdf/commands/exploded.py asdf/commands/extension.py asdf/commands/extract.py asdf/commands/main.py asdf/commands/remove_hdu.py asdf/commands/tags.py asdf/commands/to_yaml.py asdf/commands/tests/__init__.py asdf/commands/tests/test_defragment.py asdf/commands/tests/test_diff.py asdf/commands/tests/test_exploded.py asdf/commands/tests/test_extract.py asdf/commands/tests/test_main.py asdf/commands/tests/test_remove_hdu.py asdf/commands/tests/test_tags.py asdf/commands/tests/test_to_yaml.py asdf/commands/tests/data/__init__.py asdf/commands/tests/data/block0.asdf asdf/commands/tests/data/block1.asdf asdf/commands/tests/data/blocks.diff asdf/commands/tests/data/frames.diff asdf/commands/tests/data/frames0.asdf asdf/commands/tests/data/frames1.asdf asdf/commands/tests/data/frames_minimal.diff asdf/compat/__init__.py asdf/compat/jsonschemacompat.py asdf/compat/numpycompat.py asdf/extern/RangeHTTPServer.py asdf/extern/__init__.py asdf/extern/atomicfile.py asdf/extern/decorators.py asdf/tags/__init__.py asdf/tags/core/__init__.py asdf/tags/core/complex.py asdf/tags/core/constant.py asdf/tags/core/external_reference.py asdf/tags/core/integer.py asdf/tags/core/ndarray.py asdf/tags/core/tests/__init__.py asdf/tags/core/tests/test_complex.py asdf/tags/core/tests/test_external_reference.py asdf/tags/core/tests/test_history.py asdf/tags/core/tests/test_integer.py asdf/tags/core/tests/test_ndarray.py asdf/tags/core/tests/data/__init__.py asdf/tags/core/tests/data/datatype-1.0.0.yaml asdf/tags/core/tests/data/ndim-1.0.0.yaml asdf/tests/__init__.py asdf/tests/conftest.py asdf/tests/coveragerc asdf/tests/helpers.py asdf/tests/httpserver.py asdf/tests/test_api.py asdf/tests/test_array_blocks.py asdf/tests/test_asdftypes.py asdf/tests/test_compression.py asdf/tests/test_file_format.py asdf/tests/test_fits_embed.py asdf/tests/test_generic_io.py asdf/tests/test_helpers.py asdf/tests/test_reference.py asdf/tests/test_reference_files.py asdf/tests/test_resolver.py asdf/tests/test_schema.py asdf/tests/test_stream.py asdf/tests/test_versioning.py asdf/tests/test_yaml.py asdf/tests/data/__init__.py asdf/tests/data/asdf.fits.gz asdf/tests/data/complex-42.0.0.yaml asdf/tests/data/custom-1.0.0.yaml asdf/tests/data/custom_flow-1.0.0.yaml asdf/tests/data/custom_flow-1.1.0.yaml asdf/tests/data/custom_schema.yaml asdf/tests/data/custom_schema_definitions.yaml asdf/tests/data/custom_style-1.0.0.yaml asdf/tests/data/default-1.0.0.yaml asdf/tests/data/example_schema.json asdf/tests/data/extension_check.fits asdf/tests/data/foreign_tag_reference-1.0.0.yaml asdf/tests/data/fraction-1.0.0.yaml asdf/tests/data/fractional_2d_coord-1.0.0.yaml asdf/tests/data/missing-1.1.0.yaml asdf/tests/data/self_referencing-1.0.0.yaml asdf/tests/data/tag_reference-1.0.0.yaml asdf/tests/data/version_mismatch.fits docs/Makefile docs/conf.py docs/index.rst docs/make.bat docs/_static/custom.css docs/_static/stsci_logo.png docs/_templates/autosummary/base.rst docs/_templates/autosummary/class.rst docs/_templates/autosummary/module.rst docs/asdf/arrays.rst docs/asdf/asdf_tool.rst docs/asdf/changes.rst docs/asdf/developer_api.rst docs/asdf/extensions.rst docs/asdf/features.rst docs/asdf/install.rst docs/asdf/overview.rst docs/asdf/user_api.rst docs/asdf/using_extensions.rst docs/sphinxext/__init__.py docs/sphinxext/example.py licenses/LICENSE.rst licenses/README.rst licenses/SUNPY_LICENSE.rst pytest_asdf/__init__.py pytest_asdf/extension.py 
pytest_asdf/plugin.pyasdf-2.5.1/asdf.egg-info/dependency_links.txt0000644000446400020070000000000113605166131023307 0ustar eslavichSTSCI\science00000000000000 asdf-2.5.1/asdf.egg-info/entry_points.txt0000644000446400020070000000024713605166131022542 0ustar eslavichSTSCI\science00000000000000[asdf_extensions] builtin = asdf.extension:BuiltinExtension [console_scripts] asdftool = asdf.commands.main:main [pytest11] asdf_schema_tester = pytest_asdf.plugin asdf-2.5.1/asdf.egg-info/requires.txt0000644000446400020070000000036113605166131021641 0ustar eslavichSTSCI\science00000000000000semantic_version>=2.8 pyyaml>=3.10 jsonschema<4,>=2.3 six>=1.9.0 numpy>=1.8 [all] lz4>=0.10 [docs] sphinx sphinx-astropy astropy graphviz matplotlib [tests] pytest astropy gwcs pytest-doctestplus pytest-remotedata pytest-openfiles psutil asdf-2.5.1/asdf.egg-info/top_level.txt0000644000446400020070000000002113605166131021764 0ustar eslavichSTSCI\science00000000000000asdf pytest_asdf asdf-2.5.1/conftest.py0000644000446400020070000000143613605165746017050 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import os import pytest from _pytest.doctest import DoctestItem @pytest.fixture(autouse=True) def _docdir(request): """ Make sure that doctests run in a temporary directory so that any files that are created as part of the test get removed automatically. """ # Trigger ONLY for the doctests. if isinstance(request.node, DoctestItem): # Get the fixture dynamically by its name. tmpdir = request.getfixturevalue('tmpdir') # Chdir only for the duration of the test. olddir = os.getcwd() tmpdir.chdir() yield os.chdir(olddir) else: # For normal tests, we have to yield, since this is a yield-fixture. yield asdf-2.5.1/docs/0000755000446400020070000000000013605166132015563 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/docs/Makefile0000644000446400020070000001116413567314375017241 0ustar eslavichSTSCI\science00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest #This is needed with git because git doesn't create a dir if it's empty $(shell [ -d "_static" ] || mkdir -p _static) help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR) -rm -rf api html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." asdf-2.5.1/docs/_static/0000755000446400020070000000000013605166132017211 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/docs/_static/custom.css0000644000446400020070000000022113567314375021243 0ustar eslavichSTSCI\science00000000000000div.highlight { transition: width 0.5s; -webkit-transition: width 0.5s; overflow: hidden; } div.highlight:hover { width: 150% } asdf-2.5.1/docs/_static/stsci_logo.png0000644000446400020070000016507613567314375022106 0ustar eslavichSTSCI\science00000000000000[binary PNG image data omitted: the STScI logo, a 1917 x 760 pixel sRGB image saved from Adobe Photoshop CC 2015, according to its embedded XMP metadata. The binary run also swallowed the tar header and opening lines of the next file, evidently docs/asdf/arrays.rst, whose surviving text resumes mid-sentence below.]

gT YQ긣5iؽzq϶Ɂ1Md)ơ@HFI P!}{@$c#G# `HkDi(f6c$X$F7RM3WANScA4O\i^q8x$$4u4-^f6!HDlpbF uu5֎qj @I l]NzJ4;lbz]nk afFV<Pb )W TjH^P^h78BY&Xk+F!Vd96DFhf`$lK$QA X㱺*F"K.Κj(Gz$>T-۬ʢesN Ld};yM@bEg?3SP̎!j&! 5lÚI`_0>/"ң0* d&+#QE@f^ 53j!&M19X=`Mj-dFYy(&+j@ ْL>sIV'tсB$#Z/~dnCUKu*t괝[G3Mu9DHS A.A$IHmX׆ٱuHֳwsJhۤ %SGL^) Ģ܈d3(zp ;7No +e&YW̳x(P gX 3'K`cPX?#wMkf3Hai|XrQfe]GY^U{q Dr blX=Ar"B|"٨Qv}kbF>$*3zfY2[,͒ \~4F jg vWu]j ~W,Q'o` eL#PN Z³:5a[/ᆃ5ljdyˌShbS[(@ q3}u$;o &Vݍ3) 27XVtgj\vB#ͤ⨜+˛?#3ĘA3хuqla5ޡZSŤ>n~M}k8u"cG<μ~fT_"ʉJnNDrGœ ?';Y2,"?Yf*ykw se%XcFy]@&F5C0Rue{kVQE7 ,rHY úe)'߿axE:XGBҊއF ~lkwaf@^5C+ʠ9ҹĘI5@]5vrzP@d-]ٵTśUe<'?'}mom`ٻ4SzRn0Xf՝y~cuIҗ93CaLR)RgAH(X`6͖{$G~M6Dze=kZ}rOwncyŹC\+As]9%Y$ ̃1o 쌮r\>$d1fTPn*U`7;eE,y|vy93Ë3119cPݴFqՏɐӦdr و׈A "2Qݲdxl5KzX6\X>wʛQ{r=zRq1ƽ\GZфV@C^ B̆HoE-Ju?Q.J W+rϺ Ez4(/ Xѻ9l,WΑ3F s,(h`O^+K>y;(ԁ:{y!H|$ @#'vPO8}ӏpf㴩$ZSP, !"m&0˞)Ca_\qлo}` ٴ\(3BWW/X]yIF%ZNH\4?~>% ɬgt~w3] Vs7ƮðUWʔύ)H/t†,m5hsVVq0:g 30aItX~Y2臹` ٚ)͛Uڌ\ 92 E{/!AkTYJ~yqbOefҘFQN= {s_RZ>qV>qr #U;ލW(- W)ɉqC>4ż044G._$cqQp<t5*Mu2*[ *@@1ֵ`ƪ%~ֹ/l?LpIM#*$X..(aN! 'q TOkb䉥ރ론ZZe$"m`f$+ZH%7# )#M ,'ȸЄNC3ckmiLiHk Qyr޸"֕30UZ^ݥ˼/EZDžOd` !ɓO$C{&zo>$Z|@\ eL~֠uJl/qm3WB$=ή~+ȳby rɄ"7+Ie[ ]+;60N. LƷ j>v?MwSCfPm$N);rggIPF^qhL֓c+Y}l8GuѢU[ `< j溨&?X6hk~3\N2Ԛ4 øt0Vb2@OvPOoI07@vŔb70]ϛkW\,bϕ[M<(`I |(6bb-|Y|cj*1[I?{'s7&cl'q!႐}*lԹf#1qJ'9clg@a/\X)bGc=dBsG%HdG,%m\.͖ u8!I1d=EBP/̤Wk7BmqI^X_}RQӋ2|X6D2s %7 >>ch{E,F'C4%۞fDmf6P7˝`($QSS8AP3Ԙ5( %GªLwq1<4T@Ⱥk,Dfd #7icVyFKO+Aݓפ[O˿ۗ`sbJ66yfg%V&/w2j:v~\e뿀 MPMq֖8 af8y|^ oYedvzrb{-:SڞVVNptu\F1p|A>?=ءBB(NO#ΨUbه`5T{aqtٻ$:Jriֲ6D ׍ ̤yBtlxJ|̢L鍕^'{<k b0 ľh9/-qsHۛzw:i?6k24;Y7 끍%)X[z 3H V~F@ Z xJD,F9WcVbwb|h݁|f"Qf f1_̅]2dzlcfm eB ɆPAS%F~#t{o0flHǀ9[68۴&L]Ngf~ 4"DJR"*2e6/^+t8+j[pFFU)ToKo|Sqe>V k1~Z`em>n6(y`LNv)Psrl^ydc=RA(Ih"j5FpvDQ Zu' ,}e\7Wt6#̪[AAU(R ~QW%Қ{)*\M\BAvNA`w\Y2'脰8|Ž>`fxj-1m hQ+esjcƸ5δV7d]@ls0:>_yFOA[N< '_ʝN1dkۮhc/עg]N#DZ \f!q3:v(}NSJ:.-xaOEcRJaO `J4tihqr$JȖם@Չl@7ŵ!"^e,z*q,Dqozh9SE=ۜ*))tYx36փ]m8wLQP#`,gwUϱ:F2io992'InHF I.\bJ7)7T3 onɌ5:˱Y O{ y(ӈu}yt~ aQ`-?Y]O)JKP6' l VcY|~L6Qc1Nl<<}DLpX#2gfiNHis6G밶,^7g9 d܀|)J∩:ywBx7p1uiICoa]N"K*cr ](dhk^iK~9L֡zYPfE6|e)_"#Ԥڏ60eOx[$'{>VVC6lރ'5uN>(yQ@$Ls7UJB0:!j>dF;F_('w- [ yi13$c̶͉6SqSMC:HK:/1"Z_©/ɗGIO~-k:IB5b"jm$|P8Ealm \GuNÅiw &J w˿/i P 13wwF2p¥[8L Z%{6<tsaMK 349rgJ)X!uL(@Hm-n5v<NdɰTAkswuݣSAm\^yH,#8Qrhɞ'RI4vSIC$~#Y;E]mŬKYXQ%95 N!nA&u|y="F/Bq`m$p%)U.cP؂R ^4Gdx/8WO8ArDƚ"܂sFin'򈔕0>&?jZ0:]\%1&X˺ʴa2`mxG=/ww%Y!*aQhi`̜$Lm]0^ M$ 0*/׽u@*xWv8>`6~Gbj襆Eߗvd%ѲPk{e Aa[zW nͪH6ǝ'7f~=r$E Z01cQ@]BaT޶s>36TQI&QyYq3IVqe~ui7#&֗f#'wOѡ0q:Fubv@3.p0 >^\e05\jؙW pތ<ͣ/_݊?݊F">Yb|2L뷜هtYd-? 
gK%(;m|!d\ |h.\XX7]5y17䉗wOy| a;<,SyF%7=?[|I}O^kDB7?`e#{-N0O=bt~ΆOg~]p"*kK!LOp*XmpIʆMe##_ wțjy=򽥇(<$$h QHN3JWrɝYC>[^I.Ub54_ ʏ| ?qV; <(9R (_Na[-.~FHy0QbѶ!LO<=@f*1>cY = ; ly||7ƺ͌F0q* ~wg0Z]?,W~ٞҭ8 sM+_CK}Pnfa?hnؑYTn`~GV?$Vہܞ5,_ C11+B'cW~;QZprTg?7a*9 $5WL.~e!s~Mpؖ#oo g͙Wp[HC jmtpT}½.vI711u>Ο?N\j"FLΔl$;bSuv3-~; L0saƾ¼m\xA$''Z!!`fKIiz̐fP8nn;^R\ 0vO^T,g؝ɝr?s0]5TeV8D~w/^gOťdk(߄bDŽ@lgPU$y0X%7 EQ#9Nҗz7gqMA5K^ˌ7)?$PpK3| z"hNe6T@kHcE+«EwU 3"UOV 1h5~[tG0KAQwl܅M*^1X\ 2 \tm, j Jڥ=ϼv/]ž/Dqt3GF8M>1%CO ++z] 3Z?K/rO$:]k_Na0ލ79cB{P[ u1￞).9$'F&@o׿a&jT pp*p6 `ˢ0nN`:zu ePIIka0#p#y(5-t{> 񟂧2^'48ȠplPElbg^%&Џ86̿7x_a 2td*T)3N5VId;lӥq?,ոnӶ`[p A<53UTpoҝA<~kV18~;g23~bK2@NZWV"abCxiCq{1$_m>0_<7tǿ NnGx."(zqw z:in#3<{<2\,̃Рg\v{|C5Z-Q]c撌۸8x0y$5>]hbӞb/#eW0cB;iWBsD K ~QC}X嚆[9S.!TL`<{Ӈ'$jLf}HބUX1T|rmlu3ncCc՘`пk.aSݦSl%$v$ Ep8(Gs3np";y.!HNӡ.xpn 2VAԉ_=JǾuyy1Ъh&ܫ&ܻ'l5ښ78f݋gPeI>6"pTc#(Xc+*h$3:Zo6-@fTg֙PCPɻpR ;98~_ ̋w&H9a n[a [ #ªMTƆzA?ZY#;h#+#W/Lj fV[vmѸm#]/蓅v, Ńpan@jD`롽9׽ڑ&}EQ79sTH/1\u0Qp90l5 -˞(j5QLY•'u'1^ f"mfد޿&7PfbߣO߁uI=HT.]ý 5 $-*&Дbt-`ZmyH3;c5#' `.ٍ[nϷ&Wc \-k`~ŬIXAy 99KN5iLw]\dWfr=|< yP:GO.+u"&ȆBn-T?a_ㄿm</ |?^$!@`i~T{AضuZʛyqLΏ:\!q;A̮m#ٕFu5mºp\7* cZI`o'&Z{&/4!;nצN OBB`̞n}jPs~Q7buwϙp[;gyTӪAuH 8͋ױDEq= tOxhp>dݴXJ~[H! ,i]3Aiq__yPnIũDx&n;Ysw`t7Wjqjk`p AXsn;dWs8dϜNmkq+{{Q3(eCjO ƅo&720"o_6gU_Cv~Y#8‡)\q{=G6NcgxrP}!f!;edQ|78|#lж\;N/~ƶşđ?>t)ve<7`Ώp&SىNǺ [vS/Am" skxr0>e- #*y˾poMl<䘉:y/4p ص^eg ? xfђ0S\ 6 0>*dnC8^ 'x暲2 B^O)/W8Pq4q LX 5!qDT۾&їM8hv/M_s&Ln%R~֙A eiOlh*\[U>rԯ4S%ҥ\ަjyee<ΰFCg-Iz C,x?GzÃ~B.2Pu f8(i^6@q!~x§K@Ki@ 9 Lhh~ njNo$HV| f"܁u9l Ytqr7WbM8-VM#dk}*_4wAEw+S¬Ǩ.+HZǮ'LnT?Pb$K8N-W4ɎB7P^zo|o ܕt"f"צ!-<?02l uWXSSSz@i g˦;_! L_Ja0 \37{Uu&J,\[c{2 LGrԀ+ur220sWt|;8g͡,T%Oq24yi;7mjӎw.QM^-F&cvR:~aa57-M  @$O]Ƕ˹#k07 JASM : lٯì?HrgxHf hѸt 5ؿ^kzN~k4N/tYg'~g!H̾: ם<"Ocm>c^f%8!cӨySX[ߊm2%#:x6Qԭ瓝*X3|R1[ al_)7UW/:bPbOqqy Ο!7q:ৗ(ͧ$C!-|`SO e?03Q>_Ib_mZWs=Aq{!F)\1S^ͻ sƍ0/mᗭAZʒ}sePF0%>;G5 N=`xוN(_}m?UH9 dE{kP/ l̇T̛ <4dC),B @?`pA߈a7SP?vDxkb0{f(:O$8dڌ)^5ixT,myMHǗuiƂ㘖y=с (QJm9p(:u^A%gWKRCeAf&4Ʈcx !b"`/ѵf#"N=V4(:AͶ஘סWR k4:6t /$D G‹-䑸\_L K:ǥaL,^Xgp hA`L(B(s3~GV5B-'5a`d tD^5r7sJ̞*-ZY>΁(%ǝokg=8u+#w~"|˰/Z.b݁cBܧhE(t>zF_5x8 VـثѠ@ɁJĆTJbꈴ v`䅺4Ԓixy%GV1!Q25nHq-@}T6f ,'cgx yMH0{FNYRFr8lsx!9M=qY35E*P-VEgg1:LD$ Gb*!Uӑ4 Mg˺JkGW3dobxpܚc\^Gi^q[ kX+hxd9XAj{o: 1Zؔ5뫣~haÊ8+h+*LTBKև*n,5سt[L۠ NJߎ2n<ISl 8 `Aiޟs)P%6_+K)e[KKv36@L2K?WL{ԧGh[˗ա퇊_*iz Pln4\T߅;aOOZ gٓf;g--&7@IDATkfɭEc& @ G~z]cj#aR̚w3 v$&L IVXT~ƅ[,{8-Rϟ7G qzyzGY%ra&]󸾃8є^΄Ԛ$I ~8\|%X()2Aݠ ErtƓ?9v۾FtH=Ney\y2%pxSamړJ aMz1tFhVޠX> <0%ԣZ3kR0awC˅sC\ $:D* \gP.v:d(:#!t+1Q5!&0N[jk:w[܌Q9v Y= IŌ8omP?vC &9ٍZ#ao?ɱ+^ac2?5 jXl=In I$hL1-I=3e,yy9XZͮ[c©@œ'0R_'[S(hH G55]mtlaFas/6NM ~4RՋVۍT k) CNbF蝓os} 0۬(0AH7IeX[r@^,o>ؚG41\7]*,Ov \v2 ,56[<8n]RsrD,ƻvn`}1\k0|7{wvb|L淟,?$ct4b%8 hέ0A5 x0qy: "c|θ4YxȘk,*')S+z|SZM}U (u4 JvXOw:y'l8ItSDF6B" 6JNN^9n7]SC(@5yH\ Š*fɦyj0@4&U2(D.řM/A?Ν#ӿy\$%ɾ ~t|J3q[g]sw#攛mOѯYv?3r. e_#"A ^5&5 TJ'jm sv] ۩`%4Dm' ->V V )m4xލp0ivnݒ"Ժ~ hi1@bşq6_'aیC7M ҋl ՜|>:L`~$#wdeS TӚf,[>7ȱY "1@Q6$ǙDScObq\ӌg:b6`S4d|]QxP| S\ Ӌ 'Ch8/~R0 vgЂ [&mA4TW\q;TW T9L:Dn0٠t#7}HPMp/l!4qWk,ƃ[<6N4|Z(??6¤mÈGqmQe7ɩTM B|vyU0x?{1V5dhml]Vۈ^4u}QgQAIxmUT{V''`WëE79;aw.p|3Q[LlbSq[Wgp0Q̀#hNOJɞFXZ/ )Zҙ`MBp2 '6$ np0t&/,$J4ikBi0C>sfӒVſQ ƌX TFM&*mhX\1G x:dp%P`px^F0f9 myj 8UGыDcyY3dsHՐڱdiokw  $!bT{v:^~9:(n#fBJ ڏ8].2ٞkE+7@[p3!Uzˇ^| 1]Rf3u%@Dw͖1l 5queP ~ŝ[pBg'NM1".S6MƠn6u?FckS0#s߷3I_.iN;D5:(\Ea`ܲčd2UY%]ͪf\5Ao670؎g):y>2d9V}Qûd!YoabV|,l:3`ABV S{aԶ m9{䭦a AR68L|Nb^%^&ORpW; +|: 583? 
;^ ܖ\Z [+;cΨ=y|'g!-h)- S;Q]ɶ o}m'u>~yˆļ(DŖZ&N<[vPd0b1 !}[vӳO:tvjgݦ,ے8N-|(e6PJo{as&:CsƧEtr4N60ҟG\;Y @Z1Et2*"l;RHy qsckd,\Zd`ihOgp7pXb߬8'j >N5BЙYce0aմ,\WdJxF15WmF21h5C-+{dʻlH+hQ\kf~ hz0-gY hD#lz)2B3(pJm۠49Sea5?f*?'/qβ5a;!سOPVp9hlEh}qwR6i*FQ30meG,v<m9 :L_+4\3y˭f60^i!Զif2=a}nd;71i Οh]ZJ,ATH ըS89 5@y S˘C˵;0BHL#2?#L7oc;߄ ǿCp#taFQ?hh HnG@]m!MR=WK87J|KCaSFI4/sͱ ?{^gI_ԣ$յ6ʀUVU/.E-yb6443Q\fl7̦MzOQ_xh/Búi ô5m7pOq@[q>7HXޅ`ڐDAj ?$mK7is\:B!lzAG$xk:NDI'#d16lV'D\amk2; nvG#1g+~}N jMx %rí$_4V6zO]PMV@H@AUj4yfr. g>NzMSxe<Ch ;kqHcoMώiIυMkoKo6 %DnH_yf`@Ļ@AN2||5rםi|E;̖zRtaKy P益hq%& 9F~g3;"~<%Xgqv%H46x# ?T0͇B}χl[ /"XbҢXފ-UMsHLvxCQ8jaFúL|m;qB*A j gʲeĖJn.ƱdIR%β dvMTYD@nF.]oRaҥqG8䣂_P_ų7Kᔄef?dk- TP\E`lDE P1'w/FAZrQ a2ybmTvW.R=ґ$-9,4M%.+ޤl6??"&nq AX޽,PPB&1]GJTn&NXo@PtGT;wCgLif`61J!C*#v[m)A0YqKἑRsStQZ_f?6BxF)yla+#PHt򮺺#!K:g0%L 0m/`aEsuo9"CܒF]l:@orj6&n ;Mp> }ͶZyzwy8{s+SKp|vh@xuoaenkP&[fRmp]odxYb$z>~ Ԭ D%/\ͭ}+6^mW;N6;oIcMH`xl۽ \t.I`2l] w|8l_l5;jw0 {\[<7S/_ÞdAM?t6ЂᱠWgwl(:/\>opdScNN.jج;^es1xzCMO09D:yxv+T?Uŕ>y].W/pF4Zr$l*8f%Fhi9Njt6򶼼jwMk., vpo+7IH05=@hxŗptR,?3R}ka@PTBv DN20; z z; KHK"2O rie=QC9!%gNe?W>-r o\SFyZ6K#1 SM՚CK(FB*u&n\Y#r핺N~{zMU3J왜\Ca³+䳯9N7kF·]p6^6X O`.ysl4U̺Sy@M`w?~zvUcjKG{gS^oB(lb;g5_cOA$5yN jG,폫1ڏa+i"08Nuą}CGh>l4g`̹ RGQImcCc7PR ,pcgBkN8.l:mvk#z)"5f?!fVHE)9 Z2:r@`ѼzgJM%apf` O) ^|(8(# e:D[+#W85Hjm0!'wcABҩS_ۍqG=qN"G~ǭ߄.4`¾57׳?0ׄn0i4gHFZǍ[36},T,k Opxv.J,[S& X 8%YF鉅.Z  U|;IdMԵ\af偉CF`֞|l_ 1s*yIxqo(jhM5yw Ðh_4H`$,N-5aOtzJ/ǗJq/[(z,#gꯞU~|NHk B3476@affx, ˌ/|SjlI7RiGtGlAMXBq;ESKE)*;c[͈Sd DdrQ1M;R#2\a9AM3xVCqDKCBw~Ư"gADl^5N89ĹoV W̑(yYFls6+}OQSuOڴ # L_-#zF7fLlr^̅\(r8 ?D"G2('ndz/< 0c\J8&]TJ ]<$*퍛=ͳz @ 3'+)})v%#0)1ay6aqd>[+&yv W1{<<Ʒ=x؝wmx1kNqv]ʗUƉ' aǡܺUq ᆥv=a?M&.oJ 2 {wE(ßQI+ijduDdC:!KpDrT^P<5LrG"<&w[>l~|<<8@'Tϗ'-w0$sB뎾h-64T(8w|q|R%L B ?#tPvcn3a~5}/ 5G p .&0tyW.bXfvU۪ۋd Fv'lva]wKqF\/< dI* Wc$`tr hL!Sڙ9ю8Y͙TE&Fׄ05\tV8o* ҋ߂F' ,3 !;jc z+5khHؼ<o^+R';nףY%SpJp$;n[a gL9TLec_7P>tCx}܁mjQb%G' KJ^S!N wۉ!@!p{D;pwR,lD؜xi0l߾l  $P%|\pgA g5ln\BXض`8+qǰHhdw3<'[y $0è]X;ZwG!qϔslau ;FKq_xNߟT )cf7cM+0@fp ~X2Q6DbX 7kQxqoO۬nIL{UP1'[J};=r?{`WQ훲N@B !"EEDT_Y~eQ, Al?HI@Jg@$@B*Ivyw{6&vܙ9SΜ9c$K}]]%׶yv~FyT#m WȪV~'[ey2L,y2tcF3 3[>4F}[YajjQN=0[ZVm$ b#u ѓd@C7-ۗWW)`Nq7?D~7<0fx17 Sv=4u$z~=ί3_#tK˟5%za6)`88UN_HZ .FՙZx0phP}Aa|Նo9O6 woȒ~@=pUBHCPަyܳu6/2'59EBK_NDL$0v:>x͠A3r)AB~jYGܥѵS<ʟ* :0b2d "հ$;n8/^ОEgh's!B{9,sSq<#w諛@Iaڻ^D`OJ$|M_/=,Y y цaFGDR_x5\xxܟ NWl6E(?6>}HÍyWzz9Y<Tךmy:gc{GyKKxCEΏOuEܣR5 *Q2!4@FX >z^{t< q3{{tOqCm+%3ͱҘ .>K&b 8Eu[F¥UDl 2M"+7ɒD]F-޺?Jv)qxxܯ ,?cܡhڟpX:#+BP7N>y}y͗ѩG +$5LuŘM!1ĥ` Ϊv"%c& nE(&=o6|"nwd@>3cQu?V<\ /]bjSk^('\Ym*6MCō q5ppeSܰ&ȶ\Nl,q""~ęEP89(l4-\3eWh`m/qD tahMlliŚ'*L{VӦg E`9x:3vHG5%A{z{?޵}̙#: R BlM;~f)n,߫`$³^RX(aa(ۗ{eP4' H$G d,!W]+{r/ܪ2.B?̨X=98~l{ҵriatztno5;1-yжHCzIzc *x`&P漢.TB ofGq{wwxqr&⊂"jY$>\v/OA@8u "|YKZsp uR7,t#J :f#pCU\b 0eW܈Kw%gG gI-w@%hW|(b^sTKs2lp[2kN&ԓNFteÑ첅kQr'ut 6qC: uRu!sNU[Tu6K({3I? -\wo^3r2GA8+qBY1&kϡp{ w0x5\YJ|sE§"vᵷ̋g{(~Xиw$0NE1׽#!::m iCvG;CMTQ`9a%-Crv8ְ9% g F="JIa 87>mvqk:8my.Xb1)&a[^u` DQq]0g ~cY'$;: G8sM7JE >ʝ?wE`X@`);L,jcϝo~D3^ c .cI7,Ls] -܅M㰮? 
U!gl|FFLG2P$^aV3g^k)q*4 D*ND-MP8ם/u3}TnL~'!qOv(8POQ5ɽ9u\}zHosz#?f<$DOc됞QfW⛸Mb*jW?1B 5{q{78lz^8w̚%C_ ef&`pDa\ - ']qkw82p`cɔ[ۀc)@Ak6jk~I7QArˀ CU^3|Ak] %WTXJh+}<>OuB&w9@Q0kk0K`4@m 39X_6[q)y;3G̐0R ;%n'MDb} Gz5oR«MU0ֵѓ LC [ s_[“ GR[>a[$xReDGĉS4 qf{~bp;)xWθduŬcKC[bVN@HY[`u&p 7lB +Rg\DtE,GΩhsGs9H$?#,9K<֍( |`!&'|8G@g p@m=0Ci4X+ۊ8ՖgK5s^ oz -S˳`a%}`Ƴ/5t~GQnDe)lka pa F1И|he {Ӏ$|9[Gκ@3+ g 2+R n z=lh<:ZԷK÷'9{#VWн,pπrB iڭimvK=c ؏42|t 8q㰌/C̬*eH5pOh7vTZغdT?2RE},qO~큍rteÑΰlX3}EP oIJV t3I4oK1&tssNt%hRAXlA>kb~u qvz(:8ܡ`6߆03֥% +;*mnxEfV.eu͘ȍe:(HOçgAB1l+oZP Vn25=p+ rX/ӻ8xuo]0tIthoi!7hֳmi(N/l "q2Jf`#jE-2P}hQXc#$AY憐)7aYf SqũnGg|pKQܞS3%}h4Uz!}8ÆʐPq]5?8Y(n6A>|ibfyD O`h;>8 =A*|d)xx͋x;(Sඑfe$0M~w6<^1-aR3:"pɫgV$\w4r/c7SPDTmKKE۰D'Ϭ|?ҟ P@-j%?R_I+nD]&5+Xe '+R }@8Ap@ݟ[67Ň䋖ƇPNV0MXGZ6a; !n/{^sq(Gj W]),lfD遽J|r#?UQ,qb J+ސ*+ߢcv+JO$rl^9x%pW ul{1tڸ+ ،zh6Dq^_loƁgX n /x4цa?Xq %I} \17=^-295{U5,Hy4vSKQ1y ߺkh8O}N`5M`\@ttSwڦfc:FV7ޓsj,l4G40m,_8ԥWXWF m'j3HpIxVw9.5@dlT~ /HCb |:=מZP {8i r˒U<8kp]1*.14OÀGdְ1ʚt7dPi6b2v 6+*>"R- H? zܐT =%XnF3 FAAD;Ya涄2}Y5г9W3qr2G''byuL,KX#al9 InJP'`>ܝFAlm)ޘ'ׅG"h}{a> L(Sp177U67g* 0{Y J'eCFR= BS 6$N)Ҿد3{9gis=]N~[k嶩tdf*oAJC=wD{R`{냦?78ĩb|XHj&/MJ3`v2B޻,URGv>[Ʒe3=N sߠ:w+yL1 "Wraܬ Ε)a8KgA#>uiy<0 >7N8p*b.oㅴꃷ],Gtxck@~[2tC[n=OӪdwp[6x OJôxԶ)s>%Fhx@+r$ݤ jSb1o] %9e܋K<@.1/݋hPJа7ɂ!-*NW;[ īIcL^8L~aˆ{}Op{™1k^V׺!-d~g1/rMn}jK[d-WB#utu+ָ5 s)l&W> &8Iy FyeB$盳7}1L b|V%ʾS67;UH6v!+:9A B~}P0t% wq83,mي%Q8 ˄b2}=ݫJ#v7I߲uv$þݩiyPP1Q$bB?zy^xg„0zz.]?wab'a. ˭i4tݮ_&D2"~}i -YKO3ԍp]ىB (N*חo T,| a+19%L3c|bZSk>V-"a_2kL[vS>qkOo.Xk*u0Uo4V r7[;rЛ21~09kcJO9_㡓N K';(̸i{7-5j6&ÓX$5܈w0&/).rB-[B9/yu9״A06} AGh䌸7a:C?uOl]mPE t~Zcv33tᴜBoh㶠(dֺbPNIIaL-\궢Ɲ PkJm cŇLRh@gOjϳKNd1TjӏCA$c/]2b/S %}`w92,E`~mk!G>paX,ǐǽM8ij}ǟrHg!RYٳD곍(Ը7{iwv=ԌPx*HJiVǸ+u+#8 r`%.u BnyaA}BvߜJGel&qVc0 UNk޾r~G* WbpmG (PX#"8vx\RMn2Bˠvflk+`pjB0+?{|̆O:w6*GKao0h 2E;XhԎb2]n|MHm;M¸anvӻn7]p&f-ondI ؁ycEhOd=,üD' AMk._GNZ25U\vj6nvF̐oC)1:M'@\my]zHP|azl0w5pg E%Sq o4o (7n5?sY{C TI4z BZ#aoW`kךਏCaǢ~!j-~ڂ&Ǎ]B0pQ)Du'#0_\x"*2y%7q + FyڂRiz|w6ra&N pMuA5v~*+[lsX>#p;/WSKAwQhkWpSڐ|=W|.|mY*A35C K$qA7Ļ eD|wHU6/|BrQœ4 'w:c|M<|[>nqJU_y( w`KrPEYK3~abQʋ8iB[-Ҷ9+詆1̌(7[O[ &nBxS07v vб2'.5C #RYb!NzGfo0B\憒4gs/d+©qx=n:ם GR8h_N2oӑc.m3]`59aXҖU\~pfВex  8b>*O M4\s l:Q Exkxp Mo|2L OA#p2iP/ նmO@~iNNOͦ7f[>귶Cɔtn7=E;{;iYottu+lvaqpW6x OJ:XjZ3,$gXi?v"IO^3'L6ѫBw$&bK1x՟YZdBO V cSP|L4v(+ iOYmҏ+!'kHPU$saPs~́P ɛԯbuHTT Iqb[ oHD]W8.(S0#qr) Zd R${Rlt+Errfp>B77ՐDZ%k4f:郎kj(%T2$bm0 ]Fm#MC;@ZIqIamɌ D\90Z>fM`26/**J+ J,ri K7qri;q\7[wpi[>v O:2? 2kyqMKɄn&a]9,@M˙gIcۅ/U( $OGv!x]{rΫR#&'Y ~[diMCGQlHi_B\:9exG3lr!RC+pg76WDCw 7܌ÃȐ΢9yH9, 7@{Cq?EB4rW5$ r(&k6sm5bnƌamF#? 
F1X[~u#Ά3,'RmvNkkMOZN%il,9qEpژܲQ,/ќ|pg_HfVq`[fڊa!K߾huowx$Δ XO1%b%k3YL #}\jyUv60V}}60f-NyYx IpeH O #.뗻mY~3۟Y#\ `cpWP)@&N6CiIAQtvL,Ee{fɯcƠRwoIlPkІTC?@:IDHqA.JdX\!L ‰/ F-$GhD /ɸ#ܷ[ͽh\² ZY⭄jKag|2w]0oAF).\ϤG@`tMޯ?_f+D~0>ѿc&7"L.Q덩Y̸anԱm XG , wÆȯ'^NtvAzl )(9+i#62Py,f}@1CA6Kx{nN$ S'F oW]@1\7A>:1qHi*pM]P~{HkK`1G{ۂo0~Y-%ln Fn\_%Lɍ5Kݷ[xk#K"`K""qra\&p z|$6Ӹʫ^ouf+gC2%PsM,YY.C.|JVoF`M9}͔Ot:3h~ZOL q+V H gXWpx?V.5BX}YA*LWXJFjsME_N̺jdwGTf,)Kט=0wYz|e9hfҿ֘o_m~ gm f>Aoo8.lY21O2vVC NRLԯ6DY6Y^ Hu;Z q_ldJͻNbfͩяB)@#ܒbJ珁6_6.≧p0<K|4\Gto:xp\Ғ%mu n9eY-J q[A\ fOߌ7WjTЩ͐lq31FGFF@Awΰ<_ hJ0ۭ>^U;Tx#j1򯚵ŌCu, ‚O2_-ض  >0LCb" c8F.a^H̲ VD: 6VOZOƍ䅕?ns1NDcKA[i̬cŋiŶua˜P7÷߫x%xNo8Vu=xО5 k딬ϽbZ5$5;Ma0{nji߆2{Vic`g'Fyk^mS2d!s'4eǕ8:9krycv^5L׿xI7&2qIupz K9 3C1\lma$&ee6$e [EXZxm̂lß}jT!L.cWEw270(&o_qf2k5x^W;g2QoSp>4Y`ںkg6Lƹf9[yo +Y]yD#;Ob^DŠ$?rnK r›%]@MfW" 4ﲫ!Li (\Ueo{8-κw qڷBߏS`Ӡ/f:6J0G'f&/]1,}̒ؤHunv1OxWCj3gܱ܋sGiuNppCNaIe4{0;ݍ {SLY/aM]h c5n`{$EEwN]rZ]z]⢚dNs1%de+kR8҉߮/rE N<Yǟ xwc(Kvo@cs݂ F[R0ce]zι= ^kihktJn[ p&eK:E4>p9Ǻ|:cȈ> ,&PyQױ$ ~n'NϾt@秾d&_ŪA+W>_ލJ zC,*qDP#r#L9ᐇ !c S3ԖD ˋ1A8).Y46Ȓ #!S<+xdzx@WKռ M% P[)E~;\w 3Ą2A$\ =ܬKL7aX+|$U0l+b ~fn}} +6x>8<{T|'w7o`͞dNFNK8 KpK8q09?2kZɑXʢ=5 0o#DlOb'H {O0s6SCgň Z`VnU #4RDœ@p\0{zO@{sPHKHŭ~d6M_S~.uf9$]RtzRCS:nw3¢±Gh47lZZP8#;zOdx襍JVO鈝&[p֐#&FyG`U9 (,1k?^f(]uS5x֪F]@gJ;f?V6[QkgX".FatQe,gp\=o4΢UIZM[6iĨ}w{8uy;nj nOtQX˟&BAQrn:gvQ6H (#0ЦbN mNܼF^XE8,sYSq4%,׋JNq]dKV٧*PnI^o͗a)ȝ܌<YՀm5y'+IJ |~D㧛&W;A|䦐{_,qzeϥYQrK٨@uhO0,Sj=YC:@{`'.=F Y.%KpFZB\;L<.O‡}:I>&~>˺bD]6~y׭E=-l܅Ȃ:o_&mkfԌ&I6珩p6⩱bۗB~ѿכ6={?\&1Ic=N5x9i m!waxdm֚[C|2/v|ms|M0ڙz`?*ks(8Z 8:^yЩ27Jc}~3[7qCֽ׋V!``l]|?H;\nn;t/Q)C(>Y5 poU3Y wvWG52q 'N+Ǎfm[^Ҡ^)k,jb>|g\bg4pP4 7(Z S\ֺ yȳ1GJ&P9<Y2sZ+bBE=_bY}jx+o6Pa@$"n8;{4ɵPm B$i$L6x@[<>|5f{~ M9i!WA !,3ShA{9$cu6c7ҩ6NZ m/FFG@q;I nf/Y +z"Qb7 MI+Z2.9,jL3Sk{zql|3; Hsf0$dǂZ 9 s/iݰ`}WxPLSM^^P+Y.K\nya9]ޗ$7Yh~ Bs̜Tt5j$E'Hm+.vp4f8ײ'*O>etnmg{dRW"7 g"aX/=|Jw* g̙on6^kF7QU݁YXwJγ9P{d9[ޥ1" Jhj٠^NE<[OQn1Ct>͎ho,mu=b&ͫByAzZ$0Aٮrgu0pYɘdWt x2d̝SƗ/a@A 0J;K; 8rY8 "Uc].}9ލ=؃FͽJPEa4f r&| F'4Äx2ħai%0jH7.fFe7_Ӆv'kڸiujy\­l<=':|j'm6#p<W[) ׾fn9gf#kO_حf㾱5O[xI? F%> ˗5d8E#;zy[Mo "BRwa$F#iK'B42kz%8q$w ^`sNaՎg O N # `[Փ~CC(nk&T.eإlYg/Xw+ -{~'+"FAl1cʃ8Uc(Z!]P]k!eCl<{оtfXAhs8(CV]TռmsTZ^#y5yٻZ?d]a5(8f,ZёT&wwKxX\ys0dڠg4' ~40 VN&㖅OCկf<묶VPIDATǝa7[f|b<>i𚇋Wâ-B PndsVEV^6fF[fKm ޷>eǛTB ؉ *4L"Π"rq8Wd;~N]L/`sP2 J:dUڤ>55(0*Ne#k78bbИ|cV]#Mɥ^ͅ׋D%KmiPMg8d"WK'->`w?0qp7u7HFdBOytCmMss˧Dovr|x 2 X# !~+fQ7[ {7hn }DnmʚavMsK>V@1j}q&Fgכo=2*nx(bcB*}̻6mN}H1? ?a7n򎘴+* ̢hU <ʂ*uKy}rNɴZwG*A8ړ\w>ww={.^>Z06[_n0?bQR؄:nGކpqTsDg>q؍aX gWՏ&) !P ? OW͌1+0Ld.C*C83oݻI -YX0',\\m0^]q[rqo&3OleRIx;Vn8"-fKbz>^M|GM$Um VRPU#k. &NZNZn]FCm-[|{[#:(9爮Dxq5DbJK]9Y2~m0¹|Ii|FIho.˲V1?@"rs]<j?<\{k[~r<*$@đ26Ua0;VZY[nmx%|o K&R6e;|ጭǘR|8p[5KXg~jKb 9k~+`SS"HMfiCt;inzu?w^_n,HvQ)3#+́nUjjdn^P!2)Pϵi ǹJD>&.F xKKsk,cNZ͚P~r3#Bx߉'+1 AM\+MؾxiI(|]q<+-F݌Yn8?';M^5[e[ׄ# h YҎS #xR J;㈤ixe!*wCf?|rNuV/RP^\PRnP9w!Èl|L%\_UOZъ9Z5􅛽m1wBڈf=t7ٵi~ښ5ܵ>!3(9kAlG\+|#ֵ>`ypQGly:7vT'N~\s5C{a\l\1Mga0|H<6&G%}o:̌߯һԔp]>q1]o(GC3fGu"@WrD維Cd |̢(r2޸|,~ /DD#{gH!qtC:Ԣjyp/{6B!k"&`(B=a5}* gA^u1l{IWx,w@V8zf蚥[?g{ôpFC.um*i o@8V:_G#7Ic̖uuWM9vhW%3 vkYiYvyO[d^U.9EӀXYP ӆδ$$B)J@nnʹl8[S 7_f (*(3#tXbpKL.Eߦxk6ni6V7ion5Bc& !e<^^e0NΔ>޿,ē)k1`57[Cvj@t[/T4`H]Udn )`*O$NLNqD)ɆtSlLb)6#0 7dz {ckv$2nu^wG^^h6`u ٟR˽CJу g n-(g_:`d?\X~c>&[C3qdQ2F^VS|>|,C;/p6eb&t 3w O[bS*{2Q+ M Lկ4ajی%q#zVmI| M :[f̤A9y3e!:qJ1tRQ N72T7ɁdņL ah(ѐ  # ,i"\~ƄBױf~s ~4~h$0ΤfbDêu,Tv:쉎NcQ0 [t?ux('M 87E6gW fy9JG;kd&?" "%^ $F/5엟_LR1" }~;-B `NBlK9K"rмarPQζ_QI q/1ir^8C ק7poi HV*QpADe4u uc<o"  LE034Ol:[Zj,S`Y!.ҚHn3gY|!)$~ ? 
or `bzip2 `__ compress all blocks: .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'a': np.random.rand(32, 32), 'b': np.random.rand(64, 64) } target = AsdfFile(tree) target.write_to('target.asdf', all_array_compression='zlib') target.write_to('target.asdf', all_array_compression='bzp2') .. asdf:: target.asdf The `lz4 `__ compression algorithm is also supported, but requires the optional `lz4 `__ package in order to work. When reading a file with compressed blocks, the blocks will be automatically decompressed when accessed. If a file with compressed blocks is read and then written out again, by default the new file will use the same compression as the original file. This behavior can be overridden by explicitly providing a different compression algorithm when writing the file out again. .. code:: import asdf # Open a file with some compression af = asdf.open('compressed.asdf') # Use the same compression when writing out a new file af.write_to('same.asdf') # Or specify the (possibly different) algorithm to use when writing out af.write_to('different.asdf', all_array_compression='lz4') Memory mapping -------------- By default, all internal array data is memory mapped using `numpy.memmap`. This allows for the efficient use of memory even when reading files with very large arrays. The use of memory mapping means that the following usage pattern is not permitted: .. code:: import asdf with asdf.open('my_data.asdf') as af: ... af.tree Specifically, if an ASDF file has been opened using a `with` context, it is not possible to access the file contents outside of the scope of that context, because any memory mapped arrays will no longer be available. It may sometimes be useful to copy array data into memory instead of using memory maps. This can be controlled by passing the `copy_arrays` parameter to either the `AsdfFile` constructor or `asdf.open`. By default, `copy_arrays=False`.
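To make that concrete, here is a minimal sketch (the filename and the ``data`` key are assumptions for illustration; disabling lazy loading alongside ``copy_arrays=True`` ensures every array is read into ordinary memory before the file closes):

.. code::

    import asdf
    import numpy as np

    # lazy_load=False loads every array while the file is still open, and
    # copy_arrays=True stores them as in-memory copies rather than
    # numpy.memmap views into the file
    with asdf.open('my_data.asdf', lazy_load=False, copy_arrays=True) as af:
        tree = af.tree

    # Safe only because the array data was copied into memory above
    print(np.sum(tree['data']))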
asdf-2.5.1/docs/asdf/asdf_tool.rst0000644000446400020070000000147613567314375021217 0ustar eslavichSTSCI\science00000000000000Command line tool ----------------- ``asdf`` includes a command-line tool, ``asdftool``, that performs a number of useful operations: - ``explode``: Convert a self-contained ASDF file into exploded form (see :ref:`exploded`). - ``implode``: Convert an ASDF file in exploded form into a self-contained file. - ``defragment``: Remove unused blocks and extra space. - ``diff``: Report differences between two ASDF files. - ``remove-hdu``: Remove ASDF extension from ASDF-in-FITS file (requires `astropy`, see :ref:`asdf-in-fits`). - ``extensions``: Show information about installed extensions (see :ref:`other_packages`). - ``tags``: List currently available tags. - ``to_yaml``: Inline all of the data in an ASDF file so that it is pure YAML. Run ``asdftool --help`` for more information. asdf-2.5.1/docs/asdf/changes.rst0000644000446400020070000001531513605165767020654 0ustar eslavichSTSCI\science00000000000000.. currentmodule:: asdf ******* Changes ******* What's New in ASDF 2.5.1? ========================= The ASDF Standard is at v1.4.0. Changes include: - Fix bug in test causing failure when test suite is run against an installed asdf package. What's New in ASDF 2.5.0? ========================= The ASDF Standard is at v1.4.0. Changes include: * Added convenience method for fetching the default resolver. * Fixed ``load_schema`` LRU cache memory usage issue. * Fixed bug causing segfault after update of a memory-mapped file. What's New in ASDF 2.4.2? ========================= The ASDF Standard is at v1.3.0. Changes include: * Define the ``in`` operator for top-level ``AsdfFile`` objects. * Automatically register schema tester plugin. Do not enable schema tests by default. Add configuration setting and command line option to enable schema tests. * Enable handling of subclasses of known custom types by using decorators for convenience. * Add support for jsonschema 3.x. * Fix bug in ``NDArrayType.__len__``. It must be a method, not a property. What's New in ASDF 2.3.3? ========================= The ASDF Standard is at v1.3.0. Changes include: * Pass ``ignore_unrecognized_tag`` setting through to ASDF-in-FITS. * Use ``$schema`` keyword if available to determine meta-schema to use when testing whether schemas themselves are valid. * Take into account resolvers from installed extensions when loading schemas for validation. * Fix compatibility issue with new release of ``pyyaml`` (version 5.1). * Allow use of ``pathlib.Path`` objects for ``custom_schema`` option. What's New in ASDF 2.3.1? ========================= The ASDF Standard is at v1.3.0. Changes include: * Provide source information for ``AsdfDeprecationWarning`` messages that come from extensions in external packages. * Fix the way ``generic_io`` handles URIs and paths on Windows. * Fix bug in ``asdftool`` that prevented ``extract`` command from being visible. What's New in ASDF 2.3? ======================= ASDF 2.3 reflects the update of ASDF Standard to v1.3.0, and contains a few notable features and an API change: * Storage of arbitrary precision integers is now provided by `asdf.IntegerType`. This new type is provided by version 1.3.0 of the ASDF Standard. * Reading a file with integer literals that are too large now causes only a warning instead of a validation error. This is to provide backwards compatibility for files that were created with a buggy version of ASDF. * The functions `asdf.open` and `AsdfFile.write_to` now support the use of `pathlib.Path`. * The `asdf.asdftypes` module has been deprecated in favor of `asdf.types`. The old module will be removed entirely in the 3.0 release. What's New in ASDF 2.2? ======================= ASDF 2.2 contains several API changes, although backwards compatibility is preserved for now. The most significant changes are: * The function `AsdfFile.open` has been deprecated in favor of `asdf.open`. It will be removed entirely in the 3.0 release. More intelligent file mode handling has been added to `asdf.open`. Files that are opened in read-only mode with `asdf.open` now explicitly block writes to memory-mapped arrays. This may cause problems for some existing code, but any such code was accessing these arrays in an unsafe manner, so backwards compatibility for this case is not provided. The old mode handling behavior is retained for now in `AsdfFile.open`. * It is now possible to disable lazy loading of internal arrays. This is useful when the `AsdfFile` was opened using another open file. With lazy loading, it is possible to close the original file but still retain access to the array data.
* There is a new warning `AsdfConversionWarning` that occurs when failing to convert nodes in the ASDF tree into custom tagged types. This makes it easier for users to filter specifically for this failure case. What's New in ASDF 2.1? ======================= ASDF 2.1 is a minor release, and most of the changes affect only a subset of users. The most notable changes are the following: * `namedtuple` objects can now be serialized. They are automatically converted into `list` objects, and therefore are not strictly able to round-trip. By default a warning occurs when performing this conversion, but the warning can be disabled by passing `ignore_implicit_conversion=True` to the `AsdfFile` constructor. * Added a method `AsdfFile.get_history_entries` for getting a list of history entries from the tree. * Added an option to `generic_io.get_file` to close the underlying file handle. Please see the :ref:`change_log` for additional details. What's New in ASDF 2.0? ======================= ASDF 2.0 is a major release that includes many improvements, new features, and some API changes. It is the first release of the ASDF package that only supports Python 3. The full list of changes, including bug fixes, can be found in the :ref:`change_log`. A brief overview of changes is provided below: * Support for Python 2.7 has been removed entirely. * There is no longer a hard dependency on `astropy`. It is still required for some features, and for running the tests. Astropy-related tag implementations have been moved to the Astropy package itself. * External packages can now install and register custom ASDF extensions using `setuptools` entry points (see :ref:`other_packages` and :ref:`packaging_extensions`). ASDF detects extensions that are installed in this way and automatically uses them when reading and writing files with custom types. * A bug was fixed that now allows fully-specified tags from external packages to be properly resolved. * The file format now includes metadata about the extensions that were used to create an ASDF file. The software automatically adds this information when writing an ASDF file, and will check for installed extensions when reading a file containing such metadata (see :ref:`extension_checking`). * The restrictions on the top-level attributes `data`, `wcs`, and `fits` have been removed. * Clients that wish to impose additional validation requirements on files can now provide custom top-level schemas (see :ref:`custom-schemas`). * There is a new way to reference array data that is defined in external files (see :ref:`array-references`). * Several new commands have been added to the `asdftool` command line interface: * ``extensions`` for showing information about installed extensions (see :ref:`other_packages`). * ``remove-hdu`` for removing ASDF extension from ASDF-in-FITS file (requires `astropy`, see :ref:`asdf-in-fits`). * The package now cleanly supports builds in `develop` mode and can be imported from the source tree. .. _change_log: Change Log ========== .. include:: ../../CHANGES.rst asdf-2.5.1/docs/asdf/developer_api.rst0000644000446400020070000000055513567314375022070 0ustar eslavichSTSCI\science00000000000000************* Developer API ************* The classes and functions documented here will be of use to developers who wish to create their own custom ASDF types and extensions. .. automodapi:: asdf.types .. automodapi:: asdf.extension .. automodapi:: asdf.yamlutil .. automodapi:: asdf.util .. automodapi:: asdf.versioning .. 
automodapi:: asdf.tests.helpers asdf-2.5.1/docs/asdf/extensions.rst0000644000446400020070000007315313605165746021454 0ustar eslavichSTSCI\science00000000000000.. currentmodule:: asdf.extensions .. _extensions: Writing ASDF extensions ======================= Extensions provide a way for ASDF to represent complex types that are not defined by the ASDF standard. Examples of types that require custom extensions include types from third-party libraries, user-defined types, and complex types that are part of the Python standard library but are not handled in the ASDF standard. From ASDF's perspective, these are all considered 'custom' types. Supporting new types in ASDF is easy. Three components are required: 1. A YAML Schema file for each new type. 2. A tag class (inheriting from `asdf.CustomType`) corresponding to each new custom type. The class must override `~asdf.CustomType.to_tree` and `~asdf.CustomType.from_tree` from `asdf.CustomType` in order to define how ASDF serializes and deserializes the custom type. 3. A Python class to define an "extension" to ASDF, which is a set of related types. This class must implement the `asdf.AsdfExtension` abstract base class. In general, a third-party library that defines multiple custom types can group them all in the same extension. .. note:: The mechanisms of tag classes and extension classes are specific to this particular implementation of ASDF. As of this writing, this is the only complete implementation of the ASDF Standard. However, other language implementations may use other mechanisms for processing custom types. All implementations of ASDF, regardless of language, will make use of the same schemas for abstract data type definitions. This allows all ASDF files to be language-agnostic, and also enables interoperability. An Example ---------- As an example, we will write an extension for ASDF that allows us to represent Python's standard `fractions.Fraction` class for representing rational numbers. We will call our new ASDF type ``fraction``. First, the YAML Schema, defining the type as a pair of integers: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fraction-1.0.0" title: An example custom type for handling fractions tag: "tag:nowhere.org:custom/fraction-1.0.0" type: array items: type: integer minItems: 2 maxItems: 2 ... Then, the Python implementation of the tag class and extension class. See the `asdf.CustomType` and `asdf.AsdfExtension` documentation for more information: .. runcode:: hidden import os import asdf # This is a hack in order to get the example below to work properly __file__ = os.path.join(asdf.__path__[0], 'tests', 'data', 'fraction-1.0.0.yaml') .. 
runcode:: import os import asdf from asdf import util import fractions class FractionType(asdf.CustomType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [fractions.Fraction] @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return fractions.Fraction(tree[0], tree[1]) class FractionExtension(asdf.AsdfExtension): @property def types(self): return [FractionType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(os.path.dirname(__file__)) + '/{url_suffix}.yaml')] Note that the method `~asdf.CustomType.to_tree` of the tag class ``FractionType`` defines how the library converts `fractions.Fraction` into a tree that can be stored by ASDF. Conversely, the method `~asdf.CustomType.from_tree` defines how the library reads a serialized representation of the object and converts it back into an instance of `fractions.Fraction`. Note that the values of the `~asdf.CustomType.name`, `~asdf.CustomType.organization`, `~asdf.CustomType.standard`, and `~asdf.CustomType.version` fields are all reflected in the ``id`` and ``tag`` definitions in the schema. Note also that the base of the ``tag`` value (up to the `name` and `version` components) is reflected in `~asdf.AsdfExtension.tag_mapping` property of the `FractionExtension` type, which is used to map tags to URLs. The `~asdf.AsdfExtension.url_mapping` is used to map URLs (of the same form as the ``id`` field in the schema) to the actual location of a schema file. Once these classes and the schema have been defined, we can save an asdf file using them: .. runcode:: tree = {'fraction': fractions.Fraction(10, 3)} with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff: ff.write_to("test.asdf") .. asdf:: test.asdf ignore_unrecognized_tag Defining custom types --------------------- In the example above, we showed how to create an extension that is capable of serializing `fractions.Fraction`. The custom tag type that we created was defined as a subclass of `asdf.CustomType`. Custom type attributes ********************** We overrode the following attributes of `~asdf.CustomType` in order to define `FractionType` (each bullet is also a link to the API documentation): * `~asdf.CustomType.name` * `~asdf.CustomType.organization` * `~asdf.CustomType.version` * `~asdf.CustomType.standard` * `~asdf.CustomType.types` Each of these attributes is important, and each is described in more detail in the linked API documentation. The choice of `~asdf.CustomType.name` should be descriptive of the custom type that is being serialized. The choice of `~asdf.CustomType.organization`, and `~asdf.CustomType.standard` is fairly arbitrary, but also important. Custom types that are provided by the same package should be grouped into the same `~asdf.CustomType.standard` and `~asdf.CustomType.organization`. These three values, along with the `~asdf.CustomType.version`, are used to define the YAML tag that will mark the serialized type in ASDF files. In our example, the tag becomes ``tag:nowhere.org:custom/fraction-1.0.0``. The tag is important when defining the `asdf.AsdfExtension` subclass. Critically, these values must all be reflected in the associated schema. 
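To make the relationship concrete, here is a small sketch (plain Python, not part of the asdf API) of how these attribute values combine into the YAML tag for our example type:

.. code-block:: python

    # A sketch, not asdf API: how name/organization/standard/version
    # combine into the YAML tag for our example type
    name = 'fraction'
    organization = 'nowhere.org'
    standard = 'custom'
    version = (1, 0, 0)

    tag = 'tag:{}:{}/{}-{}'.format(
        organization, standard, name, '.'.join(str(v) for v in version))
    assert tag == 'tag:nowhere.org:custom/fraction-1.0.0'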
Custom type methods
*******************

In addition to the attributes mentioned above, we also overrode the following methods of `~asdf.CustomType` (each bullet is also a link to the API documentation):

* `~asdf.CustomType.to_tree`
* `~asdf.CustomType.from_tree`

The `~asdf.CustomType.to_tree` method defines how an instance of a custom data type is converted into data structures that represent a YAML tree that can be serialized to a file. The `~asdf.CustomType.from_tree` method defines how a YAML tree can be converted back into an instance of the original custom data type.

In the example above, we used a `list` to contain the important attributes of `fractions.Fraction`. However, this choice is fairly arbitrary; what matters is that `~asdf.CustomType.to_tree` and `~asdf.CustomType.from_tree` are defined consistently with each other. For example, we could have also chosen to use a `dict`:

.. runcode::

    import asdf
    import fractions

    class FractionType(asdf.CustomType):
        name = 'fraction'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [fractions.Fraction]

        @classmethod
        def to_tree(cls, node, ctx):
            return dict(numerator=node.numerator,
                        denominator=node.denominator)

        @classmethod
        def from_tree(cls, tree, ctx):
            return fractions.Fraction(tree['numerator'],
                                      tree['denominator'])

.. runcode:: hidden

    # Redefine the fraction extension for the sake of the example
    FractionExtension.types = [FractionType]

    tree = {'fraction': fractions.Fraction(10, 3)}
    with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff:
        ff.write_to("test.asdf")

In this case, the associated schema would look like the following::

    %YAML 1.1
    ---
    $schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
    id: "http://nowhere.org/schemas/custom/fraction-1.0.0"
    title: An example custom type for handling fractions

    tag: "tag:nowhere.org:custom/fraction-1.0.0"
    type: object
    properties:
      numerator:
        type: integer
      denominator:
        type: integer
    ...

We can compare the output using this representation to the example above:

.. asdf:: test.asdf ignore_unrecognized_tag

Serializing more complex types
******************************

Sometimes the custom types that we wish to represent in ASDF themselves have attributes which are also custom types. As a somewhat contrived example, consider a 2D Cartesian coordinate that uses `fractions.Fraction` to represent each of the components. We will call this type `Fractional2DCoordinate`.

First we need to define a schema to represent this new type::

    %YAML 1.1
    ---
    $schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
    id: "http://nowhere.org/schemas/custom/fractional_2d_coord-1.0.0"
    title: An example custom type for handling components

    tag: "tag:nowhere.org:custom/fractional_2d_coord-1.0.0"
    type: object
    properties:
      x:
        $ref: fraction-1.0.0
      y:
        $ref: fraction-1.0.0
    ...

Note that in the schema, the ``x`` and ``y`` attributes are expressed as references to our ``fraction-1.0.0`` schema. Since both of these schemas are defined under the same standard and organization, we can simply use the name and version of the ``fraction-1.0.0`` schema to refer to it. However, if the reference type was defined in a different organization and standard, it would be necessary to use the entire YAML tag in the reference (e.g. ``tag:nowhere.org:custom/fraction-1.0.0``). Relative tag references are also allowed where appropriate.

.. runcode:: hidden

    class Fractional2DCoordinate:
        x = None
        y = None

We also need to define the custom tag type that corresponds to our new type:
.. runcode::

    import asdf
    from asdf.yamlutil import (custom_tree_to_tagged_tree,
                               tagged_tree_to_custom_tree)

    class Fractional2DCoordinateType(asdf.CustomType):
        name = 'fractional_2d_coord'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [Fractional2DCoordinate]

        @classmethod
        def to_tree(cls, node, ctx):
            tree = dict()
            tree['x'] = custom_tree_to_tagged_tree(node.x, ctx)
            tree['y'] = custom_tree_to_tagged_tree(node.y, ctx)
            return tree

        @classmethod
        def from_tree(cls, tree, ctx):
            coord = Fractional2DCoordinate()
            coord.x = tagged_tree_to_custom_tree(tree['x'], ctx)
            coord.y = tagged_tree_to_custom_tree(tree['y'], ctx)
            return coord

Recall that the ``x`` and ``y`` components of our `Fractional2DCoordinate` type are represented as `fractions.Fraction`. Since this is a type for which we have already defined a tag class, we don't want to duplicate the logic from its `~asdf.CustomType.to_tree` and `~asdf.CustomType.from_tree` methods here. Instead, we use the functions `~asdf.yamlutil.custom_tree_to_tagged_tree` and `~asdf.yamlutil.tagged_tree_to_custom_tree` to recursively process the subtrees. By doing so, we ensure that the `~asdf.CustomType.to_tree` and `~asdf.CustomType.from_tree` methods specific to `fractions.Fraction` will be called automatically.

Since `Fractional2DCoordinateType` shares the same `~asdf.CustomType.organization` and `~asdf.CustomType.standard` as `FractionType`, it can be added to the same extension class:

.. runcode::

    class FractionExtension(asdf.AsdfExtension):
        @property
        def types(self):
            return [FractionType, Fractional2DCoordinateType]

        @property
        def tag_mapping(self):
            return [('tag:nowhere.org:custom',
                     'http://nowhere.org/schemas/custom{tag_suffix}')]

        @property
        def url_mapping(self):
            return [('http://nowhere.org/schemas/custom/',
                     util.filepath_to_url(os.path.dirname(__file__)) +
                     '/{url_suffix}.yaml')]

Now we can use this extension to create an ASDF file:

.. runcode::

    coord = Fractional2DCoordinate()
    coord.x = fractions.Fraction(22, 7)
    coord.y = fractions.Fraction(355, 113)

    tree = {'coordinate': coord}
    with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff:
        ff.write_to("coord.asdf")

.. asdf:: coord.asdf ignore_unrecognized_tag

Note that in the resulting ASDF file, the ``x`` and ``y`` components of our new `fractional_2d_coord` type are tagged as `fraction-1.0.0`.

Assigning schema and tag versions
*********************************

Authors of new tags and schemas should strive to use the conventions described by `semantic versioning <https://semver.org/>`_. Tags and schemas for types that have not been serialized before should begin at ``1.0.0``. Versions for a particular tag type need not move in lock-step with other tag types in the same extension.

The patch version should be bumped for bug fixes and other minor, backwards-compatible changes. New features can be indicated with increments to the minor version, as long as they remain backwards compatible with older versions of the schema. Any changes that break backwards compatibility must be indicated by a major version update.

Since ASDF is intended to be an archival file format, authors of tags and schemas should work to ensure that ASDF files created with older extensions can continue to be processed. This means that every time a schema version is bumped (with the possible exception of patch updates), a **new** schema file should be created.

For example, if we currently have a schema for ``xyz-1.0.0``, and we wish to make changes and bump the version to ``xyz-1.1.0``, we should leave the original schema intact.
A **new** schema file should be created for ``xyz-1.1.0``, which can exist in parallel with the old file. The version of the corresponding tag type should be bumped to ``1.1.0``. For more details on the behavior of schema and tag versioning from a user perspective, see :ref:`version_and_compat`, and also :ref:`custom_type_versions`. Explicit version support ************************ To some extent schemas and tag classes will be closely tied to the custom data types that they represent. This means that in some cases API changes or other changes to the representation of the underlying types will force us to modify our schemas and tag classes. ASDF's schema versioning allows us to handle changes in schemas over time. Let's consider an imaginary custom type called ``Person`` that we want to serialize in ASDF. The first version of ``Person`` was constructed using a first and last name: .. code-block:: python person = Person('James', 'Webb') print(person.first, person.last) Our version 1.0.0 YAML schema for ``Person`` might look like the following: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/person-1.0.0" title: An example custom type for representing a Person tag: "tag:nowhere.org:custom/person-1.0.0" type: array items: type: string minItems: 2 maxItems: 2 ... And our tag implementation would look something like this: .. code-block:: python import asdf from people import Person class PersonType(asdf.CustomType): name = 'person' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [Person] @classmethod def to_tree(cls, node, ctx): return [node.first, node.last] @classmethod def from_tree(cls, tree, ctx): return Person(tree[0], tree[1]) However, a newer version of ``Person`` now requires a middle name in the constructor as well: .. code-block:: python person = Person('James', 'Edwin', 'Webb') print(person.first, person.middle, person.last) James Edwin Webb So we update our YAML schema to version 1.1.0 in order to support newer versions of Person: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/person-1.1.0" title: An example custom type for representing a Person tag: "tag:nowhere.org:custom/person-1.1.0" type: array items: type: string minItems: 3 maxItems: 3 ... We need to update our tag class implementation as well. However, we need to be careful. We still want to be able to read version 1.0.0 of our schema and be able to convert it to the newer version of ``Person`` objects. To accomplish this, we will make use of the `~asdf.CustomType.supported_versions` attribute for our tag class. This will allow us to declare explicit support for the schema versions our tag class implements. Under the hood, ASDF creates multiple copies of our ``PersonType`` tag class, each with a different `~asdf.CustomType.version` attribute corresponding to one of the supported versions. This means that in our new tag class implementation, we can condition our `~asdf.CustomType.from_tree` implementation on the value of ``version`` to determine which schema version should be used when reading: .. 
code-block:: python

    import asdf
    from people import Person

    class PersonType(asdf.CustomType):
        name = 'person'
        organization = 'nowhere.org'
        version = (1, 1, 0)
        supported_versions = [(1, 0, 0), (1, 1, 0)]
        standard = 'custom'
        types = [Person]

        @classmethod
        def to_tree(cls, node, ctx):
            return [node.first, node.middle, node.last]

        @classmethod
        def from_tree(cls, tree, ctx):
            # Handle the older version of the person schema
            if cls.version == (1, 0, 0):
                # Construct a Person object with an empty middle name field
                return Person(tree[0], '', tree[1])
            else:
                # The newer version of the schema stores the middle name too
                return Person(tree[0], tree[1], tree[2])

Note that the implementation of ``to_tree`` is not conditioned on ``cls.version`` since we do not need to convert new ``Person`` objects back to the older version of the schema.

Handling subclasses
*******************

By default, if a custom type is serialized by an ASDF tag class, then all subclasses of that type can also be serialized. However, no attributes that are specific to the subclass will be stored in the file. When reading the file, an instance of the base custom type will be returned instead of the subclass that was written.

To properly handle subclasses of custom types already recognized by ASDF, it is necessary to implement a separate tag class that is specific to the subclass to be serialized. However, this can be burdensome, especially if multiple subclasses need to be handled. Version 2.4.0 of the `asdf` package introduces a new way to handle subclasses of custom types using decorators.

.. attention::

    This feature was introduced in version 2.4.0 and is **experimental**. The API may change in future versions.

In previous examples we wrote a tag class for the built-in type `fractions.Fraction`. Let's create a subclass of this type that we wish to be able to serialize in ASDF. We have already defined ``FractionType`` which handles the serialization of `fractions.Fraction`. We will use decorators to indicate how to properly serialize the subclass:

.. code-block:: python

    @FractionType.subclass
    class NamedFraction(fractions.Fraction):
        """ A very contrived example, indeed. """
        def __new__(cls, *args, name='', **kwargs):
            # Fraction is immutable and is constructed in __new__, so the
            # extra 'name' argument must be stripped off here rather than
            # passed on to Fraction.__new__
            return super().__new__(cls, *args, **kwargs)

        def __init__(self, *args, name='', **kwargs):
            self._name = name

        @FractionType.subclass_property
        def name(self):
            return self._name

The decorators we use are defined as class methods of the ``FractionType`` tag class. By using these we enable round-trip serialization of our custom subclass type. See `asdf.CustomType.subclass` and `asdf.CustomType.subclass_property` for additional details.

Note that this feature is currently not reflected in the ASDF Standard, which means that other implementations of ASDF may not preserve subclass information.

Creating custom schemas
-----------------------

All custom types to be serialized by ASDF require custom schemas. The best resource for creating ASDF schemas can be found in the `ASDF Standard <https://asdf-standard.readthedocs.io/en/latest/>`_ documentation.

In most cases, ASDF schemas will be included as part of a packaged software distribution. In these cases, it is important for the `~asdf.AsdfExtension.url_mapping` of the corresponding `~asdf.AsdfExtension` extension class to map the schema URL to an actual location on disk. However, it is possible for schemas to be hosted online as well, in which case the URL mapping can map (perhaps trivially) to an actual network location. See :ref:`defining_extensions` for more information.
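For the network-hosted case, the mapping can be essentially trivial. Here is a sketch (the URLs are illustrative) of how such a property might look on an extension class:

.. code-block:: python

    @property
    def url_mapping(self):
        # Illustrative URLs: schemas are fetched from the network
        # location where their ids already point
        return [('http://nowhere.org/schemas/custom/',
                 'http://nowhere.org/schemas/custom/{url_suffix}.yaml')]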
It is also important for packages that provide custom schemas to test them, both to make sure that they are valid, and to ensure that any examples they provide are also valid. See :ref:`testing_custom_schemas` for more information.

Adding custom validators
------------------------

A new type may also add new validation keywords to the schema language. This can be used to impose type-specific restrictions on the values in an ASDF file. This feature is used internally so a schema can specify the required datatype of an array.

To support custom validation keywords, set the `~asdf.CustomType.validators` member of a `~asdf.CustomType` subclass to a dictionary where the keys are the validation keyword name and the values are validation functions. The validation functions are of the same form as the validation functions in the underlying ``jsonschema`` library, and are passed the following arguments:

- ``validator``: A `jsonschema.Validator` instance.
- ``value``: The value of the schema keyword.
- ``instance``: The instance to validate. This will be made up of basic datatypes as represented in the YAML file (list, dict, number, strings), and not include any object types.
- ``schema``: The entire schema that applies to instance. Useful to get other related schema keywords.

The validation function should either return ``None`` if the instance is valid or ``yield`` one or more `asdf.ValidationError` objects if the instance is invalid.

To continue the example from above, say we want to add to ``FractionType`` a validation keyword "``simplified``" that, when ``true``, asserts that the corresponding fraction is in simplified form:

.. code-block:: python

    import fractions

    from asdf import ValidationError

    def validate_simplified(validator, simplified, instance, schema):
        if simplified:
            reduced = fractions.Fraction(instance[0], instance[1])
            if (reduced.numerator != instance[0] or
                    reduced.denominator != instance[1]):
                yield ValidationError("Fraction is not in simplified form.")

    FractionType.validators = {'simplified': validate_simplified}
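A schema fragment that makes use of the new keyword might then look like the following sketch (the ``ratio`` property name is illustrative):

.. code-block:: yaml

    # A sketch: "simplified" is the custom keyword defined above
    properties:
      ratio:
        tag: "tag:nowhere.org:custom/fraction-1.0.0"
        simplified: true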
.. _defining_extensions:

Defining custom extension classes
---------------------------------

Extension classes are the mechanism that ASDF uses to register custom tag types so that they can be used when processing ASDF files. Packages that define their own custom tag types must also define extensions in order for those types to be used.

All extension classes must implement the `asdf.AsdfExtension` abstract base class. A custom extension will override each of the following properties of `AsdfExtension` (the text in each bullet is also a link to the corresponding documentation):

* `~asdf.AsdfExtension.types`
* `~asdf.AsdfExtension.tag_mapping`
* `~asdf.AsdfExtension.url_mapping`

Overriding built-in extensions
******************************

It is possible for externally defined extensions to override tag types that are provided by ASDF's built-in extension. For example, maybe an external package wants to provide a different implementation of `~asdf.tags.core.NDArrayType`. In this case, the external package does not need to provide custom schemas since the schema for the type to be overridden is already provided as part of the ASDF standard.

Instead, the extension class may inherit from ASDF's `~asdf.extension.BuiltinExtension` and simply override the `~asdf.AsdfExtension.types` property to indicate the type that is being overridden. Doing this preserves the `~asdf.AsdfExtension.tag_mapping` and `~asdf.AsdfExtension.url_mapping` that are used by the `BuiltinExtension`, which allows the schemas that are packaged by ASDF to be located. ASDF will give precedence to the type that is provided by the external extension, effectively overriding the corresponding type in the built-in extension.

Note that the behavior is currently undefined when multiple external extensions override the same built-in type.
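A minimal sketch of such an override might look like the following (the ``My*`` class names are hypothetical, and the replacement implementation is elided):

.. code-block:: python

    from asdf.extension import BuiltinExtension
    from asdf.tags.core import NDArrayType

    class MyNDArrayType(NDArrayType):
        # A hypothetical replacement implementation would go here
        pass

    class MyNDArrayExtension(BuiltinExtension):
        # Only 'types' is overridden; tag_mapping and url_mapping are
        # inherited from BuiltinExtension so that the schemas packaged
        # with ASDF can still be located
        @property
        def types(self):
            return [MyNDArrayType]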
.. _packaging_extensions:

Packaging custom extensions
---------------------------

Packaging schemas
*****************

If a package provides custom schemas, the schema files must be installed as part of that package distribution. In general, schema files must be installed into a subdirectory of the package distribution. The ASDF extension class must supply a `~asdf.AsdfExtension.url_mapping` that maps to the installed location of the schemas. See :ref:`defining_extensions` for more details.

Registering entry points
************************

Packages that provide their own ASDF extensions can (and should!) install them so that they are automatically detectable by the ASDF Python package. This is accomplished using Python's `setuptools` entry points. Entry points are registered in a package's `setup.py` file.

Consider a package that provides an extension class `MyPackageExtension` in the submodule `mypackage.asdf.extensions`. We need to register this class as an extension entry point that ASDF will recognize. First, we create a dictionary:

.. code:: python

    entry_points = {}
    entry_points['asdf_extensions'] = [
        'mypackage = mypackage.asdf.extensions:MyPackageExtension'
    ]

The key used in the `entry_points` dictionary must be ``'asdf_extensions'``. The value must be a list of one or more strings, each with the following format:

``extension_name = fully.specified.submodule:ExtensionClass``

The extension name can be any arbitrary string, but it should be descriptive of the package and the extension. In most cases the package name itself will suffice. Note that depending on individual package requirements, there may be other entries in the `entry_points` dictionary.

The entry points must be passed to the call to `setuptools.setup`:

.. code:: python

    from setuptools import setup

    entry_points = {}
    entry_points['asdf_extensions'] = [
        'mypackage = mypackage.asdf.extensions:MyPackageExtension'
    ]

    setup(
        # We omit other package-specific arguments that are not
        # relevant to this example
        entry_points=entry_points,
    )

When running ``python setup.py install`` or ``python setup.py develop`` on this package, the entry points will be registered automatically. This allows the ASDF package to recognize the extensions without any user intervention. Users of your package that wish to read ASDF files using types that you have registered will not need to use any extension explicitly. Instead, ASDF will automatically recognize the types you have registered and will process them appropriately. See :ref:`other_packages` for more information on using extensions.

.. _testing_custom_schemas:

Testing custom schemas
----------------------

Packages that provide their own schemas can test them using ASDF's `pytest <https://docs.pytest.org/>`_ plugin for schema testing. Schemas are tested for overall validity, and any examples given within the schemas are also tested.

The schema tester plugin is automatically registered when the ASDF package is installed. In order to enable testing, it is necessary to add the directory containing your schema files to the pytest section of your project's `setup.cfg` file. If you do not already have such a file, creating a `setup.cfg` with the following should be sufficient:

.. code:: ini

    [tool:pytest]
    asdf_schema_root = path/to/schemas another/path/to/schemas

The schema directory paths should be paths that are relative to the top of the package directory **when it is installed**. If this is different from the path in the source directory, then both paths can be used to facilitate in-place testing (see ASDF's own `setup.cfg` for an example of this).

.. note::

    Older versions of ASDF (prior to 2.4.0) required the plugin to be registered in your project's `conftest.py` file. As of 2.4.0, the plugin is now registered automatically and so this line should be removed from your `conftest.py` file, unless you need to retain compatibility with older versions of ASDF.

The ``asdf_schema_skip_names`` configuration variable can be used to skip schema files that live within one of the ``asdf_schema_root`` directories but should not be tested. The names should be given as simple base file names (without directory paths or extensions). Again, see ASDF's own `setup.cfg` file for an example.

The schema tests do **not** run by default. In order to enable the tests by default for your package, add ``asdf_schema_tests_enabled = true`` to the ``[tool:pytest]`` section of your `setup.cfg` file. If you do not wish to enable the schema tests by default, you can add the ``--asdf-tests`` option to the ``pytest`` command line to enable tests on a per-run basis.
asdf-2.5.1/docs/asdf/features.rst0000644000446400020070000004145613567314375021075 0ustar eslavichSTSCI\science00000000000000.. currentmodule:: asdf

*************
Core Features
*************

This section discusses the core features of the ASDF data format, and provides examples and use cases that are specific to the Python implementation.

Data Model
==========

The fundamental data object in ASDF is the ``tree``, which is a nested combination of basic data structures: dictionaries, lists, strings and numbers. In Python, these types correspond to :class:`dict`, :class:`list`, :class:`str`, and :class:`int`, :class:`float`, and :class:`complex`, respectively. The top-level tree object behaves like a Python dictionary and supports arbitrary nesting of data structures. For simple examples of creating and reading trees, see :ref:`overview`.

.. note::

    The ASDF Standard imposes a maximum size of 52 bits for integer literals in the tree (see the ASDF Standard documentation for details and justification). Attempting to store a larger value will result in a validation error.

    Integers and floats of up to 64 bits can be stored inside of :mod:`numpy` arrays (see below).

    For arbitrary precision integer support, see `IntegerType`.

One of the key features of ASDF is its ability to serialize :mod:`numpy` arrays. This is discussed in detail in :ref:`array-data`.

While the core ASDF package supports serialization of basic data types and Numpy arrays, its true power comes from its ability to be extended to support serialization of a wide range of custom data types. Details on using ASDF extensions can be found in :ref:`using_extensions`. Details on creating custom ASDF extensions to support custom data types can be found in :ref:`extensions`.

.. _array-data:

Array Data
==========

Much of ASDF's power and convenience comes from its ability to represent multidimensional array data. The :mod:`asdf` Python package provides native support for :mod:`numpy` arrays.

.. toctree::
    :maxdepth: 2

    arrays
.. _using_extensions:

Using extensions
================

According to Wikipedia, serialization "is the process of translating data structures or object state into a format that can be stored...and reconstructed later" [#wiki]_.

The power of ASDF is that it provides the ability to store, or serialize, the state of Python objects into a *human-readable* data format. The state of those objects can later be restored by another program in a process called deserialization.

While ASDF is capable of serializing basic Python types and Numpy arrays out of the box, it can also be extended to serialize arbitrary custom data types. This section discusses the extension mechanism from a user's perspective. For documentation on creating extensions, see :ref:`extensions`.

Even though this particular implementation of ASDF necessarily serializes Python data types, in theory an ASDF implementation in another language could read the resulting file and reconstruct an analogous type in that language. Conversely, this implementation can read ASDF files that were written by other implementations of ASDF as long as the proper extensions are available.

.. toctree::
    :maxdepth: 2

    using_extensions

.. _schema_validation:

Schema validation
=================

Schema validation is used to determine whether an ASDF file is well formed. All ASDF files must conform to the schemas defined by the `ASDF Standard <https://asdf-standard.readthedocs.io/en/latest/>`_. Schema validation occurs when reading ASDF files (using `asdf.open`), and also when writing them out (using `AsdfFile.write_to` or `AsdfFile.update`).

Schema validation also plays a role when using custom extensions (see :ref:`using_extensions` and :ref:`extensions`). Extensions must provide schemas for the types that they serialize. When writing a file with custom types, the output is validated against the schemas corresponding to those types. If the appropriate extension is installed when reading a file with custom types, then the types will be validated against the schemas provided by the corresponding extension.

.. _custom-schemas:

Custom schemas
--------------

Every ASDF file is validated against the ASDF Standard, and also against any schemas provided by custom extensions. However, it is sometimes useful for particular applications to impose additional restrictions when deciding whether a given file is valid or not.

For example, consider an application that processes digital image data. The application expects the file to contain an image, and also some metadata about how the image was created. The following example schema reflects these expectations:

.. code:: yaml

    %YAML 1.1
    ---
    $schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
    id: "http://stsci.edu/schemas/asdf/core/asdf-1.1.0"
    tag: "tag:stsci.edu:asdf/core/asdf-1.1.0"

    type: object
    properties:
      image:
        description: An ndarray containing image data.
        $ref: "ndarray-1.0.0"

      metadata:
        type: object
        description: Metadata about the image
        properties:
          time:
            description: |
              A timestamp for when the image was created, in UTC.
            type: string
            format: date-time
          resolution:
            description: |
              A 2D array representing the resolution of the image (N x M).
            type: array
            items:
              type: integer
            minItems: 2
            maxItems: 2

    required: [image, metadata]
    additionalProperties: true

This schema restricts the kinds of files that will be accepted as valid to those that contain a top-level ``image`` property that is an ``ndarray``, and a top-level ``metadata`` property that contains information about the time the image was taken and the resolution of the image.
Note that the schema uses the same ``id`` and ``tag`` as the `top-level core schema`_ from the ASDF Standard. This is because it is validating the file at the top level, but is imposing restrictions beyond what is normally required for an ASDF file.

In order to use this schema for a secondary validation pass, we pass the `custom_schema` argument to either `asdf.open` or the `AsdfFile` constructor. Assume that the schema file lives in ``image_schema.yaml``, and we wish to open a file called ``image.asdf``. We would open the file with the following code:

.. code::

    import asdf
    af = asdf.open('image.asdf', custom_schema='image_schema.yaml')

Similarly, if we wished to use this schema when creating new files:

.. code::

    new_af = asdf.AsdfFile(custom_schema='image_schema.yaml')
    ...

.. _top-level core schema: https://github.com/spacetelescope/asdf-standard/blob/master/schemas/stsci.edu/asdf/core/asdf-1.1.0.yaml

.. _version_and_compat:

Versioning and Compatibility
============================

There are several different versions to keep in mind when discussing ASDF:

* The software package version
* The ASDF Standard version
* The ASDF file format version
* Individual tag and schema versions

Each ASDF file contains information about the various versions that were used to create the file. The most important of these are the ASDF Standard version and the ASDF file format version. A particular version of the ASDF software package will explicitly provide support for a specific combination of these versions.

Tag and schema versions are also important for serializing and deserializing data types that are stored in ASDF files. A detailed discussion of tag and schema versions from a user perspective can be found in :ref:`custom_type_versions`.

Since ASDF is designed to serve as an archival format, the software attempts to provide backwards compatibility when reading older versions of the ASDF Standard and ASDF file format. However, since deserializing ASDF types sometimes requires other software packages, backwards compatibility is often contingent on the available versions of such software packages.

In general, forward compatibility with newer versions of the ASDF Standard and ASDF file format is not supported by the software. However, if newer tag and schema versions are detected, the software will attempt to process them.

When creating new ASDF files, it is possible to control the version of the file format that is used. This can be specified by passing the `version` argument to either the `AsdfFile` constructor when the file object is created, or to the `AsdfFile.write_to` method when it is written. By default, the latest version of the file format will be used. Note that this option has no effect on the versions of tag types from custom extensions.
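For example, a minimal sketch (the file name and version number are illustrative):

.. code::

    import asdf

    # Specify the file format version when constructing the AsdfFile...
    af = asdf.AsdfFile({'hello': 'world'}, version='1.3.0')
    af.write_to('hello.asdf')

    # ...or, equivalently, when writing it out
    af = asdf.AsdfFile({'hello': 'world'})
    af.write_to('hello.asdf', version='1.3.0')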
External References
===================

Tree References
---------------

ASDF files may reference items in the tree in other ASDF files. The syntax used in the file for this is called "JSON Pointer", but users of ``asdf`` can largely ignore that.

First, we'll create an ASDF file with a couple of arrays in it:

.. runcode::

    import asdf
    from asdf import AsdfFile
    import numpy as np

    tree = {
        'a': np.arange(0, 10),
        'b': np.arange(10, 20)
    }

    target = AsdfFile(tree)
    target.write_to('target.asdf')

.. asdf:: target.asdf

Then we will reference those arrays in a couple of different ways. First, we'll load the source file in Python and use the `make_reference` method to generate a reference to array ``a``. Second, we'll work at the lower level by manually writing a JSON Pointer to array ``b``, which doesn't require loading or having access to the target file.

.. runcode::

    ff = AsdfFile()

    with asdf.open('target.asdf') as target:
        ff.tree['my_ref_a'] = target.make_reference(['a'])

    ff.tree['my_ref_b'] = {'$ref': 'target.asdf#b'}

    ff.write_to('source.asdf')

.. asdf:: source.asdf

Calling `~asdf.AsdfFile.find_references` will look up all of the references so they can be used as if they were local to the tree. It doesn't actually move any of the data, and keeps the references as references.

.. runcode::

    with asdf.open('source.asdf') as ff:
        ff.find_references()
        assert ff.tree['my_ref_b'].shape == (10,)

On the other hand, calling `~asdf.AsdfFile.resolve_references` places all of the referenced content directly in the tree, so when we write it out again, all of the external references are gone, with the literal content in its place.

.. runcode::

    with asdf.open('source.asdf') as ff:
        ff.resolve_references()
        ff.write_to('resolved.asdf')

.. asdf:: resolved.asdf

YAML itself provides a similar feature, anchors and aliases, which offer a way to support references within the same file. Anchors and aliases are supported by ``asdf``; however, the JSON Pointer approach is generally favored because:

- It is possible to reference elements in another file.
- Elements are referenced by their location in the tree rather than by an identifier, so anything can be referenced.

Anchors and aliases are handled automatically by ``asdf`` when the data structure is recursive. For example, here is a dictionary that is included twice in the same tree:

.. runcode::

    d = {'foo': 'bar'}
    d['baz'] = d
    tree = {'d': d}

    ff = AsdfFile(tree)
    ff.write_to('anchors.asdf')

.. asdf:: anchors.asdf

.. _array-references:

Array References
----------------

ASDF files can refer to array data that is stored in other files using the `ExternalArrayReference` type. External files need not be ASDF files: ASDF is completely agnostic as to the format of the external file. The ASDF external array reference does not define how the external data file will be resolved; in fact it does not even check for the existence of the external file. It simply provides a way for ASDF files to refer to arrays that exist in external files.

Creating an external array reference is simple. Only four pieces of information are required:

* The name of the external file. Since ASDF does not itself resolve the file or check for its existence, the format of the name is not important. In most cases the name will be a path relative to the ASDF file itself, or a URI for a network resource.
* The data type of the array data. This is a string representing any valid `numpy.dtype`.
* The shape of the data array. This is a tuple representing the dimensions of the array data.
* The array data ``target``. This is either an integer or a string that indicates to the user something about how the data array should be accessed in the external file. For example, if there are multiple data arrays in the external file, the ``target`` might be an integer index. Or if the external file is an ASDF file, the ``target`` might be a string indicating the key to use in the external file's tree. The value and format of the ``target`` field is completely arbitrary since ASDF will not use it itself.

As an example, we will create a reference to an external CSV file. We will assume that one of the rows of the CSV file contains the array data we care about:

..
runcode:: import asdf csv_data_row = 10 # The row of the CSV file containing the data we want csv_row_size = 100 # The size of the array extref = asdf.ExternalArrayReference('data.csv', csv_data_row, "int64", (csv_row_size,)) tree = {'csv_data': extref} af = asdf.AsdfFile(tree) af.write_to('external_array.asdf') .. asdf:: external_array.asdf When reading a file containing external references, the user is responsible for using the information in the `ExternalArrayReference` type to open the external file and retrieve the associated array data. Saving history entries ====================== ``asdf`` has a convenience method for notating the history of transformations that have been performed on a file. Given a `~asdf.AsdfFile` object, call `~asdf.AsdfFile.add_history_entry`, given a description of the change and optionally a description of the software (i.e. your software, not ``asdf``) that performed the operation. .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'a': np.random.rand(32, 32) } ff = AsdfFile(tree) ff.add_history_entry( "Initial random numbers", {'name': 'asdf examples', 'author': 'John Q. Public', 'homepage': 'http://github.com/spacetelescope/asdf', 'version': '0.1'}) ff.write_to('example.asdf') .. asdf:: example.asdf ASDF automatically saves history metadata about the extensions that were used to create the file. This information is used when opening files to determine if the proper extensions are installed (see :ref:`extension_checking` for more details). .. _asdf-in-fits: Saving ASDF in FITS =================== .. note:: This section is about packaging entire ASDF files inside of `FITS data format `_ files. This is probably only of interest to astronomers. Making use of this feature requires the `astropy` package to be installed. Sometimes you may need to store the structured data supported by ASDF inside of a FITS file in order to be compatible with legacy tools that support only FITS. First, create an `~astropy.io.fits.HDUList` object using `astropy.io.fits`. Here, we are building an `~astropy.io.fits.HDUList` from scratch, but it could also have been loaded from an existing file. We will create a FITS file that has two image extensions, SCI and DQ respectively. .. runcode:: from astropy.io import fits hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ')) Next we make a tree structure out of the data in the FITS file. Importantly, we use the *same* array references in the FITS `~astropy.io.fits.HDUList` and store them in the tree. By doing this, ASDF will automatically refer to the data in the regular FITS extensions. .. runcode:: tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, }, 'dq': { 'data': hdulist['DQ'].data, } } } Now we take both the FITS `~astropy.io.fits.HDUList` and the ASDF tree and create an `AsdfInFits` object. .. runcode:: from asdf import fits_embed ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to('embedded_asdf.fits') .. runcode:: hidden from astropy.io import fits with fits.open('embedded_asdf.fits') as new_hdulist: with open('content.asdf', 'wb') as fd: fd.write(new_hdulist['ASDF'].data.tostring()) The special ASDF extension in the resulting FITS file contains the following data. Note that the data source of the arrays uses the ``fits:`` prefix to indicate that the data comes from a FITS extension: .. asdf:: content.asdf To load an ASDF-in-FITS file, simply open it using `asdf.open`. 
The returned value will be an `AsdfInFits` object, which can be used in the same way as any other `AsdfFile` object.

.. runcode::

    with asdf.open('embedded_asdf.fits') as asdf_in_fits:
        science = asdf_in_fits.tree['model']['sci']

.. rubric:: Footnotes

.. [#wiki] https://en.wikipedia.org/wiki/Serialization
asdf-2.5.1/docs/asdf/install.rst0000644000446400020070000000371513567314375020715 0ustar eslavichSTSCI\science00000000000000.. _installation:

************
Installation
************

There are several different ways to install the ``asdf`` package. Each is described in detail below.

Requirements
============

The ``asdf`` package has the following dependencies:

- `python <https://www.python.org/>`__ 3.5 or later
- `numpy <https://numpy.org/>`__ 1.8 or later
- `jsonschema <https://github.com/Julian/jsonschema>`__ 2.3.0 or later
- `pyyaml <https://pyyaml.org/>`__ 3.10 or later
- `semantic_version <https://pypi.org/project/semantic_version/>`__ 2.8 or later
- `six <https://pypi.org/project/six/>`__ 1.9.0 or later

Support for units, time, transform, wcs, or running the tests also requires:

- `astropy <https://www.astropy.org/>`__ 3.0 or later

Optional support for `lz4 <https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)>`__ compression is provided by:

- `lz4 <https://pypi.org/project/lz4/>`__ 0.10 or later

Also required for running the tests:

- `pytest-astropy <https://github.com/astropy/pytest-astropy>`__

Installing with pip
===================

.. include:: ../../README.rst
    :start-after: begin-pip-install-text
    :end-before: begin-source-install-text

Installing with conda
=====================

ASDF is also distributed as a `conda <https://conda.io/>`__ package via the `conda-forge <https://conda-forge.org/>`__ channel. It is also available through the `astroconda <https://astroconda.readthedocs.io/>`__ channel.

To install ``asdf`` within an existing conda environment::

    $ conda install -c conda-forge asdf

To create a new conda environment and install ``asdf``::

    $ conda create -n new-env-name -c conda-forge python asdf

Building from source
====================

.. include:: ../../README.rst
    :start-after: begin-source-install-text
    :end-before: end-source-install-text

Running the tests
=================

.. include:: ../../README.rst
    :start-after: begin-testing-text
    :end-before: end-testing-text
asdf-2.5.1/docs/asdf/overview.rst0000644000446400020070000000225113567314375021113 0ustar eslavichSTSCI\science00000000000000.. currentmodule:: asdf

.. _overview:

********
Overview
********

Let's start by taking a look at a few basic ASDF use cases. This will introduce you to some of the core features of ASDF and will show you how to get started with using ASDF in your own projects.

To follow along with this tutorial, you will need to install the :mod:`asdf` package. See :ref:`installation` for details.

Hello World
===========

At its core, ASDF is a way of saving nested data structures to YAML. Here we save a :class:`dict` with the key/value pair ``'hello': 'world'``.

.. runcode::

    from asdf import AsdfFile

    # Make the tree structure, and create an AsdfFile from it.
    tree = {'hello': 'world'}
    ff = AsdfFile(tree)
    ff.write_to("test.asdf")

    # You can also make the AsdfFile first, and modify its tree directly:
    ff = AsdfFile()
    ff.tree['hello'] = 'world'
    ff.write_to("test.asdf")

.. asdf:: test.asdf

Creating Files
==============

.. include:: ../../README.rst
    :start-after: begin-create-file-text
    :end-before: end-create-file-text

Reading Files
=============

.. include:: ../../README.rst
    :start-after: begin-read-file-text
    :end-before: end-read-file-text
asdf-2.5.1/docs/asdf/user_api.rst0000644000446400020070000000014413567314375021053 0ustar eslavichSTSCI\science00000000000000********
User API
********

.. automodapi:: asdf
    :include-all-objects:
    :inherited-members:
asdf-2.5.1/docs/asdf/using_extensions.rst0000644000446400020070000002616413567314375022656 0ustar eslavichSTSCI\science00000000000000
.. currentmodule:: asdf

The built-in extension
----------------------

The ability to serialize the following types is provided by ASDF's built-in extension:

* `dict`
* `list`
* `str`
* `int`
* `float`
* `complex`
* `numpy.ndarray`

The built-in extension is packaged with ASDF and is automatically used when reading and writing files. Users cannot control the use of the built-in extension, and in general they need not concern themselves with the details of its implementation. However, it is useful to be aware that the built-in extension is always in effect when reading and writing ASDF files.

Custom types
------------

For the purposes of this documentation, a "custom type" is any data type that can not be serialized by the built-in extension.

In order for a particular custom type to be serialized, a special class called a "tag type" (or "tag" for short) must be implemented. Each tag type defines how the corresponding custom type will be serialized and deserialized. More details on how tag types are implemented can be found in :ref:`extensions`. Users should never have to refer to tag implementations directly; they simply enable ASDF to recognize and process custom types.

In addition to tag types, each custom type must have a corresponding schema, which is used for validation. The definition of the schema is closely tied to the definition of the tag type. More details on schema validation can be found in :ref:`schema_validation`.

All schemas and their associated tag types have versions that move in sync. The version will change whenever a schema (and therefore the tag type implementation) changes.

Extensions
----------

In order for the tag types and schemas to be used by ASDF, they must be packaged into an **extension** class. In general, the details of extensions are transparent to users of ASDF. However, users need to be aware of extensions in the following two scenarios:

* when storing custom data types to files to be written
* when reading files that contain custom data types

These scenarios require the use of custom extensions (the built-in extension is always used). There are two ways to use custom extensions, which are detailed below in :ref:`other_packages` and :ref:`explicit_extensions`.

Writing custom types to files
*****************************

ASDF is not capable of serializing any custom type unless an extension is provided that defines how to serialize that type. Attempting to do so will cause an error when trying to write the file. For details on writing custom tag types and extensions, see :ref:`extensions`.

.. _reading_custom_types:

Reading files with custom types
*******************************

The ASDF software is capable of reading files that contain custom data types even if the extension that was used to create the file is not present. However, the extension is required in order to properly deserialize the original type.

If the necessary extension is **not** present, the custom data types will simply appear in the tree as a nested combination of basic data types. The structure of this data will mirror the structure of the schema used to serialize the custom type.

In this case, a warning will occur by default to indicate to the user that the custom type in the file was not recognized and can not be deserialized. To suppress these warnings, users should pass ``ignore_unrecognized_tag=True`` to `asdf.open`.
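For example (the file name is illustrative):

.. code-block:: python

    import asdf

    # Suppress warnings about tags that no installed extension recognizes
    af = asdf.open('custom_types.asdf', ignore_unrecognized_tag=True)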
Even if an extension for the custom type is present, its presence does not guarantee that the type can be deserialized successfully. Instantiating the custom type may involve additional software dependencies, which, if not present, will cause an error when the type is deserialized. Users should be aware of the dependencies that are required for instantiating custom types when reading ASDF files.

.. _custom_type_versions:

Custom types, extensions, and versioning
----------------------------------------

All tag types and schemas are versioned. This allows changes to tags and schemas to be recorded, and it allows ASDF to define behavior with respect to version compatibility.

Tag and schema versions may change for several reasons. One common reason is to reflect a change to the API of the custom type that a tag represents. This typically corresponds to an update to the version of the software that defines that custom type.

Since ASDF is designed to be an archival file format, it attempts to maintain backwards compatibility with all older tag and schema versions, at least when reading files. However, there are some caveats, which are described below.

Reading files
*************

When ASDF encounters a tagged object in a file, it will compare the version of the tag in the file with the version of the corresponding tag type (if one is provided by an available extension). In general, when reading files ASDF abides by the following principles:

* If a tag type is available and its version matches that of the tag in the file, ASDF will return an instance of the original custom type.
* If no corresponding tag type is found in any available extension, ASDF will return a basic data structure representing the type. A warning will occur unless the option ``ignore_unrecognized_tag=True`` was given (see :ref:`reading_custom_types`).
* If a tag type is available but its version is **older** than that in the file (meaning that the file was written using a newer version of the tag type), ASDF will attempt to deserialize the tag using the existing tag type. If this fails, ASDF will return a basic data structure representing the type, and a warning will occur.
* If a tag type is available but its version is **newer** than that in the file, ASDF will attempt to deserialize the tag using the existing tag type. If this fails, ASDF will return a basic data structure representing the type, and a warning will occur.

In cases where the available tag type version does not match the version of the tag in the file, warnings can be enabled by passing ``ignore_version_mismatch=False`` to `asdf.open`. These warnings are ignored by default.

Writing files
*************

In general, ASDF makes no guarantee of being able to write older versions of tag types.

Explicit version support
************************

Some tag types explicitly support reading only particular versions of the tag and schema (see `asdf.CustomType.supported_versions`). In these cases, deserialization is only possible if the version in the file matches one of the explicitly supported versions. Otherwise, ASDF will return a basic data structure representing the type, and a warning will occur.

Caveats
*******

While ASDF makes every attempt to deserialize stored objects even in the case of a tag version mismatch, deserialization will not always be possible. In most cases, if the versions do not match, ASDF will be able to return a basic data structure representing the original type. However, tag version mismatches often indicate a mismatch between the versions of the software packages that define the type being serialized.
Writing files
*************

In general, ASDF makes no guarantee of being able to write older versions of tag types.

Explicit version support
************************

Some tag types explicitly support reading only particular versions of the tag and schema (see `asdf.CustomType.supported_versions`). In these cases, deserialization is only possible if the version in the file matches one of the explicitly supported versions. Otherwise, ASDF will return a basic data structure representing the type, and a warning will occur.

Caveats
*******

While ASDF makes every attempt to deserialize stored objects even in the case of a tag version mismatch, deserialization will not always be possible. In most cases, if the versions do not match, ASDF will be able to return a basic data structure representing the original type. However, tag version mismatches often indicate a mismatch between the versions of the software packages that define the type being serialized. In some cases, these version incompatibilities may lead to errors when attempting to read a file (especially when multiple tags/packages are involved). In these cases, the best course of action is to try to install the necessary versions of the packages (and extensions) involved.

.. _other_packages:

Extensions from other packages
------------------------------

Some external packages may define extensions that allow ASDF to recognize some or all of the types that are defined by that package. Such packages may install the extension class as part of the package itself (details for developers can be found in :ref:`packaging_extensions`).

If the package installs its extension, then ASDF will automatically detect the extension and use it when processing any files. No specific action is required by the user to read and write custom types defined by that package's extension.

Users can use the ``extensions`` command of the ``asdftool`` command line tool to determine which packages in the current Python environment have installed ASDF extensions:

.. code-block:: none

    $ asdftool extensions -s
    Extension Name: 'bizbaz' (from bizbaz 1.2.3)
    Class: bizbaz.io.asdf.extension.BizbazExtension
    Extension Name: 'builtin' (from asdf 2.0.0)
    Class: asdf.extension.BuiltinExtension

The output will always include the built-in extension, but may also display other extensions from other packages, depending on what is installed.

.. _explicit_extensions:

Explicit use of extensions
--------------------------

Sometimes no packaged extensions are provided for the types you wish to serialize. In this case, it is necessary to explicitly provide the appropriate extension classes when reading and writing files that contain custom types.

Both `asdf.open` and the `AsdfFile` constructor take an optional `extensions` keyword argument to control which extensions are used when reading or creating ASDF files.

Consider the following example, where a custom type ``MyCustomType`` needs to be written to a file. An extension ``MyCustomExtension`` is defined, which contains a tag type that can serialize and deserialize ``MyCustomType``. Since ``MyCustomExtension`` is not installed by any package, we will need to pass it directly to the `AsdfFile` constructor:

.. code-block:: python

    import asdf

    ...

    af = asdf.AsdfFile(extensions=MyCustomExtension())
    af.tree = {'thing': MyCustomType('foo')}
    # This call would cause an error if the proper extension was not
    # provided to the constructor
    af.write_to('custom.asdf')

Note that the extension class must actually be instantiated when it is passed as the `extensions` argument.

To read the file, we pass the same extension to `asdf.open`:

.. code-block:: python

    import asdf

    af = asdf.open('custom.asdf', extensions=MyCustomExtension())

If necessary, it is also possible to pass a list of extension instances to `asdf.open` and the `AsdfFile` constructor:

.. code-block:: python

    extensions = [MyCustomExtension(), AnotherCustomExtension()]
    af = asdf.AsdfFile(extensions=extensions)

Passing either a single extension instance or a list of extension instances to either `asdf.open` or the `AsdfFile` constructor will not override any extensions that are installed in the environment. Instead, the types handled by the explicitly passed extensions are added to those provided by installed extensions.
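For reference, the sketch below shows one way ``MyCustomExtension`` might be defined. All of the names here are hypothetical, the schema and its URL mapping are omitted, and the authoritative interface (``asdf.CustomType`` and the ``AsdfExtension`` base class) is described in :ref:`extensions`:

.. code-block:: python

    import asdf
    from asdf.extension import AsdfExtension

    class MyCustomType:
        # Hypothetical user-defined type to be serialized
        def __init__(self, value):
            self.value = value

    class MyCustomTagType(asdf.CustomType):
        # Tag type describing how MyCustomType is (de)serialized
        organization = 'example.org'
        standard = 'custom'
        name = 'my_custom'
        version = '1.0.0'
        types = [MyCustomType]

        @classmethod
        def to_tree(cls, node, ctx):
            return {'value': node.value}

        @classmethod
        def from_tree(cls, tree, ctx):
            return MyCustomType(tree['value'])

    class MyCustomExtension(AsdfExtension):
        @property
        def types(self):
            return [MyCustomTagType]

        @property
        def tag_mapping(self):
            return [('tag:example.org:custom',
                     'http://example.org/schemas/custom{tag_suffix}')]

        @property
        def url_mapping(self):
            # The mapping from schema URLs to schema files is omitted in
            # this sketch; see the extensions documentation for a complete
            # example.
            return []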
.. _extension_checking:

Extension checking
------------------

When writing ASDF files using this software, metadata about the extensions that were used to create the file will be added to the file itself. For extensions that were provided with another software package, the metadata includes the version of that package.

When reading files with extension metadata, ASDF can check whether the required extensions are present before processing the file. If a required extension is not present, or if the wrong version of a package that provides an extension is installed, ASDF will issue a warning.

It is possible to turn these warnings into errors by using the `strict_extension_check` parameter of `asdf.open`. If this parameter is set to `True`, then opening the file will fail if the required extensions are missing.
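For example (a minimal sketch; ``custom.asdf`` again stands in for a file that records extension metadata):

.. code-block:: python

    import asdf

    # Fail with an error, rather than a warning, if extensions that were
    # used to write the file are missing from the current environment
    af = asdf.open('custom.asdf', strict_extension_check=True)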
asdf-2.5.1/docs/conf.py0000644000446400020070000001351313605165746017077 0ustar eslavichSTSCI\science00000000000000
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#
# Astropy documentation build configuration file.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this file.
#
# All configuration values have a default. Some values are defined in
# the global Astropy configuration which is loaded here before anything else.
# See astropy.sphinx.conf for which values are set there.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('..'))

# IMPORTANT: the above commented section was generated by sphinx-quickstart, but
# is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
# commented out with this explanation to make it clear why this should not be
# done. If the sys.path entry above is added, when the astropy.sphinx.conf
# import occurs, it will import the *source* version of astropy instead of the
# version installed (if invoked as "make html" or directly with sphinx), or the
# version in the build directory (if "python setup.py build_sphinx" is used).
# Thus, any C-extensions that are needed to build the documentation will *not*
# be accessible, and the documentation will not build correctly.

import os
import sys
import datetime

# Ensure documentation examples are deterministically random.
import numpy
try:
    numpy.random.seed(int(os.environ['SOURCE_DATE_EPOCH']))
except KeyError:
    pass

try:
    from sphinx_astropy.conf.v1 import *  # noqa
except ImportError:
    print('ERROR: the documentation requires the sphinx-astropy package to be installed')
    sys.exit(1)

# Get configuration information from setup.cfg
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser
conf = ConfigParser()

conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'

# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("x.y.z")` here.
# check_sphinx_version("1.2.1")

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns.append('_templates')

# This is added to the end of RST files - a good place to put substitutions to
# be used globally.
rst_epilog += """
"""

# -- Project information ------------------------------------------------------

# This does not *have* to match the package name, but typically does
project = setup_cfg['name']
author = setup_cfg['author']
copyright = '{0}, {1}'.format(
    datetime.datetime.now().year, setup_cfg['author'])

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.

from pkg_resources import get_distribution
release = get_distribution(setup_cfg['name']).version
# for example take major/minor
version = '.'.join(release.split('.')[:2])

# -- Options for HTML output ---------------------------------------------------

# A NOTE ON HTML THEMES
# The global astropy configuration uses a custom theme, 'bootstrap-astropy',
# which is installed along with astropy. A different theme can be used or
# the options for this theme can be modified by overriding some of the
# variables set in the global configuration. The variables set in the
# global configuration are listed below, commented out.

# Add any paths that contain custom themes here, relative to this directory.
# To use a different custom theme, add the directory containing the theme.
#html_theme_path = []

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. To override the custom theme, set this to the
# name of a builtin theme or the name of a custom theme in html_theme_path.
html_theme = 'alabaster'

html_theme_options = {
    'github_user': 'spacetelescope',
    'github_repo': 'asdf',
    'github_button': 'true',
    'fixed_sidebar': 'true',
    'page_width': '45%',
}

html_static_path = ['_static']

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = ''

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = ''

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} v{1}'.format(project, release)

# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'

# -- Options for LaTeX output --------------------------------------------------

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [('index', project + '.tex', project + u' Documentation',
                    author, 'manual')]

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), project + u' Documentation',
              [author], 1)]

sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(__file__)), 'sphinxext'))
extensions += ['example']
asdf-2.5.1/docs/index.rst0000644000446400020070000000316313567314375017432 0ustar eslavichSTSCI\science00000000000000
**************************************
ASDF - Advanced Scientific Data Format
**************************************

``asdf`` is a tool for reading and writing Advanced Scientific Data Format (ASDF) files.
.. include:: ../README.rst
    :start-after: begin-summary-text
    :end-before: end-summary-text

.. note::

    This is the **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat - if you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, go here: http://seismic-data.org/

Getting Started
===============

.. toctree::
    :maxdepth: 2

    asdf/install
    asdf/overview
    asdf/features
    asdf/asdf_tool
    asdf/changes

Extending ASDF
==============

.. toctree::
    :maxdepth: 2

    asdf/extensions.rst

API Documentation
=================

.. toctree::
    :maxdepth: 1

    asdf/user_api
    asdf/developer_api

Contributing and reporting issues
=================================

We welcome feedback and contributions of all kinds. Contributions of code, documentation, or general feedback are all appreciated. Feature requests and bug reports for the Python implementation can be posted at `ASDF's github page <https://github.com/spacetelescope/asdf>`_.

The ASDF Standard itself also has a repository on github. Suggestions for improvements to the ASDF Standard can be reported `here <https://github.com/spacetelescope/asdf-standard>`_.

See also
========

- The `Advanced Scientific Data Format (ASDF) standard <https://asdf-standard.readthedocs.io/>`__
- ASDF Python package distribution on `pypi <https://pypi.org/project/asdf/>`_

Index
=====

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

asdf-2.5.1/docs/make.bat0000644000446400020070000001064113567314375017205 0ustar eslavichSTSCI\science00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^<target^>` where ^<target^> is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files.
goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. 
goto end ) :end asdf-2.5.1/docs/sphinxext/0000755000446400020070000000000013605166132017615 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/docs/sphinxext/__init__.py0000644000446400020070000000013113567314375021734 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- asdf-2.5.1/docs/sphinxext/example.py0000644000446400020070000001134413567314375021640 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import atexit import io import os import shutil import tempfile import textwrap import codecs from docutils.parsers.rst import Directive from docutils import nodes from sphinx.util.nodes import set_source_info import asdf from asdf import AsdfFile from asdf.constants import ASDF_MAGIC, BLOCK_FLAG_STREAMED from asdf import versioning, util version_string = str(versioning.default_version) TMPDIR = tempfile.mkdtemp() def delete_tmpdir(): shutil.rmtree(TMPDIR) GLOBALS = {} FLAGS = { BLOCK_FLAG_STREAMED: "BLOCK_FLAG_STREAMED" } class RunCodeDirective(Directive): has_content = True optional_arguments = 1 def run(self): code = textwrap.dedent('\n'.join(self.content)) cwd = os.getcwd() os.chdir(TMPDIR) try: try: exec(code, GLOBALS) except: print(code) raise literal = nodes.literal_block(code, code) literal['language'] = 'python' set_source_info(self, literal) finally: os.chdir(cwd) if 'hidden' not in self.arguments: return [literal] else: return [] class AsdfDirective(Directive): required_arguments = 1 optional_arguments = 1 def run(self): filename = self.arguments[0] cwd = os.getcwd() os.chdir(TMPDIR) parts = [] try: ff = AsdfFile() code = AsdfFile._open_impl(ff, filename, _get_yaml_content=True) code = '{0} {1}\n'.format(ASDF_MAGIC, version_string) + code.strip().decode('utf-8') literal = nodes.literal_block(code, code) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) kwargs = dict() # Use the ignore_unrecognized_tag parameter as a proxy for both options kwargs['ignore_unrecognized_tag'] = 'ignore_unrecognized_tag' in self.arguments kwargs['ignore_missing_extensions'] = 'ignore_unrecognized_tag' in self.arguments with asdf.open(filename, **kwargs) as ff: for i, block in enumerate(ff.blocks.internal_blocks): data = codecs.encode(block.data.tostring(), 'hex') if len(data) > 40: data = data[:40] + '...'.encode() allocated = block._allocated size = block._size data_size = block._data_size flags = block._flags if flags & BLOCK_FLAG_STREAMED: allocated = size = data_size = 0 lines = [] lines.append('BLOCK {0}:'.format(i)) human_flags = [] for key, val in FLAGS.items(): if flags & key: human_flags.append(val) if len(human_flags): lines.append(' flags: {0}'.format(' | '.join(human_flags))) if block.input_compression: lines.append(' compression: {0}'.format(block.input_compression)) lines.append(' allocated_size: {0}'.format(allocated)) lines.append(' used_size: {0}'.format(size)) lines.append(' data_size: {0}'.format(data_size)) lines.append(' data: {0}'.format(data)) code = '\n'.join(lines) literal = nodes.literal_block(code, code) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) internal_blocks = list(ff.blocks.internal_blocks) if (len(internal_blocks) and internal_blocks[-1].array_storage != 'streamed'): buff = io.BytesIO() ff.blocks.write_block_index(buff, ff) block_index = buff.getvalue().decode('utf-8') literal = nodes.literal_block(block_index, block_index) 
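# The block index is itself YAML, so it is highlighted like the rest of the output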
literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) finally: os.chdir(cwd) result = nodes.literal_block() textnodes, messages = self.state.inline_text(filename, self.lineno) title = nodes.title(filename, '', *textnodes) result += title result += parts return [result] def setup(app): app.add_directive('runcode', RunCodeDirective) app.add_directive('asdf', AsdfDirective) atexit.register(delete_tmpdir) asdf-2.5.1/environment.yml0000644000446400020070000000146313567314375017741 0ustar eslavichSTSCI\science00000000000000# Run the following command to set up this environment: # $ conda env create -f environment.yml # The environment name can be overridden with the following command: # $ conda env create -n -f environment.yml name: asdf-dev dependencies: - python=3.6 - ipython - numpy - pyyaml - jsonschema - semantic_version - pip: - git+https://github.com/astropy/astropy#egg=astropy - pytest-astropy # Optional dependencies: it's not necessary to install gwcs for testing # but it may be useful. Note that gwcs will cause a stable version of ASDF # to be installed as well. Unfortunately there's not a good way to express # --no-deps in this file (which actually seems to be a limitation of pip # itself, not conda). # - git+https://github.com/spacetelescope/gwcs#egg=gwcs asdf-2.5.1/licenses/0000755000446400020070000000000013605166132016440 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/licenses/LICENSE.rst0000644000446400020070000000274113605165746020272 0ustar eslavichSTSCI\science00000000000000Copyright (c) 2019, Space Telescope Science Institute All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. asdf-2.5.1/licenses/README.rst0000644000446400020070000000024213567314375020140 0ustar eslavichSTSCI\science00000000000000Licenses ======== This directory holds license and credit information for the affiliated package, works the affiliated package is derived from, and/or datasets. 
asdf-2.5.1/licenses/SUNPY_LICENSE.rst0000644000446400020070000000252613567314375021272 0ustar eslavichSTSCI\science00000000000000SunPy is released under a BSD-style open source licence: Copyright (c) 2013-2018 The SunPy developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. asdf-2.5.1/pyproject.toml0000644000446400020070000000011413605165746017555 0ustar eslavichSTSCI\science00000000000000[build-system] requires = ["setuptools>=30.3.0", "setuptools_scm", "wheel"] asdf-2.5.1/pytest_asdf/0000755000446400020070000000000013605166132017160 5ustar eslavichSTSCI\science00000000000000asdf-2.5.1/pytest_asdf/__init__.py0000644000446400020070000000010013605165746021272 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst asdf-2.5.1/pytest_asdf/extension.py0000644000446400020070000000221113605165746021554 0ustar eslavichSTSCI\science00000000000000from asdf import extension from asdf.tests import CustomTestType class LabelMapperTestType(CustomTestType): version = '1.0.0' name = 'transform/label_mapper' class RegionsSelectorTestType(CustomTestType): version = '1.0.0' name = 'transform/regions_selector' class TestExtension(extension.BuiltinExtension): """This class defines an extension that represents tags whose implementations currently reside in other repositories (such as GWCS) but whose schemas are defined in ASDF. This provides a workaround for schema validation testing since we want to pass without warnings, but the fact that these tag classes are not defined within ASDF means that warnings occur unless this extension is used. Eventually these schemas may be moved out of ASDF and into other repositories, or ASDF will potentially provide abstract base classes for the tag implementations.
""" @property def types(self): return [LabelMapperTestType, RegionsSelectorTestType] @property def tag_mapping(self): return [('tag:stsci.edu:asdf', 'http://stsci.edu/schemas/asdf{tag_suffix}')] asdf-2.5.1/pytest_asdf/plugin.py0000644000446400020070000001547313605165746021054 0ustar eslavichSTSCI\science00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os from importlib.util import find_spec from pkg_resources import parse_version import yaml import pytest import numpy as np # Avoid all imports of asdf at this level in order to avoid circular imports def pytest_addoption(parser): parser.addini( "asdf_schema_root", "Root path indicating where schemas are stored") parser.addini( "asdf_schema_skip_names", "Base names of files to skip in schema tests") parser.addini( "asdf_schema_skip_examples", "Base names of schemas whose examples should not be tested") parser.addini( "asdf_schema_tests_enabled", "Controls whether schema tests are enabled by default") parser.addoption('--asdf-tests', action='store_true', help='Enable ASDF schema tests') class AsdfSchemaFile(pytest.File): def __init__(self, *args, skip_examples=False, **kwargs): super().__init__(*args, **kwargs) self.skip_examples = skip_examples def collect(self): yield AsdfSchemaItem(str(self.fspath), self) if not self.skip_examples: for example in self.find_examples_in_schema(): yield AsdfSchemaExampleItem(str(self.fspath), self, example) def find_examples_in_schema(self): """Returns generator for all examples in schema at given path""" from asdf import treeutil with open(str(self.fspath), 'rb') as fd: schema_tree = yaml.safe_load(fd) for node in treeutil.iter_tree(schema_tree): if (isinstance(node, dict) and 'examples' in node and isinstance(node['examples'], list)): for desc, example in node['examples']: yield example class AsdfSchemaItem(pytest.Item): def __init__(self, schema_path, parent): super(AsdfSchemaItem, self).__init__(schema_path, parent) self.schema_path = schema_path def runtest(self): from asdf import schema from asdf.extension import default_extensions # Make sure that each schema itself is valid. 
schema_tree = schema.load_schema( self.schema_path, resolver=default_extensions.resolver, resolve_references=True) schema.check_schema(schema_tree) ASTROPY_4_0_TAGS = { 'tag:stsci.edu:asdf/transform/rotate_sequence_3d', 'tag:stsci.edu:asdf/transform/ortho_polynomial', 'tag:stsci.edu:asdf/transform/fix_inputs', 'tag:stsci.edu:asdf/transform/math_functions', 'tag:stsci.edu:asdf/time/time', } def should_skip(name, version): if name == 'tag:stsci.edu:asdf/transform/multiplyscale': return not is_min_astropy_version('3.1.dev0') elif name in ASTROPY_4_0_TAGS: return not is_min_astropy_version('4.0') return False def is_min_astropy_version(min_version): astropy = find_spec('astropy') if astropy is None: return False import astropy return parse_version(astropy.version.version) >= parse_version(min_version) def parse_schema_filename(filename): from asdf import versioning components = filename[filename.find('schemas') + 1:].split(os.path.sep) tag = 'tag:{}:{}'.format(components[1], '/'.join(components[2:])) name, version = versioning.split_tag_version(tag.replace('.yaml', '')) return name, version class AsdfSchemaExampleItem(pytest.Item): def __init__(self, schema_path, parent, example): test_name = "{}-example".format(schema_path) super(AsdfSchemaExampleItem, self).__init__(test_name, parent) self.filename = str(schema_path) self.example = example def _find_standard_version(self, name, version): from asdf import versioning for sv in versioning.supported_versions: map_version = versioning.get_version_map(sv)['tags'].get(name) if map_version is not None and version == map_version: return sv return versioning.default_version def runtest(self): from asdf import AsdfFile, block, util from asdf.tests import helpers from .extension import TestExtension name, version = parse_schema_filename(self.filename) if should_skip(name, version): return standard_version = self._find_standard_version(name, version) # Make sure that the examples in the schema files (and thus the # ASDF standard document) are valid. buff = helpers.yaml_to_asdf( 'example: ' + self.example.strip(), standard_version=standard_version) ff = AsdfFile( uri=util.filepath_to_url(os.path.abspath(self.filename)), extensions=TestExtension()) # Fake an external file ff2 = AsdfFile({'data': np.empty((1024*1024*8), dtype=np.uint8)}) ff._external_asdf_by_uri[ util.filepath_to_url( os.path.abspath( os.path.join( os.path.dirname(self.filename), 'external.asdf')))] = ff2 # Add some dummy blocks so that the ndarray examples work for i in range(3): b = block.Block(np.zeros((1024*1024*8), dtype=np.uint8)) b._used = True ff.blocks.add(b) b._array_storage = "streamed" try: with pytest.warns(None) as w: ff._open_impl(ff, buff, mode='rw') # Do not tolerate any warnings that occur during schema validation assert len(w) == 0, helpers.display_warnings(w) except Exception: print("From file:", self.filename) raise # Just test we can write it out. A roundtrip test # wouldn't always yield the correct result, so those have # to be covered by "real" unit tests.
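# Examples that reference 'external.asdf' are exercised read-only (via the
# fake external file registered above); writing them back out is skipped.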
if b'external.asdf' not in buff.getvalue(): buff = io.BytesIO() ff.write_to(buff) def pytest_collect_file(path, parent): if not (parent.config.getini('asdf_schema_tests_enabled') or parent.config.getoption('asdf_tests')): return schema_roots = parent.config.getini('asdf_schema_root').split() if not schema_roots: return skip_names = parent.config.getini('asdf_schema_skip_names') skip_examples = parent.config.getini('asdf_schema_skip_examples') schema_roots = [os.path.join(str(parent.config.rootdir), os.path.normpath(root)) for root in schema_roots] if path.ext != '.yaml': return None for root in schema_roots: if str(path).startswith(root) and path.purebasename not in skip_names: skip = path.purebasename in skip_examples return AsdfSchemaFile( path, parent, skip_examples=(path.purebasename in skip_examples)) return None asdf-2.5.1/readthedocs.yml0000644000446400020070000000013113567314375017651 0ustar eslavichSTSCI\science00000000000000conda: file: .rtd-environment.yml python: setup_py_install: true formats: - none asdf-2.5.1/setup.cfg0000644000446400020070000000416213605166132016457 0ustar eslavichSTSCI\science00000000000000[metadata] name = asdf description = Python tools to handle ASDF files long_description_content_type = text/x-rst author = Erik Bray, Dan D'Avella, Michael Droettboom author_email = mdroe@stsci.edu license = BSD url = http://github.com/spacetelescope/asdf edit_on_github = False github_project = spacetelescope/asdf classifiers = Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.3 Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.5 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Development Status :: 5 - Production/Stable [options] python_requires = >=3.3 setup_requires = setuptools_scm packages = asdf asdf.schemas asdf.reference_files pytest_asdf package_dir = asdf.schemas = asdf-standard/schemas asdf.reference_files = asdf-standard/reference_files include_package_data = True install_requires = semantic_version>=2.8 pyyaml>=3.10 jsonschema>=2.3,<4 six>=1.9.0 numpy>=1.8 [options.extras_require] all = lz4>=0.10 docs = sphinx sphinx-astropy astropy graphviz matplotlib tests = pytest astropy gwcs pytest-doctestplus pytest-remotedata pytest-openfiles psutil [options.package_data] data = tests/** [options.entry_points] console_scripts = asdftool = asdf.commands.main:main asdf_extensions = builtin = asdf.extension:BuiltinExtension pytest11 = asdf_schema_tester = pytest_asdf.plugin [build_sphinx] source-dir = docs build-dir = docs/_build all_files = 1 [upload_docs] upload-dir = docs/_build/html show-response = 1 [tool:pytest] testpaths = asdf docs asdf-standard/schemas minversion = 3.1 norecursedirs = build docs/_build doctest_plus = enabled remote_data_strict = True open_files_ignore = test.fits asdf.fits text_file_format = rst asdf_schema_root = asdf-standard/schemas asdf/schemas asdf_schema_skip_names = asdf-schema-1.0.0 draft-01 asdf_schema_skip_examples = domain-1.0.0 frame-1.0.0 frame-1.1.0 asdf_schema_tests_enabled = true addopts = --doctest-rst [flake8] exclude = extern select = E101 W191 W291 W292 W293 W391 E11 E502 E722 E901 E902 [egg_info] tag_build = tag_date = 0 asdf-2.5.1/setup.py0000755000446400020070000000222313605165746016361 0ustar eslavichSTSCI\science00000000000000#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import re import codecs # We need to use custom logic here to parse the README due to the raw HTML blob # the 
beginning which makes things render nicely on GitHub. Without this custom # parsing logic, the README will not render properly as the long description on # PyPi. If we ever revert to using pure RST for the README, this can be # replaced by functionality built into setuptools. def read_readme(readme_filename): with codecs.open(readme_filename, encoding='utf8') as ff: lines = ff.read().splitlines() # Skip lines that contain raw HTML markup lines = lines[:4] + lines[26:] # Turn the header comment into a real header lines = lines[1:] lines[0:2] = [x.strip() for x in lines[0:2]] # Fix hyperlink targets so that the README displays properly on pypi label_re = re.compile(r'^\.\.\s+_(\w|-)+$') for i, line in enumerate(lines): if label_re.match(line): lines[i] = line + ':' return '\n'.join(lines) from setuptools import setup setup(use_scm_version=True, long_description=read_readme('README.rst')) asdf-2.5.1/tox.ini0000644000446400020070000000416313605165767016163 0ustar eslavichSTSCI\science00000000000000[tox] envlist = {py36,py37}-{stable,gwcsdev},py37-astrodev py35-legacy [testenv] deps= pytest<5.1 pytest-sugar pytest-faulthandler pytest-openfiles>=0.3.2 astrodev: git+git://github.com/astropy/astropy py35,py36: importlib_resources py35-!astrodev,py36-!astrodev: gwcs~=0.9.1 py37-!astrodev: gwcs numpydev: git+git://github.com/numpy/numpy legacy: semantic_version==2.3.1 legacy: pyyaml==3.10 legacy: jsonschema==2.3 legacy: numpy~=1.10.0 numpy11,numpy12,legacy: astropy~=3.0.0 numpy11: numpy==1.11 numpy12: numpy==1.12 numpydev,astrodev: cython conda_channels= conda-forge extras= all,tests commands= astrodev: pip install --no-deps git+git://github.com/spacetelescope/gwcs pytest {posargs} [testenv:prerelease] basepython= python3.7 pip_pre= true [testenv:packaged] basepython= python3.7 # The default tox working directory is in .tox in the source directory. If we # execute pytest from there, it will discover tox.ini in the source directory # and load the asdf module from the unpackaged source, which is not what we # want. The home directory does not have a tox.ini in any of its ancestors, # so this will allow us to test the installed package. changedir= {homedir} commands= pytest --pyargs asdf [testenv:egg_info] deps= conda_deps= commands= python setup.py egg_info [testenv:twine] deps= twine conda_deps= commands= twine check {distdir}/* [testenv:docbuild] basepython= python3.6 extras= docs commands= sphinx-build -W docs build/docs [testenv:checkdocs] basepython= python3.6 deps= collective.checkdocs pygments commands= python setup.py checkdocs [testenv:style] basepython= python3.6 deps= flake8 commands= flake8 asdf --count [testenv:coverage] basepython= python3.7 deps= gwcs pytest<5.1 pytest-astropy pytest-openfiles>=0.3.2 codecov coverage commands= coverage run --source=asdf --rcfile={toxinidir}/asdf/tests/coveragerc \ -m pytest --remote-data --open-files coverage report -m codecov -e TOXENV passenv= TOXENV CI TRAVIS TRAVIS_* CODECOV_* DISPLAY