===== asdf-2.9.2/.github/workflows/changelog.yml =====

name: Changelog

on:
  pull_request:
    types: [labeled, unlabeled, opened, synchronize, reopened]

jobs:
  changelog:
    name: Confirm changelog entry
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
          submodules: true
      - name: Grep for PR number in CHANGES.rst
        run: grep -P '\[[^\]]*#${{github.event.number}}[,\]]' CHANGES.rst
        if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-changelog-entry-needed') }}

===== asdf-2.9.2/.github/workflows/ci.yml =====

name: CI

on:
  push:
    branches:
      - master
      - stable
      - '*.x'
    tags:
      - '*'
  pull_request:

jobs:
  tox:
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: Python 3.10 Testing
            os: ubuntu-latest
            python-version: "3.10"
            toxenv: py310
          - name: Code Coverage with Python 3.9
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: coverage
          - name: Python 3.8 Testing
            os: ubuntu-latest
            python-version: "3.8"
            toxenv: py38
          - name: Python 3.7 Testing
            os: ubuntu-latest
            python-version: "3.7"
            toxenv: py37
          - name: Python 3.7 with legacy packages
            os: ubuntu-latest
            python-version: "3.7"
            toxenv: py37-legacy
          - name: Documentation Build
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: docbuild
          - name: Mac OS Latest
            os: macos-latest
            python-version: "3.9"
            toxenv: py39
          - name: Compatibility
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: compatibility
          - name: Bandit Security Checks
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: bandit
          - name: Code Style Checks
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: style
          - name: Twine
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: twine
          - name: Checkdocs
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: checkdocs
          - name: Astropy Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-astropydev
          - name: ASDF-Astropy Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-asdfastropydev
          - name: GWCS Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-gwcsdev
          # Fail
          - name: Numpy Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-numpydev
          - name: ASDF-Transform-Schemas Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-asdftransformschemasdev
          - name: ASDF-WCS-Schemas Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-asdfwcsschemasdev
          - name: ASDF-Coordinates-Schemas Dev
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: py39-asdfcoordinatesschemasdev
          # Fail
          - name: Pre-Release Dependencies
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: prerelease
          - name: Test Against Installed Package
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: packaged
          - name: Warnings Treated as Exceptions
            os: ubuntu-latest
            python-version: "3.9"
            toxenv: warnings
          - name: Windows
            os: windows-latest
            python-version: "3.9"
            toxenv: py39
    steps:
      - name: Install System Packages
        if: ${{ contains(matrix.toxenv,'docbuild') }}
        run: |
          sudo apt update -y
          sudo apt-get install graphviz texlive-latex-extra dvipng
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
          submodules: true
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install tox
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Run tox
        run: tox -e ${{ matrix.toxenv }}

===== asdf-2.9.2/.github/workflows/downstream.yml =====

name: Downstream

on:
  workflow_dispatch:
  schedule:
    # Run every Monday at 6am UTC
    - cron: '0 6 * * 1'

env:
  CRDS_SERVER_URL: https://jwst-crds.stsci.edu
  CRDS_PATH: ~/crds_cache
  CRDS_CLIENT_RETRY_COUNT: 3
  CRDS_CLIENT_RETRY_DELAY_SECONDS: 20

jobs:
  common:
    name: ${{ matrix.package_name }}@${{ matrix.ref }} unit tests
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        include:
          - package_name: astropy
            repository: astropy/astropy
            ref: main
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: gwcs
            repository: spacetelescope/gwcs
            ref: master
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: jwst
            repository: spacetelescope/jwst
            ref: master
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: specutils
            repository: astropy/specutils
            ref: main
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: weldx
            repository: BAMWelDX/weldx
            ref: master
            install_command: pip install -e .[test]
            test_command: pytest weldx/tests/asdf_tests weldx/schemas --asdf-tests
          - package_name: sunpy
            repository: sunpy/sunpy
            ref: main
            install_command: pip install -e .[tests,all]
            test_command: pytest sunpy/io/
          - package_name: dkist
            repository: DKISTDC/dkist
            ref: main
            install_command: pip install -e .[tests]
            test_command: pytest
          - package_name: asdf-astropy
            repository: astropy/asdf-astropy
            ref: main
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: asdf-transform-schemas
            repository: asdf-format/asdf-transform-schemas
            ref: master
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: asdf-wcs-schemas
            repository: asdf-format/asdf-wcs-schemas
            ref: main
            install_command: pip install -e .[test]
            test_command: pytest
          - package_name: asdf-coordinates-schemas
            repository: asdf-format/asdf-coordinates-schemas
            ref: main
            install_command: pip install -e .[test]
            test_command: pytest
    steps:
      - name: Checkout asdf
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
          submodules: true
          path: asdf
      - name: Checkout ${{ matrix.package_name }}
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
          repository: ${{ matrix.repository }}
          ref: ${{ matrix.ref }}
          path: target
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install asdf
        run: cd asdf && pip install .
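      # Added commentary (not in the original workflow): the asdf checkout
      # lives in ./asdf and the downstream package in ./target, so the
      # development version of asdf installed above is the one exercised by
      # the downstream package's test suite in the steps below.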
      - name: Install remaining ${{ matrix.package_name }} dependencies
        run: cd target && ${{ matrix.install_command }}
      - name: Pip Freeze
        run: pip freeze
      - name: Run ${{ matrix.package_name }} tests
        run: cd target && ${{ matrix.test_command }}

===== asdf-2.9.2/.github/workflows/publish-to-pypi.yml =====

name: Publish to PyPI

on:
  release:
    types: [released]

jobs:
  publish:
    uses: spacetelescope/action-publish_to_pypi/.github/workflows/workflow.yml@master
    with:
      test: false
      # Set to true if your package contains a C extension
      build_platform_wheels: false
    secrets:
      user: ${{ secrets.PYPI_USERNAME_ASDF_MAINTAINER }}
      # WARNING: Do not hardcode secret values here! If you want to use a
      # different user or password, you can override this secret by creating
      # one with the same name in your Github repository settings.
      password: ${{ secrets.PYPI_PASSWORD_ASDF_MAINTAINER }}
      test_password: ${{ secrets.PYPI_PASSWORD_ASDF_MAINTAINER_TEST }}

===== asdf-2.9.2/.github/workflows/s390x.yml =====

name: s390x

on:
  workflow_dispatch:
  push:
    tags:
      - '*'
  schedule:
    # Run every Monday at 6am UTC
    - cron: '0 6 * * 1'

jobs:
  pytest:
    runs-on: ubuntu-18.04
    name: Python 3.7
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
          submodules: true
      - uses: uraimo/run-on-arch-action@v2.0.5
        name: Run tests
        id: build
        with:
          arch: s390x
          distro: buster
          shell: /bin/bash
          install: |
            apt-get update -q -y
            apt-get install -q -y git \
                                  python3 \
                                  python3-astropy \
                                  python3-lz4 \
                                  python3-numpy \
                                  python3-venv \
                                  python3-wheel
          run: |
            python3 -m venv --system-site-packages tests
            source tests/bin/activate
            pip3 install --upgrade pip setuptools gwcs==0.9.1 pytest==5.4.3 pytest-doctestplus==0.8.0
            pip3 install -e .[all,tests]
            python3 -m pytest --remote-data

===== asdf-2.9.2/CHANGES.rst =====

2.9.2 (2022-02-07)
------------------

- Fix deprecation warnings stemming from the release of pytest 7.0.0. [#1075]
- Fix bug in pytest plugin when schemas are not in a directory named "schemas". [#1076]

2.9.1 (2022-02-03)
------------------

- Fix typo in testing module ``__init__.py`` name. [#1071]

2.9.0 (2022-02-02)
------------------

- Added the capability for tag classes to provide an interface to asdf info functionality to obtain information about the class attributes rather than appear as an opaque class object. [#1052, #1055]
- Fix tag listing when extension is not fully implemented. [#1034]
- Drop support for Python 3.6. [#1054]
- Adjustments to compression plugin tests and documentation. [#1053]
- Update setup.py to raise error if "git submodule update --init" has not been run. [#1057]
- Add ability for tags to correspond to multiple schema_uri, with an implied allOf among the schema_uris. [#1058, #1069]
- Add the URL of the file being parsed to ``SerializationContext``. [#1065]
- Add ``asdf.testing.helpers`` module with simplified versions of test helpers previously available in ``asdf.tests.helpers``. [#1067]

2.8.3 (2021-12-13)
------------------

- Fix more use of 'python' where 'python3' is intended. [#1033]

2.8.2 (2021-12-06)
------------------

- Update documentation to reflect new 2.8 features.
  [#998]
- Fix array compression for non-native byte order. [#1010]
- Fix use of 'python' where 'python3' is intended. [#1026]
- Fix schema URI resolving when the URI prefix is also claimed by a legacy extension. [#1029]
- Remove 'name' and 'version' attributes from NDArrayType instances. [#1031]

2.8.1 (2021-06-09)
------------------

- Fix bug in block manager when a new block is added to an existing file without a block index. [#1000]

2.8.0 (2021-05-12)
------------------

- Add ``yaml_tag_handles`` property to allow definition of custom yaml ``%TAG`` handles in the asdf file header. [#963]
- Add new resource mapping API for extending asdf with additional schemas. [#819, #828, #843, #846]
- Add global configuration mechanism. [#819, #839, #844, #847]
- Drop support for automatic serialization of subclass attributes. [#825]
- Support asdf:// as a URI scheme. [#854, #855]
- Include only extensions used during serialization in a file's metadata. [#848, #864]
- Drop support for Python 3.5. [#856]
- Add new extension API to support versioned extensions. [#850, #851, #853, #857, #874]
- Permit wildcard in tag validator URIs. [#858, #865]
- Implement support for ASDF Standard 1.6.0. This version of the standard limits mapping keys to string, integer, or boolean. [#866]
- Stop removing schema defaults for all ASDF Standard versions, and automatically fill defaults only for versions <= 1.5.0. [#860]
- Stop removing keys with ``None`` values from the tree on write. This fixes a long-standing issue where the tree structure is not preserved on write, but will break ``ExtensionType`` subclasses that depend on this behavior. Extension developers will need to modify their ``to_tree`` methods to check for ``None`` before adding a key to the tree (or modify the schema to permit nulls, if that is the intention). [#863]
- Deprecated the ``auto_inline`` argument to ``AsdfFile.write_to`` and ``AsdfFile.update`` and added ``AsdfConfig.array_inline_threshold``. [#882, #991]
- Add ``edit`` subcommand to asdftool for efficient editing of the YAML portion of an ASDF file. [#873, #922]
- Increase limit on integer literals to signed 64-bit. [#894]
- Remove the ``asdf.test`` method and ``asdf.__githash__`` attribute. [#943]
- Add support for custom compression via extensions. [#931]
- Remove unnecessary ``.tree`` from search result paths. [#954]
- Drop support for bugs in older operating systems and Python versions. [#955]
- Add argument to ``asdftool diff`` that ignores tree nodes that match a JMESPath expression. [#956]
- Fix behavior of ``exception`` argument to ``GenericFile.seek_until``. [#980]
- Fix issues in file type detection to allow non-seekable input and filenames without recognizable extensions. Remove the ``asdf.asdf.is_asdf_file`` function. [#978]
- Update ``asdftool extensions`` and ``asdftool tags`` to incorporate the new extension API. [#988]
- Add ``AsdfSearchResult.replace`` method for assigning new values to search results. [#981]
- Search for block index starting from end of file. Fixes rare bug when a data block contains a block index. [#990]
- Update asdf-standard to 1.6.0 tag. [#993]

2.7.5 (2021-06-09)
------------------

- Fix bug in ``asdf.schema.check_schema`` causing relative references in metaschemas to be resolved incorrectly. [#987]
- Fix bug in block manager when a new block is added to an existing file without a block index. [#1000]

2.7.4 (2021-04-30)
------------------

- Fix pytest plugin failure under older versions of pytest.
  [#934]
- Copy array views when the base array is non-contiguous. [#949]
- Prohibit views over FITS arrays that change dtype. [#952]
- Add support for HTTPS URLs and following redirects. [#971]
- Prevent astropy warnings in tests when opening known bad files. [#977]

2.7.3 (2021-02-25)
------------------

- Add pytest plugin options to skip and xfail individual tests and xfail the unsupported ndarray-1.0.0 example. [#929]
- Fix bug resulting in invalid strides values for views over FITS arrays. [#930]

2.7.2 (2021-01-15)
------------------

- Fix bug causing test collection failures in some environments. [#889]
- Fix bug when decompressing arrays with numpy 1.20. [#901, #909]

2.7.1 (2020-08-18)
------------------

- Fix bug preventing access to copied array data after ``AsdfFile`` is closed. [#869]

2.7.0 (2020-07-23)
------------------

- Fix bug preventing diff of files containing ndarray-1.0.0 objects in simplified form. [#786]
- Fix bug causing duplicate elements to appear when calling ``copy.deepcopy`` on a ``TaggedList``. [#788]
- Improve validator performance by skipping unnecessary step of copying schema objects. [#784]
- Fix bug with ``auto_inline`` option where inline blocks are not converted to internal when they exceed the threshold. [#802]
- Fix misinterpretation of byte order of blocks stored in FITS files. [#810]
- Improve read performance by skipping unnecessary rebuild of tagged tree. [#787]
- Add option to ``asdf.open`` and ``fits_embed.AsdfInFits.open`` that disables validation on read. [#792]
- Fix bugs and code style found by adding F and W ``flake8`` checks. [#797]
- Eliminate warnings in pytest plugin by using ``from_parent`` when available. [#799]
- Prevent validation of empty tree when ``AsdfFile`` is initialized. [#794]
- All warnings now subclass ``asdf.exceptions.AsdfWarning``. [#804]
- Improve warning message when falling back to an older schema, and note that fallback behavior will be removed in 3.0. [#806]
- Drop support for jsonschema 2.x. [#807]
- Stop traversing oneOf and anyOf combiners when filling or removing default values. [#811]
- Fix bug in version map caching that caused incompatible tags to be written under ASDF Standard 1.0.0. [#821]
- Fix bug that corrupted ndarrays when the underlying block array was converted to C order on write. [#827]
- Fix bug that produced unreadable ASDF files when an ndarray in the tree was both offset and broadcasted. [#827]
- Fix bug preventing validation of default values in ``schema.check_schema``. [#785]
- Add option to disable validation of schema default values in the pytest plugin. [#831]
- Prevent errors when extension metadata contains additional properties. [#832]

2.6.0 (2020-04-22)
------------------

- AsdfDeprecationWarning now subclasses DeprecationWarning. [#710]
- Resolve external references in custom schemas, and deprecate asdf.schema.load_custom_schema. [#738]
- Add ``asdf.info`` for displaying a summary of a tree, and ``AsdfFile.search`` for searching a tree. [#736]
- Add pytest plugin option to skip warning when a tag is unrecognized. [#771]
- Fix generic_io ``read_blocks()`` reading past the requested size. [#773]
- Add support for ASDF Standard 1.5.0, which includes several new transform schemas. [#776]
- Enable validation and serialization of previously unhandled numpy scalar types. [#778]
- Fix handling of trees containing implicit internal references and reference cycles.
  Eliminate need to call ``yamlutil.custom_tree_to_tagged_tree`` and ``yamlutil.tagged_tree_to_custom_tree`` from extension code, and allow ``ExtensionType`` subclasses to return generators. [#777]
- Fix bug preventing history entries when a file was previously saved without them. [#779]
- Update developer overview documentation to describe design of changes to handle internal references and reference cycles. [#781]

2.5.2 (2020-02-28)
------------------

- Add a developer overview document to help understand how ASDF works internally. Still a work in progress. [#730]
- Remove unnecessary dependency on six. [#739]
- Add developer documentation on schema versioning, additional schema and extension-related tests, and fix a variety of issues in ``AsdfType`` subclasses. [#750]
- Update asdf-standard to include schemas that were previously missing from 1.4.0 version maps. [#767]
- Simplify example in README.rst [#763]

2.5.1 (2020-01-07)
------------------

- Fix bug in test causing failure when test suite is run against an installed asdf package. [#732]

2.5.0 (2019-12-23)
------------------

- Added asdf-standard 1.4.0 to the list of supported versions. [#704]
- Fix load_schema LRU cache memory usage issue. [#682]
- Add convenience method for fetching the default resolver. [#682]
- ``SpecItem`` and ``Spec`` were deprecated in ``semantic_version`` and were replaced with ``SimpleSpec``. [#715]
- Pinned the minimum required ``semantic_version`` to 2.8. [#715]
- Fix bug causing segfault after update of a memory-mapped file. [#716]

2.4.2 (2019-08-29)
------------------

- Limit the version of ``semantic_version`` to <=2.6.0 to work around a Deprecation warning. [#700]

2.4.1 (2019-08-27)
------------------

- Define the ``in`` operator for top-level ``AsdfFile`` objects. [#623]
- Overhaul packaging infrastructure. Remove use of ``astropy_helpers``. [#670]
- Automatically register schema tester plugin. Do not enable schema tests by default. Add configuration setting and command line option to enable schema tests. [#676]
- Enable handling of subclasses of known custom types by using decorators for convenience. [#563]
- Add support for jsonschema 3.x. [#684]

2.3.4 (unreleased)
------------------

- Fix bug in ``NDArrayType.__len__``. It must be a method, not a property. [#673]

2.3.3 (2019-04-02)
------------------

- Pass ``ignore_unrecognized_tag`` setting through to ASDF-in-FITS. [#650]
- Use ``$schema`` keyword if available to determine meta-schema to use when testing whether schemas themselves are valid. [#654]
- Take into account resolvers from installed extensions when loading schemas for validation. [#655]
- Fix compatibility issue with new release of ``pyyaml`` (version 5.1). [#662]
- Allow use of ``pathlib.Path`` objects for ``custom_schema`` option. [#663]

2.3.2 (2019-02-19)
------------------

- Fix bug that occurs when comparing installed extension version with that found in file. [#641]

2.3.1 (2018-12-20)
------------------

- Provide source information for ``AsdfDeprecationWarning`` that come from extensions from external packages. [#629]
- Ensure that top-level accesses to the tree outside a closed context handler result in an ``OSError``. [#628]
- Fix the way ``generic_io`` handles URIs and paths on Windows. [#632]
- Fix bug in ``asdftool`` that prevented ``extract`` command from being visible. [#633]

2.3.0 (2018-11-28)
------------------

- Storage of arbitrary precision integers is now provided by ``asdf.IntegerType``.
  Reading a file with integer literals that are too large now causes only a warning instead of a validation error. This is to provide backwards compatibility for files that were created with a buggy version of ASDF (see #553 below). [#566]
- Remove WCS tags. These are now provided by the `gwcs package `_. [#593]
- Deprecate the ``asdf.asdftypes`` module in favor of ``asdf.types``. [#611]
- Support use of ``pathlib.Path`` with ``asdf.open`` and ``AsdfFile.write_to``. [#617]
- Update ASDF Standard submodule to version 1.3.0.

2.2.1 (2018-11-15)
------------------

- Fix an issue with the README that caused sporadic installation failures and also prevented the long description from being rendered on pypi. [#607]

2.2.0 (2018-11-14)
------------------

- Add new parameter ``lazy_load`` to ``AsdfFile.open``. It is ``True`` by default and preserves the default behavior. ``False`` detaches the loaded tree from the underlying file: all blocks are fully read and numpy arrays are materialized. Thus it becomes safe to close the file and continue using ``AsdfFile.tree``. However, the ``copy_arrays`` parameter is still effective and the active memory maps may still require the file to stay open in case ``copy_arrays`` is ``False``. [#573]
- Add ``AsdfConversionWarning`` for failures to convert ASDF tree into custom types. This warning is converted to an error when using ``assert_roundtrip_tree`` for tests. [#583]
- Deprecate ``asdf.AsdfFile.open`` in favor of ``asdf.open``. [#579]
- Add readonly protection to memory mapped arrays when the underlying file handle is readonly. [#579]

2.1.2 (2018-11-13)
------------------

- Make sure that all types corresponding to core tags are added to the type index before any others. This fixes a bug that was related to the way that subclass tags were overwritten by external extensions. [#598]

2.1.1 (2018-11-01)
------------------

- Make sure extension metadata is written even when constructing the ASDF tree on-the-fly. [#549]
- Fix large integer validation when storing `numpy` integer literals in the tree. [#553]
- Fix bug that caused subclass of external type to be serialized by the wrong tag. [#560]
- Fix bug that occurred when attempting to open invalid file but Astropy import fails while checking for ASDF-in-FITS. [#562]
- Fix bug that caused tree creation to fail when unable to locate a schema file for an unknown tag. This now simply causes a warning, and the offending node is converted to basic Python data structures. [#571]

2.1.0 (2018-09-25)
------------------

- Add API function for retrieving history entries. [#501]
- Store ASDF-in-FITS data inside a 1x1 BINTABLE HDU. [#519]
- Allow implicit conversion of ``namedtuple`` into serializable types. [#534]
- Fix bug that prevented use of ASDF-in-FITS with HDUs that have names with underscores. [#543]
- Add option to ``generic_io.get_file`` to close underlying file handle. [#544]
- Add top-level ``keys`` method to ``AsdfFile`` to access tree keys. [#545]

2.0.3 (2018-09-06)
------------------

- Update asdf-standard to reflect more stringent (and, consequently, more correct) requirements on the formatting of complex numbers. [#526]
- Fix bug with dangling file handle when using ASDF-in-FITS. [#533]
- Fix bug that prevented fortran-order arrays from being serialized properly. [#539]

2.0.2 (2018-07-27)
------------------

- Allow serialization of broadcasted ``numpy`` arrays. [#507]
- Fix bug that caused result of ``set_array_compression`` to be overwritten by ``all_array_compression`` argument to ``write_to``.
  [#510]
- Add workaround for Python OSX write limit bug (see https://bugs.python.org/issue24658). [#521]
- Fix bug with custom schema validation when using out-of-line definitions in schema file. [#522]

2.0.1 (2018-05-08)
------------------

- Allow test suite to run even when package is not installed. [#502]

2.0.0 (2018-04-19)
------------------

- Astropy-specific tags have moved to Astropy core package. [#359]
- ICRSCoord tag has moved to Astropy core package. [#401]
- Remove support for Python 2. [#409]
- Create ``pytest`` plugin to be used for testing schema files. [#425]
- Add metadata about extensions used to create a file to the history section of the file itself. [#475]
- Remove hard dependency on Astropy. It is still required for testing, and for processing ASDF-in-FITS files. [#476]
- Add command for extracting ASDF extension from ASDF-in-FITS file and converting it to a pure ASDF file. [#477]
- Add command for removing ASDF extension from ASDF-in-FITS file. [#480]
- Add an ``ExternalArrayReference`` type for referencing arrays in external files. [#400]
- Improve the way URIs are detected for ASDF-in-FITS files in order to fix bug with reading gzipped ASDF-in-FITS files. [#416]
- Explicitly disallow access to entire tree for ASDF file objects that have been closed. [#407]
- Install and load extensions using ``setuptools`` entry points. [#384]
- Automatically initialize ``asdf-standard`` submodule in ``setup.py``. [#398]
- Allow foreign tags to be resolved in schemas and files. Deprecate ``tag_to_schema_resolver`` property for ``AsdfFile`` and ``AsdfExtensionList``. [#399]
- Fix bug that caused serialized FITS tables to be duplicated in embedded ASDF HDU. [#411]
- Create and use a new non-standard FITS extension instead of ImageHDU for storing ASDF files embedded in FITS. Explicitly remove support for the ``.update`` method of ``AsdfInFits``, even though it didn't appear to be working previously. [#412]
- Allow package to be imported and used from source directory and builds in development mode. [#420]
- Add command to ``asdftool`` for querying installed extensions. [#418]
- Implement optional top-level validation pass using custom schema. This can be used to ensure that particular ASDF files follow custom conventions beyond those enforced by the standard. [#442]
- Remove restrictions affecting top-level attributes ``data``, ``wcs``, and ``fits``. Bump top-level ASDF schema version to v1.1.0. [#444]

1.3.3 (2018-03-01)
------------------

- Update test infrastructure to rely on new Astropy v3.0 plugins. [#461]
- Disable use of 2to3. This was causing test failures on Debian builds. [#463]

1.3.2 (2018-02-22)
------------------

- Updates to allow this version of ASDF to be compatible with Astropy v3.0. [#450]
- Remove tests that are no longer relevant due to latest updates to Astropy's testing infrastructure. [#458]

1.3.1 (2017-11-02)
------------------

- Relax requirement on ``semantic_version`` version to 2.3.1. [#361]
- Fix bug when retrieving file format version from new ASDF file. [#365]
- Fix bug when duplicating inline arrays. [#370]
- Allow tag references using the tag URI scheme to be resolved in schema files. [#371]

1.3.0 (2017-10-24)
------------------

- Fixed a bug in reading data from an "http:" url. [#231]
- Implements v 1.1.0 of the asdf schemas. [#233]
- Added a function ``is_asdf_file`` which inspects the input and returns ``True`` or ``False``. [#239]
- The ``open`` method of ``AsdfInFits`` now accepts URIs and open file handles in addition to HDULists.
  The ``open`` method of ``AsdfFile`` will now try to parse the given URI or file handle as ``AsdfInFits`` if it is not obviously a regular ASDF file. [#241]
- Updated WCS frame fields ``obsgeoloc`` and ``obsgeovel`` to reflect recent updates in ``astropy`` that changed representation from ``Quantity`` to ``CartesianRepresentation``. Updated to reflect ``astropy`` change that combines ``galcen_ra`` and ``galcen_dec`` into ``galcen_coord``. Added support for new field ``galcen_v_sun``. Added support for required module versions for tag classes. [#244]
- Added support for ``lz4`` compression algorithm [#258]. Also added support for using a different compression algorithm for writing out a file than the one that was used for reading the file (e.g. to convert blocks to use a different compression algorithm) [#257]
- Tag classes may now use an optional ``supported_versions`` attribute to declare exclusive support for particular versions of the corresponding schema. If this attribute is omitted (as it is for most existing tag classes), the tag is assumed to be compatible with all versions of the corresponding schema. If ``supported_versions`` is provided, the tag class implementation can include code that is conditioned on the schema version. If an incompatible schema is encountered, or if deserialization of the tagged object fails with an exception, a raw Python data structure will be returned. [#272]
- Added option to ``AsdfFile.open`` to allow suppression of warning messages when mismatched schema versions are encountered. [#294]
- Added a diff tool to ``asdftool`` to allow for visual comparison of pairs of ASDF files. [#286]
- Added command to ``asdftool`` to display available tags. [#303]
- When possible, display name of ASDF file that caused version mismatch warning. [#306]
- Issue a warning when an unrecognized tag is encountered. [#295] This warning is silenced by default, but can be enabled with a parameter to the ``AsdfFile`` constructor, or to ``AsdfFile.open``. Also added an option for ignoring warnings from unrecognized schema tags. [#319]
- Fix bug with loading JSON schemas in Python 3.5. [#317]
- Remove all remnants of support for Python 2.6. [#333]
- Fix issues with the type index used for writing out ASDF files. This ensures that items in the type index are not inadvertently overwritten by later versions of the same type. It also makes sure that schema example tests run against the correct version of the ASDF standard. [#350]
- Update time schema to reflect changes in astropy. This fixes an outstanding bug. [#343]
- Add ``copy_arrays`` option to ``asdf.open`` to control whether or not underlying array data should be memory mapped, if possible. [#355]
- Allow the tree to be accessed using top-level ``__getitem__`` and ``__setitem__``. [#352]

1.2.1 (2016-11-07)
------------------

- Make asdf conditionally dependent on the version of astropy to allow running it with older versions of astropy. [#228]

1.2.0 (2016-10-04)
------------------

- Added Tabular model. [#214]
- Forced new blocks to be contiguous. [#221]
- Rewrote code which tags complex objects. [#223]
- Fixed version error message. [#224]

1.0.5 (2016-06-28)
------------------

- Fixed a memory leak when reading wcs that grew memory to over 10 Gb. [#200]

1.0.4 (2016-05-25)
------------------

- Added wrapper class for astropy.core.Time, TaggedTime. [#198]

1.0.2 (2016-02-29)
------------------

- Renamed package to ASDF.
  [#190]
- Stopped support for Python 2.6. [#191]

1.0.1 (2016-01-08)
------------------

- Fixed installation from the source tarball on Python 3. [#187]
- Fixed error handling when opening ASDF files not supported by the current version of asdf. [#178]
- Fixed parse error that could occur sometimes when YAML data was read from a stream. [#183]

1.0.0 (2015-09-18)
------------------

- Initial release.

===== asdf-2.9.2/CODE_OF_CONDUCT.md =====

# asdf-format Open Source Code of Conduct

We expect all "asdf-format" organization projects to adopt a code of conduct that ensures a productive, respectful environment for all open source contributors and participants. We are committed to providing a strong and enforced code of conduct and expect everyone in our community to follow these guidelines when interacting with others in all forums. Our goal is to keep ours a positive, inclusive, successful, and growing community.

The community of participants in open source Astronomy projects is made up of members from around the globe with a diverse set of skills, personalities, and experiences. It is through these differences that our community experiences success and continued growth. As members of the community,

- We pledge to treat all people with respect and provide a harassment- and bullying-free environment, regardless of sex, sexual orientation and/or gender identity, disability, physical appearance, body size, race, nationality, ethnicity, and religion. In particular, sexual language and imagery, sexist, racist, or otherwise exclusionary jokes are not appropriate.

- We pledge to respect the work of others by recognizing acknowledgment/citation requests of original authors. As authors, we pledge to be explicit about how we want our own work to be cited or acknowledged.

- We pledge to welcome those interested in joining the community, and realize that including people with a variety of opinions and backgrounds will only serve to enrich our community. In particular, discussions relating to pros/cons of various technologies, programming languages, and so on are welcome, but these should be done with respect, taking proactive measures to ensure that all participants are heard and feel confident that they can freely express their opinions.

- We pledge to welcome questions and answer them respectfully, paying particular attention to those new to the community. We pledge to provide respectful criticisms and feedback in forums, especially in discussion threads resulting from code contributions.

- We pledge to be conscientious of the perceptions of the wider community and to respond to criticism respectfully. We will strive to model behaviors that encourage productive debate and disagreement, both within our community and where we are criticized. We will treat those outside our community with the same respect as people within our community.

- We pledge to help the entire community follow the code of conduct, and to not remain silent when we see violations of the code of conduct. We will take action when members of our community violate this code, such as contacting conduct@stsci.edu (all emails sent to this address will be treated with the strictest confidence) or talking privately with the person.
This code of conduct applies to all community situations online and offline, including mailing lists, forums, social media, conferences, meetings, associated social events, and one-to-one interactions.

Parts of this code of conduct have been adapted from the Astropy and Numfocus codes of conduct:

http://www.astropy.org/code_of_conduct.html

https://www.numfocus.org/about/code-of-conduct/

===== asdf-2.9.2/CONTRIBUTING.md =====

Please open a new issue or new pull request for bugs, feedback, or new features you would like to see. If there is an issue you would like to work on, please leave a comment and we will be happy to assist. New contributions and contributors are very welcome!

The main development work is done on the "master" branch. The "stable" branch is protected and used for official releases. The rest of the branches are for release maintenance and should not be used normally. Unless otherwise told by a maintainer, pull requests should be made and submitted to the "master" branch.

New to GitHub or open source projects? If you are unsure about where to start or haven't used GitHub before, please feel free to contact the package maintainers.

Feedback and feature requests? Is there something missing you would like to see? Please open an issue or send an email to the maintainers.

This package follows the asdf-format [Code of Conduct](CODE_OF_CONDUCT.md) and strives to provide a welcoming community to all of our users and contributors.

===== asdf-2.9.2/LICENSE =====

Copyright (c) 2021 Association of Universities for Research in Astronomy.
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===== asdf-2.9.2/MANIFEST.in =====

recursive-include docs *
prune docs/_build
prune docs/api
global-exclude *.pyc
exclude .*

===== asdf-2.9.2/PKG-INFO =====

Metadata-Version: 2.1
Name: asdf
Version: 2.9.2
Summary: Python implementation of the ASDF Standard
Home-page: http://github.com/asdf-format/asdf
Author: The ASDF Developers
Author-email: help@stsci.edu
License: BSD-3-Clause
Project-URL: Bug Tracker, https://github.com/asdf-format/asdf/issues
Project-URL: Documentation, https://asdf.readthedocs.io/en/stable
Project-URL: Source Code, https://github.com/asdf-format/asdf
Platform: UNKNOWN
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Development Status :: 5 - Production/Stable
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
Provides-Extra: all
Provides-Extra: docs
Provides-Extra: tests
License-File: LICENSE
===== asdf-2.9.2/README.rst =====

ASDF - Advanced Scientific Data Format
======================================

.. image:: https://github.com/asdf-format/asdf/workflows/CI/badge.svg
    :target: https://github.com/asdf-format/asdf/actions
    :alt: CI Status

.. image:: https://github.com/asdf-format/asdf/workflows/s390x/badge.svg
    :target: https://github.com/asdf-format/asdf/actions
    :alt: s390x Status

.. image:: https://github.com/asdf-format/asdf/workflows/Downstream/badge.svg
    :target: https://github.com/asdf-format/asdf/actions
    :alt: Downstream CI Status

.. image:: https://readthedocs.org/projects/asdf/badge/?version=latest
    :target: https://asdf.readthedocs.io/en/latest/

.. image:: https://codecov.io/gh/asdf-format/asdf/branch/master/graphs/badge.svg
    :target: https://codecov.io/gh/asdf-format/asdf

.. image:: https://img.shields.io/pypi/l/asdf.svg
    :target: https://img.shields.io/pypi/l/asdf.svg

|

.. _begin-summary-text:

The **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat (ASDF) is a next-generation interchange format for scientific data. This package contains the Python implementation of the ASDF Standard. More information on the ASDF Standard itself can be found `here `__.

The ASDF format has the following features:

* A hierarchical, human-readable metadata format (implemented using `YAML `__)
* Numerical arrays are stored as binary data blocks which can be memory mapped. Data blocks can optionally be compressed.
* The structure of the data can be automatically validated using schemas (implemented using `JSON Schema `__)
* Native Python data types (numerical types, strings, dicts, lists) are serialized automatically
* ASDF can be extended to serialize custom data types

.. _end-summary-text:

ASDF is under active development `on github `__. More information on contributing can be found `below <#contributing>`__.

Overview
--------

This section outlines basic use cases of the ASDF package for creating and reading ASDF files.

Creating a file
~~~~~~~~~~~~~~~

.. _begin-create-file-text:

We're going to store several `numpy` arrays and other data to an ASDF file. We do this by creating a "tree", which is simply a `dict`, and we provide it as input to the constructor of `AsdfFile`:

.. code:: python

    import asdf
    import numpy as np

    # Create some data
    sequence = np.arange(100)
    squares = sequence**2
    random = np.random.random(100)

    # Store the data in an arbitrarily nested dictionary
    tree = {
        'foo': 42,
        'name': 'Monty',
        'sequence': sequence,
        'powers': { 'squares' : squares },
        'random': random
    }

    # Create the ASDF file object from our data tree
    af = asdf.AsdfFile(tree)

    # Write the data to a new file
    af.write_to('example.asdf')

If we open the newly created file, we can see some of the key features of ASDF on display:

::

    #ASDF 1.0.0
    #ASDF_STANDARD 1.2.0
    %YAML 1.1
    %TAG ! tag:stsci.edu:asdf/
    --- !core/asdf-1.1.0
    asdf_library: !core/software-1.0.0 {author: The ASDF Developers,
      homepage: 'http://github.com/asdf-format/asdf', name: asdf, version: 2.0.0}
    history:
      extensions:
      - !core/extension_metadata-1.0.0
        extension_class: asdf.extension.BuiltinExtension
        software: {name: asdf, version: 2.0.0}
    foo: 42
    name: Monty
    powers:
      squares: !core/ndarray-1.0.0
        source: 1
        datatype: int64
        byteorder: little
        shape: [100]
    random: !core/ndarray-1.0.0
      source: 2
      datatype: float64
      byteorder: little
      shape: [100]
    sequence: !core/ndarray-1.0.0
      source: 0
      datatype: int64
      byteorder: little
      shape: [100]
    ...

The metadata in the file mirrors the structure of the tree that was stored. It is hierarchical and human-readable. Notice that metadata has been added to the tree that was not explicitly given by the user. Notice also that the numerical array data is not stored in the metadata tree itself. Instead, it is stored as binary data blocks below the metadata section (not shown here).

It is possible to compress the array data when writing the file:

.. code:: python

    af.write_to('compressed.asdf', all_array_compression='zlib')

The built-in compression algorithms are ``'zlib'`` and ``'bzp2'``. The ``'lz4'`` algorithm becomes available when the `lz4 `__ package is installed. Other compression algorithms may be available via extensions.

.. _end-create-file-text:

Reading a file
~~~~~~~~~~~~~~

.. _begin-read-file-text:

To read an existing ASDF file, we simply use the top-level `open` function of the `asdf` package:

.. code:: python

    import asdf

    af = asdf.open('example.asdf')

The `open` function also works as a context manager:

.. code:: python

    with asdf.open('example.asdf') as af:
        ...

To get a quick overview of the data stored in the file, use the top-level `AsdfFile.info()` method:

.. code:: python

    >>> import asdf
    >>> af = asdf.open('example.asdf')
    >>> af.info()
    root (AsdfObject)
    ├─asdf_library (Software)
    │ ├─author (str): The ASDF Developers
    │ ├─homepage (str): http://github.com/asdf-format/asdf
    │ ├─name (str): asdf
    │ └─version (str): 2.8.0
    ├─history (dict)
    │ └─extensions (list)
    │   └─[0] (ExtensionMetadata)
    │     ├─extension_class (str): asdf.extension.BuiltinExtension
    │     └─software (Software)
    │       ├─name (str): asdf
    │       └─version (str): 2.8.0
    ├─foo (int): 42
    ├─name (str): Monty
    ├─powers (dict)
    │ └─squares (NDArrayType): shape=(100,), dtype=int64
    ├─random (NDArrayType): shape=(100,), dtype=float64
    └─sequence (NDArrayType): shape=(100,), dtype=int64

The `AsdfFile` behaves like a Python `dict`, and nodes are accessed like any other dictionary entry:

.. code:: python

    >>> af['name']
    'Monty'
    >>> af['powers']
    {'squares': }

Array data remains unloaded until it is explicitly accessed:

.. code:: python

    >>> af['powers']['squares']
    array([   0,    1,    4,    9,   16,   25,   36,   49,   64,   81,  100,
            121,  144,  169,  196,  225,  256,  289,  324,  361,  400,  441,
            484,  529,  576,  625,  676,  729,  784,  841,  900,  961, 1024,
           1089, 1156, 1225, 1296, 1369, 1444, 1521, 1600, 1681, 1764, 1849,
           1936, 2025, 2116, 2209, 2304, 2401, 2500, 2601, 2704, 2809, 2916,
           3025, 3136, 3249, 3364, 3481, 3600, 3721, 3844, 3969, 4096, 4225,
           4356, 4489, 4624, 4761, 4900, 5041, 5184, 5329, 5476, 5625, 5776,
           5929, 6084, 6241, 6400, 6561, 6724, 6889, 7056, 7225, 7396, 7569,
           7744, 7921, 8100, 8281, 8464, 8649, 8836, 9025, 9216, 9409, 9604,
           9801])

    >>> import numpy as np
    >>> expected = [x**2 for x in range(100)]
    >>> np.equal(af['powers']['squares'], expected).all()
    True

By default, uncompressed data blocks are memory mapped for efficient access.
Memory mapping can be disabled by using the ``copy_arrays`` option of `open` when reading:

.. code:: python

    af = asdf.open('example.asdf', copy_arrays=True)

.. _end-read-file-text:

For more information and for advanced usage examples, see the `documentation <#documentation>`__.

Extending ASDF
~~~~~~~~~~~~~~

Out of the box, the ``asdf`` package automatically serializes and deserializes native Python types. It is possible to extend ``asdf`` by implementing custom tags that correspond to custom user types. More information on extending ASDF can be found in the `official documentation `__.

Installation
------------

.. _begin-pip-install-text:

Stable releases of the ASDF Python package are registered `at PyPi `__. The latest stable version can be installed using ``pip``:

::

    $ pip install asdf

.. _begin-source-install-text:

The latest development version of ASDF is available from the ``master`` branch `on github `__. To clone the project:

::

    $ git clone https://github.com/asdf-format/asdf

To install:

::

    $ cd asdf
    $ git submodule update --init
    $ pip install .

To install in `development mode `__::

    $ pip install -e .

.. note::

    The source repository makes use of a git submodule for referencing the schemas provided by the ASDF standard. While this submodule is automatically initialized when installing the package (including in development mode), it may be necessary for developers to manually update the submodule if changes are made upstream. See the `documentation on git submodules `__ for more information.

.. _end-source-install-text:

Testing
-------

.. _begin-testing-text:

To install the test dependencies from a source checkout of the repository:

::

    $ pip install -e ".[tests]"

To run the unit tests from a source checkout of the repository:

::

    $ pytest

It is also possible to run the test suite from an installed version of the package:

::

    $ pip install "asdf[tests]"
    $ pytest --pyargs asdf

It is also possible to run the tests using `tox `__:

::

    $ pip install tox

To list all available environments:

::

    $ tox -va

To run a specific environment:

::

    $ tox -e <envname>

.. _end-testing-text:

Documentation
-------------

More detailed documentation on this software package can be found `here `__.

More information on the ASDF Standard itself can be found `here `__.

There are two mailing lists for ASDF:

* `asdf-users `_
* `asdf-developers `_

If you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, information can be found `here `__.

Contributing
------------

We welcome feedback and contributions to the project. Contributions of code, documentation, or general feedback are all appreciated. Please follow the `contributing guidelines `__ to submit an issue or a pull request.

We strive to provide a welcoming community to all of our users by abiding by the `Code of Conduct `__.
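As an illustrative aside (not part of the distributed README), the reading and writing examples above compose into a simple round trip. A minimal sketch using only the API shown in this README, with a hypothetical file name, looks like this:

.. code:: python

    import asdf
    import numpy as np

    # Write a small tree to a hypothetical file.
    af = asdf.AsdfFile({'data': np.arange(10)})
    af.write_to('roundtrip.asdf')

    # Read it back; `open` works as a context manager and arrays load lazily.
    with asdf.open('roundtrip.asdf') as af2:
        assert np.array_equal(af2['data'], np.arange(10))
        af2.info()  # prints a summary tree like the one shown above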
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8141205 asdf-2.9.2/asdf/0000755000537500020070000000000000000000000014513 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/__init__.py0000644000537500020070000000134400000000000016626 0ustar00wjamiesonSTSCI\science""" asdf: Python library for reading and writing Advanced Scientific Data Format (ASDF) files """ __all__ = [ 'AsdfFile', 'CustomType', 'AsdfExtension', 'Stream', 'open', 'commands', 'IntegerType', 'ExternalArrayReference', 'info', '__version__', 'ValidationError', 'get_config', 'config_context', ] from .asdf import AsdfFile, open_asdf as open from .types import CustomType from .extension import AsdfExtension from .stream import Stream from . import commands from .tags.core import IntegerType from .tags.core.external_reference import ExternalArrayReference from ._convenience import info from .config import get_config, config_context from .version import version as __version__ from jsonschema import ValidationError ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/_convenience.py0000644000537500020070000000375100000000000017526 0ustar00wjamiesonSTSCI\science""" Implementation of the asdf.info(...) function. This is just a thin wrapper around _display module code. """ import pathlib from contextlib import contextmanager from .asdf import open_asdf, AsdfFile from ._display import render_tree, DEFAULT_MAX_ROWS, DEFAULT_MAX_COLS, DEFAULT_SHOW_VALUES __all__ = ["info"] def info(node_or_path, max_rows=DEFAULT_MAX_ROWS, max_cols=DEFAULT_MAX_COLS, show_values=DEFAULT_SHOW_VALUES): """ Print a rendering of an ASDF tree or sub-tree to stdout. Parameters ---------- node_or_path : str, pathlib.Path, asdf.asdf.AsdfFile, or any \ ASDF tree node The tree or sub-tree to render. Strings and Path objects will first be passed to asdf.open(...). max_rows : int, tuple, or None, optional Maximum number of lines to print. Nodes that cannot be displayed will be elided with a message. If int, constrain total number of displayed lines. If tuple, constrain lines per node at the depth corresponding \ to the tuple index. If None, display all lines. max_cols : int or None, optional Maximum length of line to print. Nodes that cannot be fully displayed will be truncated with a message. If int, constrain length of displayed lines. If None, line length is unconstrained. show_values : bool, optional Set to False to disable display of primitive values in the rendered tree. """ with _manage_node(node_or_path) as node: lines = render_tree(node, max_rows=max_rows, max_cols=max_cols, show_values=show_values, identifier="root") print("\n".join(lines)) @contextmanager def _manage_node(node_or_path): if isinstance(node_or_path, str) or isinstance(node_or_path, pathlib.Path): with open_asdf(node_or_path) as af: yield af.tree else: if isinstance(node_or_path, AsdfFile): yield node_or_path.tree else: yield node_or_path ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/_display.py0000644000537500020070000002626500000000000016704 0ustar00wjamiesonSTSCI\science""" Utilities for displaying the content of an ASDF tree. Normally these tools only will introspect dicts, lists, and primitive values (with an exception for arrays). 
However, if the object that is generated by the converter mechanism has a __asdf_traverse__() method, then it will call that method expecting a dict or list to be returned. The method can return what it thinks is suitable for display. """ import numpy as np from .util import is_primitive from .treeutil import get_children from .tags.core.ndarray import NDArrayType __all__ = [ "DEFAULT_MAX_ROWS", "DEFAULT_MAX_COLS", "DEFAULT_SHOW_VALUES", "render_tree", "format_bold", "format_faint", "format_italic", ] DEFAULT_MAX_ROWS = 24 DEFAULT_MAX_COLS = 120 DEFAULT_SHOW_VALUES = True def render_tree(node, max_rows=DEFAULT_MAX_ROWS, max_cols=DEFAULT_MAX_COLS, show_values=DEFAULT_SHOW_VALUES, filters=[], identifier="root"): """ Render a tree as text with indents showing depth. """ info = _NodeInfo.from_root_node(identifier, node) if len(filters) > 0: if not _filter_tree(info, filters): return [] renderer = _TreeRenderer(max_rows, max_cols, show_values) return renderer.render(info) def format_bold(value): """ Wrap the input value in the ANSI escape sequence for increased intensity. """ return _format_code(value, 1) def format_faint(value): """ Wrap the input value in the ANSI escape sequence for decreased intensity. """ return _format_code(value, 2) def format_italic(value): """ Wrap the input value in the ANSI escape sequence for italic. """ return _format_code(value, 3) def _format_code(value, code): return "\x1B[{}m{}\x1B[0m".format(code, value) class _NodeInfo: """ Container for a node, its state of visibility, and values used to display it. """ @classmethod def from_root_node(cls, root_identifier, root_node): """ Build a _NodeInfo tree from the given ASDF root node. Intentionally processes the tree in breadth-first order so that recursively referenced nodes are displayed at their shallowest reference point. """ current_nodes = [(None, root_identifier, root_node)] seen = set() root_info = None current_depth = 0 while True: next_nodes = [] for parent, identifier, node in current_nodes: if (isinstance(node, dict) or isinstance(node, tuple) or cls.supports_info(node)) and id(node) in seen: info = _NodeInfo(parent, identifier, node, current_depth, recursive=True) parent.children.append(info) else: info = _NodeInfo(parent, identifier, node, current_depth) if root_info is None: root_info = info if parent is not None: parent.children.append(info) seen.add(id(node)) if cls.supports_info(node): tnode = node.__asdf_traverse__() else: tnode = node for child_identifier, child_node in get_children(tnode): next_nodes.append((info, child_identifier, child_node)) if len(next_nodes) == 0: break current_nodes = next_nodes current_depth += 1 return root_info def __init__( self, parent, identifier, node, depth, recursive=False, visible=True): self.parent = parent self.identifier = identifier self.node = node self.depth = depth self.recursive = recursive self.visible = visible self.children = [] @classmethod def supports_info(cls, node): """ This method determines if the node is an instance of a class that supports introspection by the info machinery. This determined by the presence of a __asdf_traverse__ method. """ return hasattr(node, "__asdf_traverse__") @property def visible_children(self): return [c for c in self.children if c.visible] @property def parent_node(self): if self.parent is None: return None else: return self.parent.node def _filter_tree(info, filters): """ Remove nodes from the tree that get caught in the filters. Mutates the tree. 
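Returns
-------
bool
    `True` if this node should be retained, i.e. it passed every
    filter or at least one of its children survived filtering.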
""" filtered_children = [] for child in info.children: if _filter_tree(child, filters): filtered_children.append(child) info.children = filtered_children return len(info.children) > 0 or all(f(info.node, info.identifier) for f in filters) class _TreeRenderer: """ Render a _NodeInfo tree with indent showing depth. """ def __init__(self, max_rows, max_cols, show_values): self._max_rows = max_rows self._max_cols = max_cols self._show_values = show_values def render(self, info): self._mark_visible(info) lines, elided = self._render(info, set(), True) if elided: lines.append(format_faint(format_italic("Some nodes not shown."))) return lines def _mark_visible(self, root_info): """ Select nodes to display, respecting max_rows. Nodes at lower depths will be prioritized. """ if isinstance(self._max_rows, tuple): self._mark_visible_tuple(root_info) else: self._mark_visible_int(root_info) def _mark_visible_int(self, root_info): """ Select nodes to display, obeying max_rows as an overall limit on the number of lines returned. """ if self._max_rows is None: return if self._max_rows < 2: root_info.visible = False return current_infos = [root_info] # Reserve one row for the root node, and another for the # "Some nodes not shown." message. rows_left = self._max_rows - 2 while True: next_infos = [] for info in current_infos: if rows_left >= len(info.children): rows_left -= len(info.children) next_infos.extend(info.children) elif rows_left > 1: for child in info.children[rows_left-1:]: child.visible = False next_infos.extend(info.children[0:rows_left-1]) rows_left = 0 else: for child in info.children: child.visible = False if len(next_infos) == 0: break current_infos = next_infos def _mark_visible_tuple(self, root_info): """ Select nodes to display, obeying the per-node max_rows value for each tree depth. """ max_rows = (None,) + self._max_rows current_infos = [root_info] while True: next_infos = [] for info in current_infos: if info.depth + 1 < len(max_rows): rows_left = max_rows[info.depth + 1] if rows_left is None or rows_left >= len(info.children): next_infos.extend(info.children) elif rows_left > 1: for child in info.children[rows_left-1:]: child.visible = False next_infos.extend(info.children[0:rows_left-1]) else: for child in info.children: child.visible = False else: for child in info.children: child.visible = False if len(next_infos) == 0: break current_infos = next_infos def _render(self, info, active_depths, is_tail): """ Render the tree. Called recursively on child nodes. 
""" lines = [] if info.visible == False: return lines, True lines.append(self._render_node(info, active_depths, is_tail)) elided = len(info.visible_children) < len(info.children) for i, child in enumerate(info.visible_children): if i == len(info.children) - 1: child_is_tail = True child_active_depths = active_depths else: child_is_tail = False child_active_depths = active_depths.union({info.depth}) child_list, child_elided = self._render(child, child_active_depths, child_is_tail) lines.extend(child_list) elided = elided or child_elided num_visible_children = len(info.visible_children) if num_visible_children > 0 and num_visible_children != len(info.children): hidden_count = len(info.children) - num_visible_children prefix = self._make_prefix(info.depth + 1, active_depths, True) message = format_faint(format_italic(str(hidden_count) + ' not shown')) lines.append( "{}{}".format(prefix, message) ) return lines, elided def _render_node(self, info, active_depths, is_tail): prefix = self._make_prefix(info.depth, active_depths, is_tail) value = self._render_node_value(info) if isinstance(info.parent_node, list) or isinstance(info.parent_node, tuple): line = "{}[{}] {}".format(prefix, format_bold(info.identifier), value) else: line = "{}{} {}".format(prefix, format_bold(info.identifier), value) visible_children = info.visible_children if len(visible_children) == 0 and len(info.children) > 0: line = line + format_italic(" ...") if info.recursive: line = line + " " + format_faint(format_italic("(recursive reference)")) if self._max_cols is not None and len(line) > self._max_cols: message = " (truncated)" line = line[0 : (self._max_cols - len(message))] + format_faint(format_italic(message)) return line def _render_node_value(self, info): rendered_type = type(info.node).__name__ if is_primitive(info.node) and self._show_values: return "({}): {}".format(rendered_type, info.node) elif isinstance(info.node, NDArrayType) or isinstance(info.node, np.ndarray): return "({}): shape={}, dtype={}".format(rendered_type, info.node.shape, info.node.dtype.name) else: return "({})".format(rendered_type) def _make_prefix(self, depth, active_depths, is_tail): """ Create a prefix for a displayed node, accounting for depth and including lines that show connections to other nodes. """ prefix = "" if depth < 1: return prefix if depth >= 2: for n in range(0, depth - 1): if n in active_depths: prefix = prefix + "│ " else: prefix = prefix + " " if is_tail: prefix = prefix + "└─" else: prefix = prefix + "├─" return format_faint(prefix) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/_helpers.py0000644000537500020070000000110000000000000016656 0ustar00wjamiesonSTSCI\sciencefrom . import versioning from .version import version as asdf_package_version def validate_version(version): # Account for the possibility of AsdfVersion version = str(version) if version not in versioning.supported_versions: raise ValueError( "ASDF Standard version {} is not supported by asdf=={}. 
" "Available ASDF Standard versions: {}".format( version, asdf_package_version, ", ".join(str(v) for v in versioning.supported_versions), ) ) return version ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/asdf.py0000644000537500020070000020111500000000000016002 0ustar00wjamiesonSTSCI\scienceimport io import os import time import copy import datetime import warnings from pkg_resources import parse_version import numpy as np from jsonschema import ValidationError from .config import get_config, config_context from . import block from . import constants from . import generic_io from . import reference from . import schema from . import treeutil from . import util from . import version from . import versioning from . import yamlutil from . import _display as display from .exceptions import AsdfDeprecationWarning, AsdfWarning, AsdfConversionWarning from .extension import ( AsdfExtensionList, AsdfExtension, Extension, ExtensionProxy, get_cached_asdf_extension_list, get_cached_extension_manager, ) from .util import NotSet from .search import AsdfSearchResult from ._helpers import validate_version from .tags.core import AsdfObject, Software, HistoryEntry, ExtensionMetadata def get_asdf_library_info(): """ Get information about asdf to include in the asdf_library entry in the Tree. """ return Software({ 'name': 'asdf', 'version': version.version, 'homepage': 'http://github.com/asdf-format/asdf', 'author': 'The ASDF Developers' }) class AsdfFile: """ The main class that represents an ASDF file object. """ def __init__(self, tree=None, uri=None, extensions=None, version=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, ignore_implicit_conversion=False, copy_arrays=False, lazy_load=True, custom_schema=None, _readonly=False): """ Parameters ---------- tree : dict or AsdfFile, optional The main tree data in the ASDF file. Must conform to the ASDF schema. uri : str, optional The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, if possible and if created from `AsdfFile.open`. extensions : object, optional Additional extensions to use when reading and writing the file. May be any of the following: `asdf.extension.AsdfExtension`, `asdf.extension.Extension`, `asdf.extension.AsdfExtensionList` or a `list` of extensions. version : str, optional The ASDF Standard version. If not provided, defaults to the configured default version. See `asdf.config.AsdfConfig.default_version`. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. Set to `True` by default. ignore_unrecognized_tag : bool, optional When `True`, do not raise warnings for unrecognized tags. Set to `False` by default. ignore_implicit_conversion : bool When `True`, do not raise warnings when types in the tree are implicitly converted into a serializable object. The motivating case for this is currently `namedtuple`, which cannot be serialized as-is. copy_arrays : bool, optional When `False`, when reading files, attempt to memmap underlying data arrays when possible. lazy_load : bool, optional When `True` and the underlying file handle is seekable, data arrays will only be loaded lazily: i.e. when they are accessed for the first time. In this case the underlying file must stay open during the lifetime of the tree. 
Setting to False causes all data arrays to be loaded up front, which means that they can be accessed even after the underlying file is closed. Note: even if `lazy_load` is `False`, `copy_arrays` is still taken into account. custom_schema : str, optional Path to a custom schema file that will be used for a secondary validation pass. This can be used to ensure that particular ASDF files follow custom conventions beyond those enforced by the standard. """ # Don't use the version setter here; it tries to access # the extensions, which haven't been assigned yet. if version is None: self._version = versioning.AsdfVersion(get_config().default_version) else: self._version = versioning.AsdfVersion(validate_version(version)) self._user_extensions = self._process_user_extensions(extensions) self._plugin_extensions = self._process_plugin_extensions() self._extension_manager = None self._extension_list = None if custom_schema is not None: self._custom_schema = schema._load_schema_cached(custom_schema, self.resolver, True, False) else: self._custom_schema = None self._ignore_version_mismatch = ignore_version_mismatch self._ignore_unrecognized_tag = ignore_unrecognized_tag self._ignore_implicit_conversion = ignore_implicit_conversion # Set of (string, string) tuples representing tag version mismatches # that we've already warned about for this file. self._warned_tag_pairs = set() self._file_format_version = None # Context of a call to treeutil.walk_and_modify, needed in the AsdfFile # in case walk_and_modify is re-entered by extension code (via # custom_tree_to_tagged_tree or tagged_tree_to_custom_tree). self._tree_modification_context = treeutil._TreeModificationContext() self._fd = None self._closed = False self._external_asdf_by_uri = {} self._blocks = block.BlockManager( self, copy_arrays=copy_arrays, lazy_load=lazy_load, readonly=_readonly) self._uri = None if tree is None: # Bypassing the tree property here, to avoid validating # an empty tree. self._tree = AsdfObject() elif isinstance(tree, AsdfFile): if self.extensions != tree.extensions: # TODO(eslavich): Why not? What if that's the goal # of copying the file? raise ValueError( "Can not copy AsdfFile and change active extensions") self._uri = tree.uri # Set directly to self._tree (bypassing property), since # we can assume the other AsdfFile is already valid. self._tree = tree.tree self.run_modifying_hook('copy_to_new_asdf', validate=False) self.find_references() else: self.tree = tree self.find_references() if uri is not None: self._uri = uri self._comments = [] @property def version(self): """ Get this AsdfFile's ASDF Standard version. Returns ------- asdf.versioning.AsdfVersion """ return self._version @version.setter def version(self, value): """ Set this AsdfFile's ASDF Standard version. Parameters ---------- value : str or asdf.versioning.AsdfVersion """ self._version = versioning.AsdfVersion(validate_version(value)) # The new version may not be compatible with the previous # set of extensions, so we need to check them again: self._user_extensions = self._process_user_extensions(self._user_extensions) self._plugin_extensions = self._process_plugin_extensions() self._extension_manager = None self._extension_list = None @property def version_string(self): """ Get this AsdfFile's ASDF Standard version as a string. 
Returns ------- str """ return str(self._version) @property def version_map(self): return versioning.get_version_map(self.version_string) @property def extensions(self): """ Get the list of user extensions that are enabled for use with this AsdfFile. Returns ------- list of asdf.extension.ExtensionProxy """ return self._user_extensions @extensions.setter def extensions(self, value): """ Set the list of user extensions that are enabled for use with this AsdfFile. Parameters ---------- value : list of asdf.extension.AsdfExtension or asdf.extension.Extension """ self._user_extensions = self._process_user_extensions(value) self._extension_manager = None self._extension_list = None @property def extension_manager(self): """ Get the ExtensionManager for this AsdfFile. Returns ------- asdf.extension.ExtensionManager """ if self._extension_manager is None: self._extension_manager = get_cached_extension_manager(self._user_extensions + self._plugin_extensions) return self._extension_manager @property def extension_list(self): """ Get the AsdfExtensionList for this AsdfFile. Returns ------- asdf.extension.AsdfExtensionList """ if self._extension_list is None: self._extension_list = get_cached_asdf_extension_list(self._user_extensions + self._plugin_extensions) return self._extension_list def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def _check_extensions(self, tree, strict=False): """ Compare the user's installed extensions to metadata in the tree and warn when a) an extension is missing or b) an extension is present but the file was written with a later version of the extension's package. Parameters ---------- tree : AsdfObject Fully converted tree of custom types. strict : bool, optional Set to `True` to convert warnings to exceptions. """ if 'history' not in tree or not isinstance(tree['history'], dict) or \ 'extensions' not in tree['history']: return for extension in tree['history']['extensions']: installed = None for ext in self._user_extensions + self._plugin_extensions: if (extension.extension_uri is not None and extension.extension_uri == ext.extension_uri or extension.extension_uri is None and extension.extension_class in ext.legacy_class_names): installed = ext break filename = "'{}' ".format(self._fname) if self._fname else '' if extension.extension_uri is not None: extension_description = "URI '{}'".format(extension.extension_uri) else: extension_description = "class '{}'".format(extension.extension_class) if extension.software is not None: extension_description += " (from package {}=={})".format( extension.software["name"], extension.software["version"], ) if installed is None: msg = ( "File {}was created with extension {}, which is " "not currently installed" ).format(filename, extension_description) if strict: raise RuntimeError(msg) else: warnings.warn(msg, AsdfWarning) elif extension.software: # Local extensions may not have a real version. If the package name changed, # then the version sequence may have been reset. if installed.package_version is None or installed.package_name != extension.software['name']: continue # Compare version in file metadata with installed version if parse_version(installed.package_version) < parse_version(extension.software['version']): msg = ( "File {}was created with extension {}, but older package ({}=={}) " "is installed." 
).format( filename, extension_description, installed.package_name, installed.package_version, ) if strict: raise RuntimeError(msg) else: warnings.warn(msg, AsdfWarning) def _process_plugin_extensions(self): """ Select installed extensions that are compatible with this file's ASDF Standard version. Returns ------- list of asdf.extension.ExtensionProxy """ return [e for e in get_config().extensions if self.version_string in e.asdf_standard_requirement] def _process_user_extensions(self, extensions): """ Validate a list of extensions requested by the user add missing extensions registered with the current `AsdfConfig`. Parameters ---------- extensions : object May be any of the following: `asdf.extension.AsdfExtension`, `asdf.extension.Extension`, `asdf.extension.AsdfExtensionList` or a `list` of extensions. Returns ------- list of asdf.extension.ExtensionProxy """ if extensions is None: extensions = [] elif isinstance(extensions, (AsdfExtension, Extension, ExtensionProxy)): extensions = [extensions] elif isinstance(extensions, AsdfExtensionList): extensions = extensions.extensions if not isinstance(extensions, list): raise TypeError( "The extensions parameter must be an extension, list of extensions, or " "instance of AsdfExtensionList" ) extensions = [ExtensionProxy.maybe_wrap(e) for e in extensions] result = [] for extension in extensions: if self.version_string not in extension.asdf_standard_requirement: warnings.warn( "Extension {} does not support ASDF Standard {}. It has been disabled.".format( extension, self.version_string ), AsdfWarning ) else: result.append(extension) return result def _update_extension_history(self, serialization_context): """ Update the extension metadata on this file's tree to reflect extensions used during serialization. Parameters ---------- serialization_context : asdf.asdf.SerializationContext The context that was used to serialize the tree. """ if serialization_context.version < versioning.NEW_HISTORY_FORMAT_MIN_VERSION: return if 'history' not in self.tree: self.tree['history'] = dict(extensions=[]) # Support clients who are still using the old history format elif isinstance(self.tree['history'], list): histlist = self.tree['history'] self.tree['history'] = dict(entries=histlist, extensions=[]) warnings.warn("The ASDF history format has changed in order to " "support metadata about extensions. 
History entries " "should now be stored under tree['history']['entries'].", AsdfWarning) elif 'extensions' not in self.tree['history']: self.tree['history']['extensions'] = [] for extension in serialization_context._extensions_used: ext_name = extension.class_name ext_meta = ExtensionMetadata(extension_class=ext_name) if extension.package_name is not None: ext_meta['software'] = Software(name=extension.package_name, version=extension.package_version) if extension.extension_uri is not None: ext_meta['extension_uri'] = extension.extension_uri if extension.compressors: ext_meta['supported_compression'] = [comp.label.decode('ascii') for comp in extension.compressors] for i, entry in enumerate(self.tree['history']['extensions']): # Update metadata about this extension if it already exists if (entry.extension_uri is not None and entry.extension_uri == extension.extension_uri or entry.extension_class in extension.legacy_class_names): self.tree['history']['extensions'][i] = ext_meta break else: self.tree['history']['extensions'].append(ext_meta) @property def file_format_version(self): if self._file_format_version is None: return versioning.AsdfVersion(self.version_map['FILE_FORMAT']) else: return self._file_format_version def close(self): """ Close the file handles associated with the `AsdfFile`. """ if self._fd and not self._closed: # This is ok to always do because GenericFile knows # whether it "owns" the file and should close it. self._fd.close() self._fd = None self._closed = True for external in self._external_asdf_by_uri.values(): external.close() self._external_asdf_by_uri.clear() self._blocks.close() def copy(self): return self.__class__( copy.deepcopy(self._tree), self._uri, self._user_extensions, ) __copy__ = __deepcopy__ = copy @property def uri(self): """ Get the URI associated with the `AsdfFile`. In many cases, it is automatically determined from the file handle used to read or write the file. """ if self._uri is not None: return self._uri if self._fd is not None: return self._fd._uri return None @property def tag_to_schema_resolver(self): warnings.warn( "The 'tag_to_schema_resolver' property is deprecated. Use " "'tag_mapping' instead.", AsdfDeprecationWarning) return self.extension_list.tag_mapping @property def tag_mapping(self): return self.extension_list.tag_mapping @property def url_mapping(self): return self.extension_list.url_mapping @property def resolver(self): return self.extension_list.resolver @property def type_index(self): return self.extension_list.type_index def resolve_uri(self, uri): """ Resolve a (possibly relative) URI against the URI of this ASDF file. May be overridden by base classes to change how URIs are resolved. This does not apply any `uri_mapping` that was passed to the constructor. Parameters ---------- uri : str An absolute or relative URI to resolve against the URI of this ASDF file. Returns ------- uri : str The resolved URI. """ return generic_io.resolve_uri(self.uri, uri) def open_external(self, uri, **kwargs): """ Open an external ASDF file, from the given (possibly relative) URI. There is a cache (internal to this ASDF file) that ensures each external ASDF file is loaded only once. Parameters ---------- uri : str An absolute or relative URI to resolve against the URI of this ASDF file. Returns ------- asdffile : AsdfFile The external ASDF file. """ # For a cache key, we want to ignore the "fragment" part. base_uri = util.get_base_uri(uri) resolved_uri = self.resolve_uri(base_uri) # A uri like "#" should resolve back to ourself. 
In that case, # just return `self`. if resolved_uri == '' or resolved_uri == self.uri: return self asdffile = self._external_asdf_by_uri.get(resolved_uri) if asdffile is None: asdffile = open_asdf( resolved_uri, mode='r', **kwargs) self._external_asdf_by_uri[resolved_uri] = asdffile return asdffile @property def tree(self): """ Get/set the tree of data in the ASDF file. When set, the tree will be validated against the ASDF schema. """ if self._closed: raise OSError("Cannot access data from closed ASDF file") return self._tree @tree.setter def tree(self, tree): asdf_object = AsdfObject(tree) # Only perform custom validation if the tree is not empty self._validate(asdf_object, custom=bool(tree)) self._tree = asdf_object def keys(self): return self.tree.keys() def __getitem__(self, key): return self.tree[key] def __setitem__(self, key, value): self.tree[key] = value def __contains__(self, item): return item in self.tree @property def comments(self): """ Get the comments after the header, before the tree. """ return self._comments def _validate(self, tree, custom=True, reading=False): if reading: # If we're validating on read then the tree # is already guaranteed to be in tagged form. tagged_tree = tree else: tagged_tree = yamlutil.custom_tree_to_tagged_tree( tree, self) schema.validate(tagged_tree, self, reading=reading) # Perform secondary validation pass if requested if custom and self._custom_schema: schema.validate(tagged_tree, self, self._custom_schema, reading=reading) def validate(self): """ Validate the current state of the tree against the ASDF schema. """ self._validate(self._tree) def make_reference(self, path=[]): """ Make a new reference to a part of this file's tree, that can be assigned as a reference to another tree. Parameters ---------- path : list of str and int, optional The parts of the path pointing to an item in this tree. If omitted, points to the root of the tree. Returns ------- reference : reference.Reference A reference object. Examples -------- For the given AsdfFile ``ff``, add an external reference to the data in an external file:: >>> import asdf >>> flat = asdf.open("http://stsci.edu/reference_files/flat.asdf") # doctest: +SKIP >>> ff.tree['flat_field'] = flat.make_reference(['data']) # doctest: +SKIP """ return reference.make_reference(self, path) @property def blocks(self): """ Get the block manager associated with the `AsdfFile`. """ return self._blocks def set_array_storage(self, arr, array_storage): """ Set the block type to use for the given array data. Parameters ---------- arr : numpy.ndarray The array to set. If multiple views of the array are in the tree, only the most recent block type setting will be used, since all views share a single block. array_storage : str Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. """ block = self.blocks[arr] self.blocks.set_array_storage(block, array_storage) def get_array_storage(self, arr): """ Get the block type for the given array data. Parameters ---------- arr : numpy.ndarray """ return self.blocks[arr].array_storage def set_array_compression(self, arr, compression, **compression_kwargs): """ Set the compression to use for the given array data. Parameters ---------- arr : numpy.ndarray The array to set. 
If multiple views of the array are in the tree, only the most recent compression setting will be used, since all views share a single block. compression : str or None Must be one of: - ``''`` or `None`: no compression - ``zlib``: Use zlib compression - ``bzp2``: Use bzip2 compression - ``lz4``: Use lz4 compression - ``input``: Use the same compression as in the file read. If there is no prior file, acts as None. """ self.blocks[arr].output_compression = compression self.blocks[arr].output_compression_kwargs = compression_kwargs def get_array_compression(self, arr): """ Get the compression type for the given array data. Parameters ---------- arr : numpy.ndarray Returns ------- compression : str or None """ return self.blocks[arr].output_compression def get_array_compression_kwargs(self, arr): """ """ return self.blocks[arr].output_compression_kwargs @classmethod def _parse_header_line(cls, line): """ Parses the header line in a ASDF file to obtain the ASDF version. """ parts = line.split() if len(parts) != 2 or parts[0] != constants.ASDF_MAGIC: raise ValueError("Does not appear to be a ASDF file.") try: version = versioning.AsdfVersion(parts[1].decode('ascii')) except ValueError: raise ValueError( "Unparseable version in ASDF file: {0}".format(parts[1])) return version @classmethod def _read_comment_section(cls, fd): """ Reads the comment section, between the header line and the Tree or first block. """ content = fd.read_until( b'(%YAML)|(' + constants.BLOCK_MAGIC + b')', 5, "start of content", include=False, exception=False) comments = [] lines = content.splitlines() for line in lines: if not line.startswith(b'#'): raise ValueError("Invalid content between header and tree") comments.append(line[1:].strip()) return comments @classmethod def _find_asdf_version_in_comments(cls, comments): for comment in comments: parts = comment.split() if len(parts) == 2 and parts[0] == constants.ASDF_STANDARD_COMMENT: try: version = versioning.AsdfVersion(parts[1].decode('ascii')) except ValueError: pass else: return version return None @classmethod def _open_asdf(cls, self, fd, validate_checksums=False, extensions=None, _get_yaml_content=False, _force_raw_types=False, strict_extension_check=False, ignore_missing_extensions=False, **kwargs): """Attempt to populate AsdfFile data from file-like object""" if strict_extension_check and ignore_missing_extensions: raise ValueError( "'strict_extension_check' and 'ignore_missing_extensions' are " "incompatible options") with config_context() as config: _handle_deprecated_kwargs(config, kwargs) self._mode = fd.mode self._fd = fd # The filename is currently only used for tracing warning information self._fname = self._fd._uri if self._fd._uri else '' header_line = fd.read_until(b'\r?\n', 2, "newline", include=True) self._file_format_version = cls._parse_header_line(header_line) self.version = self._file_format_version self._comments = cls._read_comment_section(fd) version = cls._find_asdf_version_in_comments(self._comments) if version is not None: self.version = version # Now that version is set for good, we can add any additional # extensions, which may have narrow ASDF Standard version # requirements. 
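# Note: assigning through the `extensions` property (rather than
# setting the private attribute) re-runs _process_user_extensions,
# which warns about and drops any extension that does not support
# the file's ASDF Standard version.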
if extensions: self.extensions = extensions yaml_token = fd.read(4) has_blocks = False tree = None if yaml_token == b'%YAM': reader = fd.reader_until( constants.YAML_END_MARKER_REGEX, 7, 'End of YAML marker', include=True, initial_content=yaml_token) # For testing: just return the raw YAML content if _get_yaml_content: yaml_content = reader.read() fd.close() return yaml_content # We parse the YAML content into basic data structures # now, but we don't do anything special with it until # after the blocks have been read tree = yamlutil.load_tree(reader) has_blocks = fd.seek_until(constants.BLOCK_MAGIC, 4, include=True, exception=False) elif yaml_token == constants.BLOCK_MAGIC: has_blocks = True elif yaml_token != b'': raise IOError("ASDF file appears to contain garbage after header.") if tree is None: # At this point the tree should be tagged, but we want it to be # tagged with the core/asdf version appropriate to this file's # ASDF Standard version. We're using custom_tree_to_tagged_tree # to select the correct tag for us. tree = yamlutil.custom_tree_to_tagged_tree(AsdfObject(), self) if has_blocks: self._blocks.read_internal_blocks( fd, past_magic=True, validate_checksums=validate_checksums) self._blocks.read_block_index(fd, self) tree = reference.find_references(tree, self) if self.version <= versioning.FILL_DEFAULTS_MAX_VERSION and get_config().legacy_fill_schema_defaults: schema.fill_defaults(tree, self, reading=True) if get_config().validate_on_read: try: self._validate(tree, reading=True) except ValidationError: self.close() raise tree = yamlutil.tagged_tree_to_custom_tree(tree, self, _force_raw_types) if not (ignore_missing_extensions or _force_raw_types): self._check_extensions(tree, strict=strict_extension_check) self._tree = tree self.run_hook('post_read') return self @classmethod def _open_impl(cls, self, fd, uri=None, mode='r', validate_checksums=False, extensions=None, _get_yaml_content=False, _force_raw_types=False, strict_extension_check=False, ignore_missing_extensions=False, **kwargs): """Attempt to open file-like object as either AsdfFile or AsdfInFits""" generic_file = generic_io.get_file(fd, mode=mode, uri=uri) file_type = util.get_file_type(generic_file) if file_type == util.FileType.FITS: try: # TODO: this feels a bit circular, try to clean up. Also # this introduces another dependency on astropy which may # not be desireable. from . import fits_embed return fits_embed.AsdfInFits._open_impl(generic_file, uri=uri, validate_checksums=validate_checksums, extensions=extensions, ignore_version_mismatch=self._ignore_version_mismatch, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, ignore_unrecognized_tag=self._ignore_unrecognized_tag, **kwargs) except ValueError: raise ValueError( "Input object does not appear to be an ASDF file or a FITS with " + "ASDF extension") from None except ImportError: raise ValueError( "Input object does not appear to be an ASDF file. 
Cannot check " + "if it is a FITS with ASDF extension because 'astropy' is not " + "installed") from None elif file_type == util.FileType.ASDF: return cls._open_asdf(self, generic_file, validate_checksums=validate_checksums, extensions=extensions, _get_yaml_content=_get_yaml_content, _force_raw_types=_force_raw_types, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, **kwargs) else: raise ValueError( "Input object does not appear to be an ASDF file or a FITS with " + "ASDF extension" ) @classmethod def open(cls, fd, uri=None, mode='r', validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, _force_raw_types=False, copy_arrays=False, lazy_load=True, custom_schema=None, strict_extension_check=False, ignore_missing_extensions=False, **kwargs): """ Open an existing ASDF file. .. deprecated:: 2.2 Use `asdf.open` instead. """ warnings.warn( "The method AsdfFile.open has been deprecated and will be removed " "in asdf-3.0. Use the top-level asdf.open function instead.", AsdfDeprecationWarning) return open_asdf( fd, uri=uri, mode=mode, validate_checksums=validate_checksums, extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, _force_raw_types=_force_raw_types, copy_arrays=copy_arrays, lazy_load=lazy_load, custom_schema=custom_schema, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, _compat=True, **kwargs) def _write_tree(self, tree, fd, pad_blocks): fd.write(constants.ASDF_MAGIC) fd.write(b' ') fd.write(self.version_map['FILE_FORMAT'].encode('ascii')) fd.write(b'\n') fd.write(b'#') fd.write(constants.ASDF_STANDARD_COMMENT) fd.write(b' ') fd.write(self.version_string.encode('ascii')) fd.write(b'\n') if len(tree): serialization_context = self._create_serialization_context() compression_extensions = self.blocks.get_output_compression_extensions() for ext in compression_extensions: serialization_context._mark_extension_used(ext) def _tree_finalizer(tagged_tree): """ The list of extensions used is not known until after serialization, so we're using a hook provided by yamlutil.dump_tree to update extension metadata after the tree has been converted to tagged objects. 
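Parameters
----------
tagged_tree : dict
    The tagged tree about to be serialized; its 'history' entry
    is regenerated here once the extension metadata is final.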
""" self._update_extension_history(serialization_context) if 'history' in self.tree: tagged_tree['history'] = yamlutil.custom_tree_to_tagged_tree( self.tree['history'], self, _serialization_context=serialization_context ) else: tagged_tree.pop('history', None) yamlutil.dump_tree( tree, fd, self, tree_finalizer=_tree_finalizer, _serialization_context=serialization_context ) if pad_blocks: padding = util.calculate_padding( fd.tell(), pad_blocks, fd.block_size) fd.fast_forward(padding) def _pre_write(self, fd, all_array_storage, all_array_compression, compression_kwargs=None): if all_array_storage not in (None, 'internal', 'external', 'inline'): raise ValueError( "Invalid value for all_array_storage: '{0}'".format( all_array_storage)) self._all_array_storage = all_array_storage self._all_array_compression = all_array_compression self._all_array_compression_kwargs = compression_kwargs if len(self._tree): self.run_hook('pre_write') # This is where we'd do some more sophisticated block # reorganization, if necessary self._blocks.finalize(self) self._tree['asdf_library'] = get_asdf_library_info() def _serial_write(self, fd, pad_blocks, include_block_index): self._write_tree(self._tree, fd, pad_blocks) self.blocks.write_internal_blocks_serial(fd, pad_blocks) self.blocks.write_external_blocks(fd.uri, pad_blocks) if include_block_index: self.blocks.write_block_index(fd, self) def _random_write(self, fd, pad_blocks, include_block_index): self._write_tree(self._tree, fd, False) self.blocks.write_internal_blocks_random_access(fd) self.blocks.write_external_blocks(fd.uri, pad_blocks) if include_block_index: self.blocks.write_block_index(fd, self) fd.truncate() def _post_write(self, fd): if len(self._tree): self.run_hook('post_write') def update(self, all_array_storage=None, all_array_compression='input', pad_blocks=False, include_block_index=True, version=None, compression_kwargs=None, **kwargs): """ Update the file on disk in place. Parameters ---------- all_array_storage : string, optional If provided, override the array storage type of all blocks in the file immediately before writing. Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. all_array_compression : string, optional If provided, set the compression type on all binary blocks in the file. Must be one of: - ``''`` or `None`: No compression. - ``zlib``: Use zlib compression. - ``bzp2``: Use bzip2 compression. - ``lz4``: Use lz4 compression. - ``input``: Use the same compression as in the file read. If there is no prior file, acts as None pad_blocks : float or bool, optional Add extra space between blocks to allow for updating of the file. If `False` (default), add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. include_block_index : bool, optional If `False`, don't include a block index at the end of the file. (Default: `True`) A block index is never written if the file has a streamed block. version : str, optional Update the ASDF Standard version of this AsdfFile before writing. auto_inline : int, optional DEPRECATED. When the number of elements in an array is less than this threshold, store the array as inline YAML, rather than a binary block. This only works on arrays that do not share data with other arrays. 
Default is the value specified in ``asdf.get_config().array_inline_threshold``. """ with config_context() as config: _handle_deprecated_kwargs(config, kwargs) fd = self._fd if fd is None: raise ValueError( "Can not update, since there is no associated file") if not fd.writable(): raise IOError( "Can not update, since associated file is read-only. Make " "sure that the AsdfFile was opened with mode='rw' and the " "underlying file handle is writable.") if version is not None: self.version = version if all_array_storage == 'external': # If the file is fully exploded, there's no benefit to # update, so just use write_to() self.write_to(fd, all_array_storage=all_array_storage) fd.truncate() return if not fd.seekable(): raise IOError( "Can not update, since associated file is not seekable") self.blocks.finish_reading_internal_blocks() self._pre_write(fd, all_array_storage, all_array_compression, compression_kwargs=compression_kwargs) try: fd.seek(0) if not self.blocks.has_blocks_with_offset(): # If we don't have any blocks that are being reused, just # write out in a serial fashion. self._serial_write(fd, pad_blocks, include_block_index) fd.truncate() return # Estimate how big the tree will be on disk by writing the # YAML out in memory. Since the block indices aren't yet # known, we have to count the number of block references and # add enough space to accommodate the largest block number # possible there. tree_serialized = io.BytesIO() self._write_tree(self._tree, tree_serialized, pad_blocks=False) array_ref_count = [0] from .tags.core.ndarray import NDArrayType for node in treeutil.iter_tree(self._tree): if (isinstance(node, (np.ndarray, NDArrayType)) and self.blocks[node].array_storage == 'internal'): array_ref_count[0] += 1 serialized_tree_size = ( tree_serialized.tell() + constants.MAX_BLOCKS_DIGITS * array_ref_count[0]) if not block.calculate_updated_layout( self.blocks, serialized_tree_size, pad_blocks, fd.block_size): # If we don't have any blocks that are being reused, just # write out in a serial fashion. self._serial_write(fd, pad_blocks, include_block_index) fd.truncate() return fd.seek(0) self._random_write(fd, pad_blocks, include_block_index) fd.flush() finally: self._post_write(fd) def write_to(self, fd, all_array_storage=None, all_array_compression='input', pad_blocks=False, include_block_index=True, version=None, compression_kwargs=None, **kwargs): """ Write the ASDF file to the given file-like object. `write_to` does not change the underlying file descriptor in the `AsdfFile` object, but merely copies the content to a new file. Parameters ---------- fd : string or file-like object May be a string path to a file, or a Python file-like object. If a string path, the file is automatically closed after writing. If not a string path, it is the caller's responsibility to close the object. all_array_storage : string, optional If provided, override the array storage type of all blocks in the file immediately before writing. Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. all_array_compression : string, optional If provided, set the compression type on all binary blocks in the file. Must be one of: - ``''`` or `None`: No compression. - ``zlib``: Use zlib compression. - ``bzp2``: Use bzip2 compression. - ``lz4``: Use lz4 compression. - ``input``: Use the same compression as in the file read. 
If there is no prior file, acts as None. pad_blocks : float or bool, optional Add extra space between blocks to allow for updating of the file. If `False` (default), add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. include_block_index : bool, optional If `False`, don't include a block index at the end of the file. (Default: `True`) A block index is never written if the file has a streamed block. version : str, optional Update the ASDF Standard version of this AsdfFile before writing. auto_inline : int, optional DEPRECATED. When the number of elements in an array is less than this threshold, store the array as inline YAML, rather than a binary block. This only works on arrays that do not share data with other arrays. Default is the value specified in ``asdf.get_config().array_inline_threshold``. """ with config_context() as config: _handle_deprecated_kwargs(config, kwargs) if version is not None: self.version = version with generic_io.get_file(fd, mode='w') as fd: # TODO: This is not ideal: we really should pass the URI through # explicitly to wherever it is required instead of making it an # attribute of the AsdfFile. if self._uri is None: self._uri = fd.uri self._pre_write(fd, all_array_storage, all_array_compression, compression_kwargs=compression_kwargs) try: self._serial_write(fd, pad_blocks, include_block_index) fd.flush() finally: self._post_write(fd) def find_references(self): """ Finds all external "JSON References" in the tree and converts them to `reference.Reference` objects. """ # Set directly to self._tree, since it doesn't need to be re-validated. self._tree = reference.find_references(self._tree, self) def resolve_references(self, **kwargs): """ Finds all external "JSON References" in the tree, loads the external content, and places it directly in the tree. Saving a ASDF file after this operation means it will have no external references, and will be completely self-contained. """ # Set to the property self.tree so the resulting "complete" # tree will be validated. self.tree = reference.resolve_references(self._tree, self) def run_hook(self, hookname): """ Run a "hook" for each custom type found in the tree. Parameters ---------- hookname : str The name of the hook. If a `AsdfType` is found with a method with this name, it will be called for every instance of the corresponding custom type in the tree. """ type_index = self.type_index if not type_index.has_hook(hookname): return for node in treeutil.iter_tree(self._tree): hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) if hook is not None: hook(node, self) def run_modifying_hook(self, hookname, validate=True): """ Run a "hook" for each custom type found in the tree. The hook is free to return a different object in order to modify the tree. Parameters ---------- hookname : str The name of the hook. If a `AsdfType` is found with a method with this name, it will be called for every instance of the corresponding custom type in the tree. validate : bool When `True` (default) validate the resulting tree. 
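Returns
-------
tree : object
    The tree as returned by the hooks, which also replaces this
    file's tree.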
""" type_index = self.type_index if not type_index.has_hook(hookname): return def walker(node): hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) if hook is not None: return hook(node, self) return node tree = treeutil.walk_and_modify(self.tree, walker, ignore_implicit_conversion=self._ignore_implicit_conversion) if validate: self._validate(tree) self._tree = tree return self._tree def resolve_and_inline(self): """ Resolves all external references and inlines all data. This produces something that, when saved, is a 100% valid YAML file. """ self.blocks.finish_reading_internal_blocks() self.resolve_references() for b in list(self.blocks.blocks): self.blocks.set_array_storage(b, 'inline') def fill_defaults(self): """ Fill in any values that are missing in the tree using default values from the schema. """ tree = yamlutil.custom_tree_to_tagged_tree(self._tree, self) schema.fill_defaults(tree, self) self._tree = yamlutil.tagged_tree_to_custom_tree(tree, self) def remove_defaults(self): """ Remove any values in the tree that are the same as the default values in the schema """ tree = yamlutil.custom_tree_to_tagged_tree(self._tree, self) schema.remove_defaults(tree, self) self._tree = yamlutil.tagged_tree_to_custom_tree(tree, self) def add_history_entry(self, description, software=None): """ Add an entry to the history list. Parameters ---------- description : str A description of the change. software : dict or list of dict A description of the software used. It should not include asdf itself, as that is automatically notated in the `asdf_library` entry. Each dict must have the following keys: - ``name``: The name of the software - ``author``: The author or institution that produced the software - ``homepage``: A URI to the homepage of the software - ``version``: The version of the software """ if isinstance(software, list): software = [Software(x) for x in software] elif software is not None: software = Software(software) time_ = datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())), ) entry = HistoryEntry({ 'description': description, 'time': time_, }) if software is not None: entry['software'] = software if self.version >= versioning.NEW_HISTORY_FORMAT_MIN_VERSION: if 'history' not in self.tree: self.tree['history'] = dict(entries=[]) elif 'entries' not in self.tree['history']: self.tree['history']['entries'] = [] self.tree['history']['entries'].append(entry) try: self.validate() except Exception: self.tree['history']['entries'].pop() raise else: if 'history' not in self.tree: self.tree['history'] = [] self.tree['history'].append(entry) try: self.validate() except Exception: self.tree['history'].pop() raise def get_history_entries(self): """ Get a list of history entries from the file object. Returns ------- entries : list A list of history entries. """ if 'history' not in self.tree: return [] if isinstance(self.tree['history'], list): return self.tree['history'] if 'entries' in self.tree['history']: return self.tree['history']['entries'] return [] def info(self, max_rows=display.DEFAULT_MAX_ROWS, max_cols=display.DEFAULT_MAX_COLS, show_values=display.DEFAULT_SHOW_VALUES): """ Print a rendering of this file's tree to stdout. Parameters ---------- max_rows : int, tuple, or None, optional Maximum number of lines to print. Nodes that cannot be displayed will be elided with a message. If int, constrain total number of displayed lines. If tuple, constrain lines per node at the depth corresponding \ to the tuple index. 
If None, display all lines. max_cols : int or None, optional Maximum length of line to print. Nodes that cannot be fully displayed will be truncated with a message. If int, constrain length of displayed lines. If None, line length is unconstrained. show_values : bool, optional Set to False to disable display of primitive values in the rendered tree. """ lines = display.render_tree(self.tree, max_rows=max_rows, max_cols=max_cols, show_values=show_values, identifier="root") print("\n".join(lines)) def search(self, key=NotSet, type=NotSet, value=NotSet, filter=None): """ Search this file's tree. Parameters ---------- key : NotSet, str, or any other object Search query that selects nodes by dict key or list index. If NotSet, the node key is unconstrained. If str, the input is searched among keys/indexes as a regular expression pattern. If any other object, node's key or index must equal the queried key. type : NotSet, str, or builtins.type Search query that selects nodes by type. If NotSet, the node type is unconstrained. If str, the input is searched among (fully qualified) node type names as a regular expression pattern. If builtins.type, the node must be an instance of the input. value : NotSet, str, or any other object Search query that selects nodes by value. If NotSet, the node value is unconstrained. If str, the input is searched among values as a regular expression pattern. If any other object, node's value must equal the queried value. filter : callable Callable that filters nodes by arbitrary criteria. The callable accepts one or two arguments: - the node - the node's list index or dict key (optional) and returns True to retain the node, or False to remove it from the search results. Returns ------- asdf.search.AsdfSearchResult the result of the search """ result = AsdfSearchResult(["root"], self.tree) return result.search(key=key, type=type, value=value, filter=filter) # This function is called from within TypeIndex when deserializing # the tree for this file. It is kept here so that we can keep # state on the AsdfFile and prevent a flood of warnings for the # same tag. def _warn_tag_mismatch(self, tag, best_tag): if not self._ignore_version_mismatch and (tag, best_tag) not in self._warned_tag_pairs: message = ( "No explicit ExtensionType support provided for tag '{}'. " "The ExtensionType subclass for tag '{}' will be used instead. " "This fallback behavior will be removed in asdf 3.0." ).format(tag, best_tag) warnings.warn(message, AsdfConversionWarning) self._warned_tag_pairs.add((tag, best_tag)) # This function is called from within yamlutil methods to create # a context when one isn't explicitly passed in. def _create_serialization_context(self): return SerializationContext(self.version_string, self.extension_manager, self.uri) # Inherit docstring from dictionary AsdfFile.keys.__doc__ = dict.keys.__doc__ def _check_and_set_mode(fileobj, asdf_mode): if asdf_mode is not None and asdf_mode not in ['r', 'rw']: msg = "Unrecognized asdf mode '{}'. 
Must be either 'r' or 'rw'" raise ValueError(msg.format(asdf_mode)) if asdf_mode is None: if isinstance(fileobj, io.IOBase): return 'rw' if fileobj.writable() else 'r' if isinstance(fileobj, generic_io.GenericFile): return fileobj.mode # This is the safest assumption for the default fallback return 'r' return asdf_mode _DEPRECATED_KWARG_TO_CONFIG_PROPERTY = { "auto_inline": ("array_inline_threshold", lambda v: v), "validate_on_read": ("validate_on_read", lambda v: v), "do_not_fill_defaults": ("legacy_fill_schema_defaults", lambda v: not v), } def _handle_deprecated_kwargs(config, kwargs): for key, value in kwargs.items(): if key in _DEPRECATED_KWARG_TO_CONFIG_PROPERTY: config_property, func = _DEPRECATED_KWARG_TO_CONFIG_PROPERTY[key] warnings.warn( f"The '{key}' argument is deprecated, set " f"asdf.get_config().{config_property} instead.", AsdfDeprecationWarning ) setattr(config, config_property, func(value)) else: raise TypeError(f"Unexpected keyword argument '{key}'") def open_asdf(fd, uri=None, mode=None, validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, _force_raw_types=False, copy_arrays=False, lazy_load=True, custom_schema=None, strict_extension_check=False, ignore_missing_extensions=False, _compat=False, **kwargs): """ Open an existing ASDF file. Parameters ---------- fd : string or file-like object May be a string ``file`` or ``http`` URI, or a Python file-like object. uri : string, optional The URI of the file. Only required if the URI can not be automatically determined from `fd`. mode : string, optional The mode to open the file in. Must be ``r`` (default) or ``rw``. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. Requires reading the entire file, so disabled by default. extensions : object, optional Additional extensions to use when reading and writing the file. May be any of the following: `asdf.extension.AsdfExtension`, `asdf.extension.Extension`, `asdf.extension.AsdfExtensionList` or a `list` of extensions. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. Set to `True` by default. ignore_unrecognized_tag : bool, optional When `True`, do not raise warnings for unrecognized tags. Set to `False` by default. copy_arrays : bool, optional When `False`, when reading files, attempt to memmap underlying data arrays when possible. lazy_load : bool, optional When `True` and the underlying file handle is seekable, data arrays will only be loaded lazily: i.e. when they are accessed for the first time. In this case the underlying file must stay open during the lifetime of the tree. Setting to False causes all data arrays to be loaded up front, which means that they can be accessed even after the underlying file is closed. Note: even if `lazy_load` is `False`, `copy_arrays` is still taken into account. custom_schema : str, optional Path to a custom schema file that will be used for a secondary validation pass. This can be used to ensure that particular ASDF files follow custom conventions beyond those enforced by the standard. strict_extension_check : bool, optional When `True`, if the given ASDF file contains metadata about the extensions used to create it, and if those extensions are not installed, opening the file will fail. When `False`, opening a file under such conditions will cause only a warning. Defaults to `False`. 
ignore_missing_extensions : bool, optional When `True`, do not raise warnings when a file is read that contains metadata about extensions that are not available. Defaults to `False`. validate_on_read : bool, optional DEPRECATED. When `True`, validate the newly opened file against tag and custom schemas. Recommended unless the file is already known to be valid. Returns ------- asdffile : AsdfFile The new AsdfFile object. """ readonly = False # For now retain backwards compatibility with the old API behavior, # specifically when being called from AsdfFile.open if not _compat: mode = _check_and_set_mode(fd, mode) readonly = (mode == 'r' and not copy_arrays) instance = AsdfFile( ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, copy_arrays=copy_arrays, lazy_load=lazy_load, custom_schema=custom_schema, _readonly=readonly) return AsdfFile._open_impl(instance, fd, uri=uri, mode=mode, validate_checksums=validate_checksums, extensions=extensions, _force_raw_types=_force_raw_types, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, **kwargs) class SerializationContext: """ Container for parameters of the current (de)serialization. """ def __init__(self, version, extension_manager, url): self._version = validate_version(version) self._extension_manager = extension_manager self._url = url self.__extensions_used = set() @property def url(self): """ The URL (if any) of the file being read or written. Used to compute relative locations of external files referenced by this ASDF file. The URL will not exist in some cases (e.g. when the file is written to an `io.BytesIO`). Returns -------- str or None """ return self._url @property def version(self): """ Get the ASDF Standard version. Returns ------- str """ return self._version @property def extension_manager(self): """ Get the ExtensionManager for enabled extensions. Returns ------- asdf.extension.ExtensionManager """ return self._extension_manager def _mark_extension_used(self, extension): """ Note that an extension was used when reading or writing the file. Parameters ---------- extension : asdf.extension.AsdfExtension or asdf.extension.Extension """ self.__extensions_used.add(ExtensionProxy.maybe_wrap(extension)) @property def _extensions_used(self): """ Get the set of extensions that were used when reading or writing the file. Returns ------- set of asdf.extension.AsdfExtension or asdf.extension.Extension """ return self.__extensions_used ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/asdftypes.py0000644000537500020070000000101300000000000017062 0ustar00wjamiesonSTSCI\scienceimport warnings from .exceptions import AsdfDeprecationWarning # This is not exhaustive, but represents the public API from .versioning import join_tag_version, split_tag_version from .types import (AsdfType, CustomType, format_tag, ExtensionTypeMeta) __all__ = ["join_tag_version", "split_tag_version", "AsdfType", "CustomType", "format_tag", "ExtensionTypeMeta"] warnings.warn( "The module asdf.asdftypes has been deprecated and will be removed in 3.0. 
" "Use asdf.types instead.", AsdfDeprecationWarning) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/block.py0000644000537500020070000013554300000000000016172 0ustar00wjamiesonSTSCI\scienceimport copy import hashlib import io import os import re import struct import weakref from collections import namedtuple import numpy as np import yaml from . import compression as mcompression from .config import get_config from .compat.numpycompat import NUMPY_LT_1_7 from . import constants from . import generic_io from . import treeutil from . import util from . import yamlutil from .util import patched_urllib_parse class BlockManager: """ Manages the `Block`s associated with a ASDF file. """ def __init__(self, asdffile, copy_arrays=False, lazy_load=True, readonly=False): self._asdffile = weakref.ref(asdffile) self._internal_blocks = [] self._external_blocks = [] self._inline_blocks = [] self._streamed_blocks = [] self._block_type_mapping = { 'internal': self._internal_blocks, 'external': self._external_blocks, 'inline': self._inline_blocks, 'streamed': self._streamed_blocks } self._data_to_block_mapping = {} self._validate_checksums = False self._memmap = not copy_arrays self._lazy_load = lazy_load self._readonly = readonly self._internal_blocks_mapped = False def __len__(self): """ Return the total number of blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ return sum(len(x) for x in self._block_type_mapping.values()) def add(self, block): """ Add an internal block to the manager. """ if not self._internal_blocks_mapped: # If the block index is missing we need to locate the remaining # blocks so that we don't accidentally add our new block # in the middle of the list. self.finish_reading_internal_blocks() self._add(block) def _add(self, block): block_set = self._block_type_mapping.get(block.array_storage, None) if block_set is not None: if block not in block_set: block_set.append(block) else: raise ValueError( "Unknown array storage type {0}".format(block.array_storage)) if block.array_storage == 'streamed' and len(self._streamed_blocks) > 1: raise ValueError("Can not add second streaming block") if block._data is not None: self._data_to_block_mapping[id(block._data)] = block def remove(self, block): """ Remove a block from the manager. """ block_set = self._block_type_mapping.get(block.array_storage, None) if block_set is not None: if block in block_set: block_set.remove(block) if block._data is not None: if id(block._data) in self._data_to_block_mapping: del self._data_to_block_mapping[id(block._data)] else: raise ValueError( "Unknown array storage type {0}".format(block.array_storage)) def set_array_storage(self, block, array_storage): """ Set the array storage type of the given block. Parameters ---------- block : Block instance array_storage : str Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. - ``streamed``: The special streamed inline block that appears at the end of the file. 
""" if array_storage not in ['internal', 'external', 'streamed', 'inline']: raise ValueError( "array_storage must be one of 'internal', 'external', " "'streamed' or 'inline'") if block.array_storage != array_storage: if block in self.blocks: self.remove(block) block._array_storage = array_storage self.add(block) if array_storage == 'streamed': block.output_compression = None block.output_compression_kwargs = None @property def blocks(self): """ An iterator over all blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ for block_set in self._block_type_mapping.values(): for block in block_set: yield block @property def internal_blocks(self): """ An iterator over all internal blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ for block_set in (self._internal_blocks, self._streamed_blocks): for block in block_set: yield block @property def streamed_block(self): """ The streamed block (always the last internal block in a file), or `None` if a streamed block is not present. """ self.finish_reading_internal_blocks() if len(self._streamed_blocks): return self._streamed_blocks[0] @property def external_blocks(self): """ An iterator over all external blocks being managed. """ for block in self._external_blocks: yield block @property def inline_blocks(self): """ An iterator over all inline blocks being managed. """ for block in self._inline_blocks: yield block @property def memmap(self): """ The flag which indicates whether the arrays are memory mapped to the underlying file. """ return self._memmap @property def lazy_load(self): """ The flag which indicates whether the blocks are lazily read. """ return self._lazy_load def has_blocks_with_offset(self): """ Returns `True` if any of the internal blocks currently have an offset assigned. """ for block in self.internal_blocks: if block.offset is not None: return True return False def _new_block(self): return Block(memmap=self.memmap, lazy_load=self.lazy_load) def _sort_blocks_by_offset(self): def sorter(x): if x.offset is None: raise ValueError('Block is missing offset') else: return x.offset self._internal_blocks.sort(key=sorter) def _read_next_internal_block(self, fd, past_magic=False): # This assumes the file pointer is at the beginning of the # block, (or beginning + 4 if past_magic is True) block = self._new_block().read( fd, past_magic=past_magic, validate_checksum=self._validate_checksums) if block is not None: self._add(block) return block def read_internal_blocks(self, fd, past_magic=False, validate_checksums=False): """ Read internal blocks present in the file. If the file is seekable, only the first block will be read, and the reading of all others will be lazily deferred until an the loading of an array requests it. Parameters ---------- fd : GenericFile The file to read from. past_magic : bool, optional If `True`, the file position is immediately after the block magic token. If `False` (default), the file position is exactly at the beginning of the block magic token. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. 
""" self._validate_checksums = validate_checksums while True: block = self._read_next_internal_block(fd, past_magic=past_magic) if block is None: break past_magic = False # If the file handle is seekable, we only read the first # block and defer reading the rest until later. if fd.seekable(): break def finish_reading_internal_blocks(self): """ Read all remaining internal blocks present in the file, if any. This is called before updating a file, since updating requires knowledge of all internal blocks in the file. """ if not self._internal_blocks: return for i, block in enumerate(self._internal_blocks): if isinstance(block, UnloadedBlock): block.load() last_block = self._internal_blocks[-1] # Read all of the remaining blocks in the file, if any if (last_block._fd is not None and last_block._fd.seekable()): last_block._fd.seek(last_block.end_offset) while True: last_block = self._read_next_internal_block( last_block._fd, False) if last_block is None: break self._internal_blocks_mapped = True def write_internal_blocks_serial(self, fd, pad_blocks=False): """ Write all blocks to disk serially. Parameters ---------- fd : generic_io.GenericFile The file to write internal blocks to. The file position should be after the tree. """ for block in self.internal_blocks: if block.output_compression: block.offset = fd.tell() block.write(fd) else: if block.input_compression: block.update_size() padding = util.calculate_padding( block.size, pad_blocks, fd.block_size) block.allocated = block._size + padding block.offset = fd.tell() block.write(fd) fd.fast_forward(block.allocated - block._size) def write_internal_blocks_random_access(self, fd): """ Write all blocks to disk at their specified offsets. All internal blocks must have an offset assigned at this point. Parameters ---------- fd : generic_io.GenericFile The file to write internal blocks to. The file position should be after the tree. """ self._sort_blocks_by_offset() iter = self.internal_blocks last_block = next(iter) # We need to explicitly clear anything between the tree # and the first block, otherwise there may be other block # markers left over which will throw off block indexing. # We don't need to do this between each block. fd.clear(last_block.offset - fd.tell()) for block in iter: last_block.allocated = ((block.offset - last_block.offset) - last_block.header_size) fd.seek(last_block.offset) last_block.write(fd) last_block = block last_block.allocated = last_block.size fd.seek(last_block.offset) last_block.write(fd) fd.truncate(last_block.end_offset) def write_external_blocks(self, uri, pad_blocks=False): """ Write all blocks to disk serially. Parameters ---------- uri : str The base uri of the external blocks """ from . import asdf for i, block in enumerate(self.external_blocks): if uri is None: raise ValueError( "Can't write external blocks, since URI of main file is " "unknown.") subfd = self.get_external_uri(uri, i) asdffile = asdf.AsdfFile() block = copy.copy(block) block._array_storage = 'internal' asdffile.blocks.add(block) block._used = True asdffile.write_to(subfd, pad_blocks=pad_blocks) def write_block_index(self, fd, ctx): """ Write the block index. Parameters ---------- fd : GenericFile The file to write to. The file pointer should be at the end of the file. 
""" if len(self._internal_blocks) and not len(self._streamed_blocks): fd.write(constants.INDEX_HEADER) fd.write(b'\n') offsets = [x.offset for x in self.internal_blocks] yaml_version = tuple( int(x) for x in ctx.version_map['YAML_VERSION'].split('.')) yaml.dump( offsets, Dumper=yamlutil._yaml_base_dumper, stream=fd, explicit_start=True, explicit_end=True, version=yaml_version, allow_unicode=True, encoding='utf-8') _re_index_content = re.compile( br'^' + constants.INDEX_HEADER + br'\r?\n%YAML.*\.\.\.\r?\n?$') _re_index_misc = re.compile(br'^[\n\r\x20-\x7f]+$') def read_block_index(self, fd, ctx): """ Read the block index. Parameters ---------- fd : GenericFile The file to read from. It must be seekable. """ # This reads the block index by reading backward from the end # of the file. This tries to be as conservative as possible, # since not reading an index isn't a deal breaker -- # everything can still be read from the file, only slower. # Importantly, it must remain "transactionally clean", and not # create any blocks until we're sure the block index makes # sense. if not fd.seekable(): return if not len(self._internal_blocks): return first_block = self._internal_blocks[0] first_block_end = first_block.end_offset fd.seek(0, generic_io.SEEK_END) file_size = block_end = fd.tell() # We want to read on filesystem block boundaries. We use # "block_end - 5" here because we need to read at least 5 # bytes in the first block. block_start = ((block_end - 5) // fd.block_size) * fd.block_size buff_size = block_end - block_start content = b'' fd.seek(block_start, generic_io.SEEK_SET) buff = fd.read(buff_size) # Extra '\0' bytes are allowed after the ..., mainly to # workaround poor truncation support on Windows buff = buff.rstrip(b'\0') content = buff # We need an explicit YAML end marker, or there's no # block index for ending in (b'...', b'...\r\n', b'...\n'): if content.endswith(ending): break else: return # Read blocks in reverse order from the end of the file while True: # Look for the index header idx = content.rfind(constants.INDEX_HEADER) if idx != -1: content = content[idx:] index_start = block_start + idx break else: # If the rest of it starts to look like binary # values, bail... if not self._re_index_misc.match(buff): return if block_start <= first_block_end: return block_end = block_start block_start = max(block_end - fd.block_size, first_block_end) fd.seek(block_start, generic_io.SEEK_SET) buff_size = block_end - block_start buff = fd.read(buff_size) content = buff + content yaml_content = content[content.find(b'\n') + 1:] # The following call to yaml.load is safe because we're # using pyyaml's SafeLoader. offsets = yaml.load(yaml_content, # nosec Loader=yamlutil._yaml_base_loader) # Make sure the indices look sane if not isinstance(offsets, list) or len(offsets) == 0: return last_offset = 0 for x in offsets: if (not isinstance(x, int) or x > file_size or x < 0 or x <= last_offset + Block._header.size): return last_offset = x # We always read the first block, so we can confirm that the # first entry in the block index matches the first block if offsets[0] != first_block.offset: return if len(offsets) == 1: # If there's only one block in the index, we've already # loaded the first block, so just return: we have nothing # left to do return # One last sanity check: Read the last block in the index and # make sure it makes sense. 
fd.seek(offsets[-1], generic_io.SEEK_SET) try: block = self._new_block().read(fd) except (ValueError, IOError): return # Now see if the end of the last block leads right into the index if (block.end_offset != index_start): return # It seems we're good to go, so instantiate the UnloadedBlock # objects for offset in offsets[1:-1]: self._internal_blocks.append( UnloadedBlock(fd, offset, memmap=self.memmap, lazy_load=self.lazy_load, readonly=self._readonly)) # We already read the last block in the file -- no need to read it again self._internal_blocks.append(block) # Record that all block locations have been mapped out (used to avoid # unnecessary calls to finish_reading_internal_blocks later). self._internal_blocks_mapped = True # Materialize the internal blocks if we are not lazy if not self.lazy_load: self.finish_reading_internal_blocks() def get_external_filename(self, filename, index): """ Given a main filename and an index number, return a new file name for referencing an external block. """ filename = os.path.splitext(filename)[0] return filename + '{0:04d}.asdf'.format(index) def get_external_uri(self, uri, index): """ Given a main URI and an index number, return a new URI for saving an external block. """ if uri is None: uri = '' parts = list(patched_urllib_parse.urlparse(uri)) path = parts[2] dirname, filename = os.path.split(path) filename = self.get_external_filename(filename, index) path = os.path.join(dirname, filename) parts[2] = path return patched_urllib_parse.urlunparse(parts) def _find_used_blocks(self, tree, ctx): reserved_blocks = set() for node in treeutil.iter_tree(tree): hook = ctx.type_index.get_hook_for_type( 'reserve_blocks', type(node), ctx.version_string) if hook is not None: for block in hook(node, ctx): reserved_blocks.add(block) for block in list(self.blocks): if (getattr(block, '_used', 0) == 0 and block not in reserved_blocks): self.remove(block) def _handle_global_block_settings(self, ctx, block): all_array_storage = getattr(ctx, '_all_array_storage', None) if all_array_storage: self.set_array_storage(block, all_array_storage) all_array_compression = getattr(ctx, '_all_array_compression', 'input') all_array_compression_kwargs = getattr(ctx, '_all_array_compression_kwargs', {}) # Only override block compression algorithm if it wasn't explicitly set # by AsdfFile.set_array_compression. if all_array_compression != 'input': block.output_compression = all_array_compression block.output_compression_kwargs = all_array_compression_kwargs if all_array_storage is None: threshold = get_config().array_inline_threshold if threshold is not None and block.array_storage in ['internal', 'inline']: if np.product(block.data.shape) < threshold: self.set_array_storage(block, 'inline') else: self.set_array_storage(block, 'internal') def finalize(self, ctx): """ At this point, we have a complete set of blocks for the file, with no extras. Here, they are reindexed, and possibly reorganized. """ # TODO: Should this reset the state (what's external and what # isn't) afterword? self._find_used_blocks(ctx.tree, ctx) for block in list(self.blocks): self._handle_global_block_settings(ctx, block) def get_block(self, source): """ Given a "source identifier", return a block. Parameters ---------- source : any If an integer, refers to the index of an internal block. If a string, is a uri to an external block. 
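If a list, it is treated as inline data and wrapped in a new inline block.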
Returns ------- buffer : buffer """ # If an "int", it is the index of an internal block if isinstance(source, int): if source == -1: if len(self._streamed_blocks): return self._streamed_blocks[0] # If we don't have a streamed block, fall through so # we can read all of the blocks, ultimately arriving # at the last one, which, if all goes well is a # streamed block. # First, look in the blocks we've already read elif source >= 0: if source < len(self._internal_blocks): return self._internal_blocks[source] else: raise ValueError("Invalid source id {0}".format(source)) # If we have a streamed block or we already know we have # no blocks, reading any further isn't going to yield any # new blocks. if len(self._streamed_blocks) or len(self._internal_blocks) == 0: raise ValueError("Block '{0}' not found.".format(source)) # If the desired block hasn't already been read, and the # file is seekable, and we have at least one internal # block, then we can move the file pointer to the end of # the last known internal block, and start looking for # more internal blocks. This is "deferred block loading". last_block = self._internal_blocks[-1] if (last_block._fd is not None and last_block._fd.seekable()): last_block._fd.seek(last_block.end_offset) while True: next_block = self._read_next_internal_block( last_block._fd, False) if next_block is None: break if len(self._internal_blocks) - 1 == source: return next_block last_block = next_block if (source == -1 and last_block.array_storage == 'streamed'): return last_block raise ValueError("Block '{0}' not found.".format(source)) elif isinstance(source, str): asdffile = self._asdffile().open_external(source) block = asdffile.blocks._internal_blocks[0] self.set_array_storage(block, 'external') # Handle the case of inline data elif isinstance(source, list): block = Block(data=np.array(source), array_storage='inline') else: raise TypeError("Unknown source '{0}'".format(source)) return block def get_source(self, block): """ Get a source identifier for a given block. Parameters ---------- block : Block Returns ------- source_id : str May be an integer for an internal block, or a URI for an external block. """ for i, internal_block in enumerate(self.internal_blocks): if block == internal_block: if internal_block.array_storage == 'streamed': return -1 return i for i, external_block in enumerate(self.external_blocks): if block == external_block: if self._asdffile().uri is None: raise ValueError( "Can't write external blocks, since URI of main file is " "unknown.") parts = list(patched_urllib_parse.urlparse(self._asdffile().uri)) path = parts[2] filename = os.path.basename(path) return self.get_external_filename(filename, i) raise ValueError("block not found.") def find_or_create_block_for_array(self, arr, ctx): """ For a given array, looks for an existing block containing its underlying data. If not found, adds a new block to the block list. Returns the index in the block list to the array. Parameters ---------- arr : numpy.ndarray Returns ------- block : Block """ from .tags.core import ndarray if (isinstance(arr, ndarray.NDArrayType) and arr.block is not None): if arr.block in self.blocks: return arr.block else: arr._block = None base = util.get_array_base(arr) block = self._data_to_block_mapping.get(id(base)) if block is not None: return block block = Block(base) self.add(block) self._handle_global_block_settings(ctx, block) return block def get_streamed_block(self): """ Get the streamed block, which is always the last one. 
A streamed block, on writing, does not manage data of its own, but the user is expected to stream it to disk directly. """ block = self.streamed_block if block is None: block = Block(array_storage='streamed') self.add(block) return block def add_inline(self, array): """ Add an inline block for ``array`` to the block set. """ block = Block(array, array_storage='inline') self.add(block) return block def get_output_compressions(self): ''' Get the list of unique compressions used on blocks. ''' return list(set([b.output_compression for b in self.blocks])) def get_output_compression_extensions(self): ''' Infer the compression extensions used on blocks. Note that this is somewhat indirect and could be fooled if a new extension for the same compression label is loaded after the compression of the block. ''' ext = [] for label in self.get_output_compressions(): compressor = mcompression._get_compressor_from_extensions(label, return_extension=True) if compressor is not None: ext += [compressor[1]] # second item is the extension return ext def __getitem__(self, arr): return self.find_or_create_block_for_array(arr, object()) def close(self): for block in self.blocks: block.close() class Block: """ Represents a single block in an ASDF file. This is an implementation detail and should not be instantiated directly. Instead, it should only be created through the `BlockManager`. """ _header = util.BinaryStruct([ ('flags', 'I'), ('compression', '4s'), ('allocated_size', 'Q'), ('used_size', 'Q'), ('data_size', 'Q'), ('checksum', '16s') ]) def __init__(self, data=None, uri=None, array_storage='internal', memmap=True, lazy_load=True): self._data = data self._uri = uri self._array_storage = array_storage self._fd = None self._offset = None self._input_compression = None self._output_compression = 'input' self._output_compression_kwargs = {} self._checksum = None self._should_memmap = memmap self._memmapped = False self._lazy_load = lazy_load self._readonly = False self.update_size() self._allocated = self._size def __repr__(self): return '<Block {0} off: {1} alc: {2} siz: {3}>'.format( self._array_storage[:3], self._offset, self._allocated, self._size) def __len__(self): return self._size @property def offset(self): return self._offset @offset.setter def offset(self, offset): self._offset = offset @property def allocated(self): return self._allocated @allocated.setter def allocated(self, allocated): self._allocated = allocated @property def header_size(self): return self._header.size + constants.BLOCK_HEADER_BOILERPLATE_SIZE @property def data_offset(self): return self._offset + self.header_size @property def size(self): return self._size + self.header_size @property def end_offset(self): """ The offset of the end of the allocated space for the block, and where the next block should begin. """ return self.offset + self.header_size + self.allocated @property def trust_data_dtype(self): """ If True, ignore the datatype and byteorder fields from the tree and take the data array's dtype at face value. This is used to support blocks stored in FITS files. """ return False @property def array_storage(self): return self._array_storage @property def input_compression(self): """ The compression codec used to read the block. """ return self._input_compression @input_compression.setter def input_compression(self, compression): self._input_compression = mcompression.validate(compression) @property def output_compression(self): """ The compression codec used to write the block. 
:return: """ if self._output_compression == 'input': return self._input_compression return self._output_compression @output_compression.setter def output_compression(self, compression): self._output_compression = mcompression.validate(compression) @property def output_compression_kwargs(self): """ The configuration options to the Compressor constructur used to write the block. :return: """ return self._output_compression_kwargs @output_compression_kwargs.setter def output_compression_kwargs(self, config): if config is None: config = {} self._output_compression_kwargs = config.copy() @property def checksum(self): return self._checksum @property def readonly(self): return self._readonly def _set_checksum(self, checksum): if checksum == b'\0' * 16: self._checksum = None else: self._checksum = checksum def _calculate_checksum(self, array): # The following line is safe because we're only using # the MD5 as a checksum. m = hashlib.new('md5') # nosec m.update(array) return m.digest() def validate_checksum(self): """ Validate the content of the block against the current checksum. Returns ------- valid : bool `True` if the content is valid against the current checksum or there is no current checksum. Otherwise, `False`. """ if self._checksum: checksum = self._calculate_checksum(self._flattened_data) if checksum != self._checksum: return False return True def update_checksum(self): """ Update the checksum based on the current data contents. """ self._checksum = self._calculate_checksum(self._flattened_data) def update_size(self): """ Recalculate the on-disk size of the block. This causes any compression steps to run. It should only be called when updating the file in-place, otherwise the work is redundant. """ if self._data is not None: data = self._flattened_data self._data_size = data.nbytes if not self.output_compression: self._size = self._data_size else: self._size = mcompression.get_compressed_size( data, self.output_compression, config=self.output_compression_kwargs) else: self._data_size = self._size = 0 def read(self, fd, past_magic=False, validate_checksum=False): """ Read a Block from the given Python file-like object. If the file is seekable and lazy_load is True, the reading or memmapping of the actual data is postponed until an array requests it. If the file is a stream or lazy_load is False, the data will be read into memory immediately. Parameters ---------- fd : GenericFile past_magic : bool, optional If `True`, the file position is immediately after the block magic token. If `False` (default), the file position is exactly at the beginning of the block magic token. validate_checksum : bool, optional If `True`, validate the data against the checksum, and raise a `ValueError` if the data doesn't match. """ offset = None if fd.seekable(): offset = fd.tell() if not past_magic: buff = fd.read(len(constants.BLOCK_MAGIC)) if len(buff) < 4: return None if buff not in (constants.BLOCK_MAGIC, constants.INDEX_HEADER[:len(buff)]): raise ValueError( "Bad magic number in block. " "This may indicate an internal inconsistency about the " "sizes of the blocks in the file.") if buff == constants.INDEX_HEADER[:len(buff)]: return None elif offset is not None: offset -= 4 buff = fd.read(2) header_size, = struct.unpack(b'>H', buff) if header_size < self._header.size: raise ValueError( "Header size must be >= {0}".format(self._header.size)) buff = fd.read(header_size) header = self._header.unpack(buff) # This is used by the documentation system, but nowhere else. 
self._flags = header['flags'] self._set_checksum(header['checksum']) try: self.input_compression = header['compression'] except ValueError as v: raise v # TODO: hint extension? if (self.input_compression is None and header['used_size'] != header['data_size']): raise ValueError( "used_size and data_size must be equal when no compression is used.") if (header['flags'] & constants.BLOCK_FLAG_STREAMED and self.input_compression is not None): raise ValueError( "Compression set on a streamed block.") if fd.seekable(): # If the file is seekable, we can delay reading the actual # data until later. self._fd = fd self._offset = offset self._header_size = header_size if header['flags'] & constants.BLOCK_FLAG_STREAMED: # Support streaming blocks self._array_storage = 'streamed' if self._lazy_load: fd.fast_forward(-1) self._data_size = self._size = self._allocated = \ (fd.tell() - self.data_offset) + 1 else: self._data = fd.read_into_array(-1) self._data_size = self._size = self._allocated = len(self._data) else: self._allocated = header['allocated_size'] self._size = header['used_size'] self._data_size = header['data_size'] if self._lazy_load: fd.fast_forward(self._allocated) else: curpos = fd.tell() self._memmap_data() fd.seek(curpos) if not self._memmapped: self._data = self._read_data(fd, self._size, self._data_size) fd.fast_forward(self._allocated - self._size) else: fd.fast_forward(self._allocated) else: # If the file is a stream, we need to get the data now. if header['flags'] & constants.BLOCK_FLAG_STREAMED: # Support streaming blocks self._array_storage = 'streamed' self._data = fd.read_into_array(-1) self._data_size = self._size = self._allocated = len(self._data) else: self._allocated = header['allocated_size'] self._size = header['used_size'] self._data_size = header['data_size'] self._data = self._read_data(fd, self._size, self._data_size) fd.fast_forward(self._allocated - self._size) fd.close() if validate_checksum and not self.validate_checksum(): raise ValueError( "Block at {0} does not match given checksum".format( self._offset)) return self def _read_data(self, fd, used_size, data_size): """ Read the block data from a file. """ if not self.input_compression: return fd.read_into_array(used_size) else: return mcompression.decompress( fd, used_size, data_size, self.input_compression) def _memmap_data(self): """ Memory map the block data from the file. """ memmap = self._fd.can_memmap() and not self.input_compression if self._should_memmap and memmap: self._data = self._fd.memmap_array(self.data_offset, self._size) self._memmapped = True @property def _flattened_data(self): """ Retrieve flattened data suitable for writing. Returns ------- np.ndarray 1D contiguous array. """ data = self.data # 'K' order flattens the array in the order that elements # occur in memory, except axes with negative strides which # are reversed. That is a problem for base arrays with # negative strides and is an outstanding bug in this library. return data.ravel(order='K') def write(self, fd): """ Write an internal block to the given Python file-like object. 
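The resulting on-disk layout is, schematically (field sizes follow the header struct defined above)::

    <block magic, 4 bytes> <header size, 2-byte big-endian>
    <packed header struct> <(possibly compressed) data>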
""" self._header_size = self._header.size if self._data is not None: data = self._flattened_data else: data = None flags = 0 data_size = used_size = allocated_size = 0 if self._array_storage == 'streamed': flags |= constants.BLOCK_FLAG_STREAMED elif data is not None: self._checksum = self._calculate_checksum(data) data_size = data.nbytes if not fd.seekable() and self.output_compression: buff = io.BytesIO() mcompression.compress(buff, data, self.output_compression, config=self.output_compression_kwargs) self.allocated = self._size = buff.tell() allocated_size = self.allocated used_size = self._size self.input_compression = self.output_compression if allocated_size < used_size: raise RuntimeError(f"Block used size {used_size} larger than allocated size {allocated_size}") if self.checksum is not None: checksum = self.checksum else: checksum = b'\0' * 16 fd.write(constants.BLOCK_MAGIC) fd.write(struct.pack(b'>H', self._header_size)) fd.write(self._header.pack( flags=flags, compression=mcompression.to_compression_header( self.output_compression), allocated_size=allocated_size, used_size=used_size, data_size=data_size, checksum=checksum)) if data is not None: if self.output_compression: if not fd.seekable(): fd.write(buff.getvalue()) else: # If the file is seekable, we write the # compressed data directly to it, then go back # and write the resulting size in the block # header. start = fd.tell() mcompression.compress( fd, data, self.output_compression, config=self.output_compression_kwargs) end = fd.tell() self.allocated = self._size = end - start fd.seek(self.offset + 6) self._header.update( fd, allocated_size=self.allocated, used_size=self._size) fd.seek(end) else: if used_size != data_size: raise RuntimeError(f"Block used size {used_size} is not equal to the data size {data_size}") fd.write_array(data) @property def data(self): """ Get the data for the block, as a numpy array. """ if self._data is None: if self._fd.is_closed(): raise IOError( "ASDF file has already been closed. " "Can not get the data.") # Be nice and reset the file position after we're done curpos = self._fd.tell() try: self._memmap_data() if not self._memmapped: self._fd.seek(self.data_offset) self._data = self._read_data( self._fd, self._size, self._data_size) finally: self._fd.seek(curpos) return self._data def close(self): if self._memmapped and self._data is not None: if NUMPY_LT_1_7: # pragma: no cover try: self._data.flush() except ValueError: pass else: self._data.flush() if self._data._mmap is not None: self._data._mmap.close() self._data = None class UnloadedBlock: """ Represents an indexed, but not yet loaded, internal block. All that is known about it is its offset. It converts itself to a full-fledged block whenever the underlying data or more detail is requested. 
""" def __init__(self, fd, offset, memmap=True, lazy_load=True, readonly=False): self._fd = fd self._offset = offset self._data = None self._uri = None self._array_storage = 'internal' self._input_compression = None self._output_compression = 'input' self._output_compression_kwargs = {} self._checksum = None self._should_memmap = memmap self._memmapped = False self._lazy_load = lazy_load self._readonly = readonly def __len__(self): self.load() return len(self) def close(self): pass @property def array_storage(self): return 'internal' @property def offset(self): return self._offset def __getattr__(self, attr): self.load() return getattr(self, attr) def load(self): self._fd.seek(self._offset, generic_io.SEEK_SET) self.__class__ = Block self.read(self._fd) def calculate_updated_layout(blocks, tree_size, pad_blocks, block_size): """ Calculates a block layout that will try to use as many blocks as possible in their original locations, though at this point the algorithm is fairly naive. The result will be stored in the offsets of the blocks. Parameters ---------- blocks : Blocks instance tree_size : int The amount of space to reserve for the tree at the beginning. Returns ------- Returns `False` if no good layout can be found and one is best off rewriting the file serially, otherwise, returns `True`. """ def unfix_block(i): # If this algorithm gets more sophisticated we could carefully # move memmapped blocks around without clobbering other ones. # TODO: Copy to a tmpfile on disk and memmap it from there. entry = fixed[i] copy = entry.block.data.copy() entry.block.close() entry.block._data = copy del fixed[i] free.append(entry.block) def fix_block(block, offset): block.offset = offset fixed.append(Entry(block.offset, block.offset + block.size, block)) fixed.sort() Entry = namedtuple("Entry", ['start', 'end', 'block']) fixed = [] free = [] for block in blocks._internal_blocks: if block.offset is not None: block.update_size() fixed.append( Entry(block.offset, block.offset + block.size, block)) else: free.append(block) if not len(fixed): return False fixed.sort() # Make enough room at the beginning for the tree, by popping off # blocks at the beginning while len(fixed) and fixed[0].start < tree_size: unfix_block(0) if not len(fixed): return False # This algorithm is pretty basic at this point -- it just looks # for the first open spot big enough for the free block to fit. 
while len(free): block = free.pop() last_end = tree_size for entry in fixed: if entry.start - last_end >= block.size: fix_block(block, last_end) break last_end = entry.end else: padding = util.calculate_padding( entry.block.size, pad_blocks, block_size) fix_block(block, last_end + padding) if blocks.streamed_block is not None: padding = util.calculate_padding( fixed[-1].block.size, pad_blocks, block_size) blocks.streamed_block.offset = fixed[-1].end + padding blocks._sort_blocks_by_offset() return True ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9255526 asdf-2.9.2/asdf/commands/0000755000537500020070000000000000000000000016314 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/__init__.py0000644000537500020070000000112000000000000020417 0ustar00wjamiesonSTSCI\scienceimport importlib from .exploded import implode, explode from .to_yaml import to_yaml from .defragment import defragment from .diff import diff from .tags import list_tags from .extension import find_extensions from .info import info from .edit import edit __all__ = ['implode', 'explode', 'to_yaml', 'defragment', 'diff', 'list_tags', 'find_extensions', 'info', 'edit'] # Extracting ASDF-in-FITS files requires Astropy if importlib.util.find_spec('astropy'): from .extract import extract_file from .remove_hdu import remove_hdu __all__ += ['extract_file', 'remove_hdu'] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/defragment.py0000644000537500020070000000365600000000000021014 0ustar00wjamiesonSTSCI\science""" Defragment command. """ import asdf from .main import Command from .. import AsdfFile __all__ = ['defragment'] class Defragment(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("defragment"), help="Defragment an ASDF file..", description="""Removes any unused blocks and unused space.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to collect.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file.""") parser.add_argument( "--resolve-references", "-r", action="store_true", help="""Resolve all references and store them directly in the output file.""") parser.add_argument( "--compress", "-c", type=str, nargs="?", choices=['zlib', 'bzp2', 'lz4'], help="""Compress blocks using one of "zlib", "bzp2" or "lz4".""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return defragment(args.filename[0], args.output, args.resolve_references, args.compress) def defragment(input, output=None, resolve_references=False, compress=None): """ Defragment a given ASDF file. Parameters ---------- input : str or file-like object The input file. output : str of file-like object The output file. resolve_references : bool, optional If `True` resolve all external references before saving. compress : str, optional Compression to use. """ with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to( output, all_array_storage='internal', all_array_compression=compress) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/diff.py0000644000537500020070000002752400000000000017610 0ustar00wjamiesonSTSCI\science""" Implementation of command for displaying differences between two ASDF files. 
""" import argparse import sys from numpy import array_equal import jmespath try: # Provides cross-platform color support import colorama colorama.init() RED = colorama.Fore.RED GREEN = colorama.Fore.GREEN RESET = colorama.Style.RESET_ALL except ImportError: from sys import platform # These platforms should support ansi color codes if platform.startswith('linux') or platform.startswith('darwin'): RED = '\x1b[31m' GREEN = '\x1b[32m' RESET = '\x1b[0m' else: RED = '' GREEN = '' RESET = '' import asdf from .main import Command from ..tagged import Tagged from ..util import human_list from ..tags.core.ndarray import NDArrayType __all__ = ['diff'] RESET_NEWLINE = RESET + '\n' NDARRAY_TAG = 'core/ndarray' LIST_MARKER = '-' THIS_MARKER = GREEN + "> " THAT_MARKER = RED + "< " class Diff(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): epilog = """ examples: diff two files: asdftool diff file_before.asdf file_after.asdf ignore differences in the file's ASDF metadata: asdftool diff file_before.asdf file_after.asdf -i '[asdf_library,history]' ignore differences in the 'foo' field of all objects in a list: asdftool diff file_before.asdf file_after.asdf -i 'path.to.some_list[*].foo' See https://jmespath.org/ for more information on constructing JMESPath expressions. """.strip() parser = subparsers.add_parser( "diff", description="Report differences between two ASDF files", epilog=epilog, formatter_class=argparse.RawDescriptionHelpFormatter, help="Report differences between two ASDF files", ) parser.add_argument( 'filenames', metavar='asdf_file', nargs=2, help="The ASDF files to compare.") parser.add_argument( '-m', '--minimal', action='store_true', help="Show minimal differences between the two files.") parser.add_argument( '-i', '--ignore', action='append', dest='ignore', help="JMESPath expression indicating tree nodes that should be ignored.") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return diff(args.filenames, args.minimal, ignore=args.ignore) class ArrayNode: """This class is used to represent unique dummy nodes in the diff tree. In general these dummy nodes will be list elements that we want to keep track of but not necessarily display. This allows the diff output to be cleaner.""" def __init__(self, name): self.name = name def __hash__(self): return hash(self.name) class PrintTree: """This class is used to remember the nodes in the tree that have already been displayed in the diff output. 
""" def __init__(self): self.__tree = dict(visited=False, children=dict()) def get_print_list(self, node_list): at_end = False print_list = [] current = self.__tree for node in ['tree'] + node_list: if at_end: print_list.append(node) elif not node in current['children']: print_list.append(node) at_end = True elif not current['children'][node]['visited']: print_list.append(node) else: print_list.append(None) if not at_end: current = current['children'][node] return print_list def __setitem__(self, node_list, visit): if not isinstance(node_list, list): raise TypeError("node_list parameter must be an instance of list") current = self.__tree for node in ['tree'] + node_list: if not node in current['children']: current['children'][node] = dict(visited=True, children=dict()) current = current['children'][node] class DiffContext: """Class that contains context data of the diff to be computed""" def __init__(self, asdf0, asdf1, iostream, minimal=False, ignore_ids=None): self.asdf0 = asdf0 self.asdf1 = asdf1 self.iostream = iostream self.minimal = minimal self.print_tree = PrintTree() if ignore_ids is None: self.ignore_ids = set() else: self.ignore_ids = ignore_ids def print_tree_context(diff_ctx, node_list, other, use_marker, last_was_list): """Print context information indicating location in ASDF tree.""" prefix = "" marker = THAT_MARKER if other else THIS_MARKER for node in diff_ctx.print_tree.get_print_list(node_list): if node is not None: node = LIST_MARKER if isinstance(node, ArrayNode) else node + ":" # All of this logic is just to make the display of arrays prettier if use_marker: line_prefix = " " if last_was_list else marker + prefix[2:] line_suffix = "" if node == LIST_MARKER else RESET_NEWLINE else: line_prefix = prefix line_suffix = RESET_NEWLINE diff_ctx.iostream.write(line_prefix + node + line_suffix) last_was_list = node == LIST_MARKER prefix += " " diff_ctx.print_tree[node_list] = True return last_was_list def print_in_tree(diff_ctx, node_list, thing, other, use_marker=False, last_was_list=False, ignore_lwl=False): """Recursively print tree context and diff information about object.""" last_was_list = print_tree_context( diff_ctx, node_list, other, use_marker, last_was_list) # If tree element is list, recursively print list contents if isinstance(thing, list): for i, subthing in enumerate(thing): key = ArrayNode("{}_{}".format(node_list[-1], i)) last_was_list = print_in_tree( diff_ctx, node_list+[key], subthing, other, use_marker=True, last_was_list=last_was_list, ignore_lwl=ignore_lwl) # If tree element is dictionary, recursively print dictionary contents elif isinstance(thing, dict): for key in sorted(thing.keys()): last_was_list = print_in_tree( diff_ctx, node_list+[key], thing[key], other, use_marker=True, last_was_list=last_was_list, ignore_lwl=ignore_lwl) # Print difference between leaf objects (no need to recurse further) else: use_marker = not last_was_list or ignore_lwl marker = THAT_MARKER if other else THIS_MARKER prefix = marker + " " * len(node_list) if use_marker else " " diff_ctx.iostream.write(prefix + str(thing) + RESET_NEWLINE) last_was_list = False return last_was_list def compare_objects(diff_ctx, obj0, obj1, keys=[]): """Displays diff of two objects if they are not equal""" if obj0 != obj1: print_in_tree(diff_ctx, keys, obj0, False, ignore_lwl=True) print_in_tree(diff_ctx, keys, obj1, True, ignore_lwl=True) def print_dict_diff(diff_ctx, tree, node_list, keys, other): """Recursively traverses dictionary object and displays differences""" for key in keys: if 
diff_ctx.minimal: nodes = node_list key = key else: nodes = node_list+[key] key = tree[key] use_marker = not diff_ctx.minimal print_in_tree(diff_ctx, nodes, key, other, use_marker=use_marker) def compare_ndarrays(diff_ctx, array0, array1, keys): """Compares two ndarray objects""" if isinstance(array0, list): array0 = {"data": array0} if isinstance(array1, list): array1 = {"data": array1} ignore_keys = set(['source', 'data']) compare_dicts(diff_ctx, array0, array1, keys, ignore_keys) differences = [] for field in ['shape', 'datatype']: if array0.get(field) != array1.get(field): differences.append(field) array0 = NDArrayType.from_tree(array0, diff_ctx.asdf0) array1 = NDArrayType.from_tree(array1, diff_ctx.asdf1) if not array_equal(array0, array1): differences.append('contents') if differences: prefix = " " * (len(keys) + 1) msg = "ndarrays differ by {}".format(human_list(differences)) diff_ctx.iostream.write(prefix + RED + msg + RESET_NEWLINE) def both_are_ndarrays(tree0, tree1): """Returns True if both inputs correspond to ndarrays, False otherwise""" if not (isinstance(tree0, Tagged) and isinstance(tree1, Tagged)): return False if not (NDARRAY_TAG in tree0._tag and NDARRAY_TAG in tree1._tag): return False return True def compare_dicts(diff_ctx, dict0, dict1, keys, ignores=set()): """Recursively compares two dictionary objects""" keys0 = set(dict0.keys()) - ignores keys1 = set(dict1.keys()) - ignores # Recurse into subtree elements that are shared by both trees for key in sorted(keys0 & keys1): obj0 = dict0[key] obj1 = dict1[key] compare_trees(diff_ctx, obj0, obj1, keys=keys+[key]) # Display subtree elements existing only in this tree print_dict_diff(diff_ctx, dict0, keys, sorted(keys0-keys1), False) # Display subtree elements existing only in that tree print_dict_diff(diff_ctx, dict1, keys, sorted(keys1-keys0), True) def compare_trees(diff_ctx, tree0, tree1, keys=[]): """Recursively traverses two ASDF tree and compares them""" if id(tree0) in diff_ctx.ignore_ids and id(tree1) in diff_ctx.ignore_ids: return if both_are_ndarrays(tree0, tree1): compare_ndarrays(diff_ctx, tree0, tree1, keys) elif isinstance(tree0, dict) and isinstance(tree1, dict): compare_dicts(diff_ctx, tree0, tree1, keys) elif isinstance(tree0, list) and isinstance(tree1, list): for i, (obj0, obj1) in enumerate(zip(tree0, tree1)): key = ArrayNode("item_{}".format(i)) compare_trees(diff_ctx, obj0, obj1, keys+[key]) else: compare_objects(diff_ctx, tree0, tree1, keys) def diff(filenames, minimal, iostream=sys.stdout, ignore=None): """ Compare two ASDF files and write diff output to the stdout or the specified I/O stream. filenames : list of str List of ASDF filenames to compare. Must be length 2. minimal : boolean Set to True to forego some pretty-printing to minimize the diff output. iostream : io.TextIOBase, optional Text-mode stream to write the diff, e.g., sys.stdout or an io.StringIO instance. Defaults to stdout. ignore : list of str, optional List of JMESPath expressions indicating tree nodes that should be ignored. 
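Example (an illustrative sketch; the file names are hypothetical)::

    import io
    from asdf.commands import diff

    buffer = io.StringIO()
    diff(["before.asdf", "after.asdf"], False, iostream=buffer)
    print(buffer.getvalue())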
""" if ignore is None: ignore_expressions = [] else: ignore_expressions = [jmespath.compile(e) for e in ignore] try: with asdf.open(filenames[0], _force_raw_types=True) as asdf0: with asdf.open(filenames[1], _force_raw_types=True) as asdf1: ignore_ids = set() for expression in ignore_expressions: for tree in [asdf0.tree, asdf1.tree]: result = expression.search(tree) if result is not None: ignore_ids.add(id(result)) if isinstance(result, list): for elem in result: ignore_ids.add(id(elem)) elif isinstance(result, dict): for value in result.values(): ignore_ids.add(id(value)) diff_ctx = DiffContext(asdf0, asdf1, iostream, minimal=minimal, ignore_ids=ignore_ids) compare_trees(diff_ctx, asdf0.tree, asdf1.tree) except ValueError as error: raise RuntimeError(str(error)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/edit.py0000644000537500020070000002657600000000000017633 0ustar00wjamiesonSTSCI\science""" Contains commands for lightweight text editing of an ASDF file. """ import io import os import re import shutil # Marked safe because the editor command is specified by an # environment variable that the user controls. import subprocess # nosec import sys import tempfile import yaml from .. import constants from .. import generic_io from .. import schema from .. import util from ..asdf import open_asdf, AsdfFile from ..block import BlockManager from .main import Command __all__ = ["edit"] if sys.platform.startswith("win"): DEFAULT_EDITOR = "notepad" else: DEFAULT_EDITOR = "vi" class Edit(Command): @classmethod def setup_arguments(cls, subparsers): """ Set up a command line argument parser for the edit subcommand. """ # Set up the parser parser = subparsers.add_parser( "edit", description="Edit the YAML portion of an ASDF file in-place.", ) # Need an input file parser.add_argument( "filename", help="Path to an ASDF file.", ) parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): """ Execute the edit subcommand. """ return edit(args.filename) def read_yaml(fd): """ Read the YAML portion of an open ASDF file's content. Parameters ---------- fd : GenericFile Returns ------- bytes YAML content int total number of bytes available for YAML area bool True if the file contains binary blocks """ # All ASDF files produced by this library, even the binary files # of an exploded ASDF file, include a YAML header, so we'll just # let this raise an error if the end marker can't be found. # Revisit this if someone starts producing files without a # YAML section, which the standard permits but is not possible # with current software. reader = fd.reader_until( constants.YAML_END_MARKER_REGEX, 7, "End of YAML marker", include=True, ) content = reader.read() reader = fd.reader_until( constants.BLOCK_MAGIC, len(constants.BLOCK_MAGIC), include=False, exception=False, ) buffer = reader.read() contains_blocks = fd.peek(len(constants.BLOCK_MAGIC)) == constants.BLOCK_MAGIC return content, len(content) + len(buffer), contains_blocks def write_edited_yaml_larger(path, new_content, version): """ Rewrite an ASDF file, replacing the YAML portion with the specified YAML content and updating the block index if present. The file is assumed to contain binary blocks. 
def write_edited_yaml_larger(path, new_content, version):
    """
    Rewrite an ASDF file, replacing the YAML portion with the
    specified YAML content and updating the block index if present.
    The file is assumed to contain binary blocks.

    Parameters
    ----------
    path : str
        Path to ASDF file
    new_content : bytes
        Updated YAML content
    version : asdf.versioning.AsdfVersion
        ASDF Standard version of the file
    """
    prefix = os.path.splitext(os.path.basename(path))[0] + "-"
    # Since the original file may be large, create the temporary
    # file in the same directory to avoid filling up the system
    # temporary area.
    temp_file = tempfile.NamedTemporaryFile(dir=os.path.dirname(path), prefix=prefix, suffix=".asdf", delete=False)
    try:
        temp_file.close()

        with generic_io.get_file(temp_file.name, mode="w") as fd:
            fd.write(new_content)
            # Allocate additional space for future YAML updates:
            pad_length = util.calculate_padding(len(new_content), True, fd.block_size)
            fd.fast_forward(pad_length)

            with generic_io.get_file(path) as original_fd:
                # Consume the file up to the first block, which must exist
                # as a precondition to using this method.
                original_fd.seek_until(
                    constants.BLOCK_MAGIC,
                    len(constants.BLOCK_MAGIC),
                )

                ctx = AsdfFile(version=version)
                blocks = BlockManager(ctx, copy_arrays=False, lazy_load=False)
                blocks.read_internal_blocks(original_fd, past_magic=True, validate_checksums=False)
                blocks.finish_reading_internal_blocks()
                blocks.write_internal_blocks_serial(fd)
                blocks.write_block_index(fd, ctx)
                blocks.close()

        # Swap in the new version of the file atomically:
        shutil.copy(temp_file.name, path)
    finally:
        os.unlink(temp_file.name)


def write_edited_yaml(path, new_content, available_bytes):
    """
    Overwrite the YAML portion of an ASDF file with the specified
    YAML content.  The content must fit in the space available.

    Parameters
    ----------
    path : str
        Path to ASDF file
    new_content : bytes
        Updated YAML content
    available_bytes : int
        Number of bytes available for YAML
    """
    # generic_io mode "rw" opens the file as "r+b":
    with generic_io.get_file(path, mode="rw") as fd:
        fd.write(new_content)

        pad_length = available_bytes - len(new_content)
        if pad_length > 0:
            fd.write(b"\0" * pad_length)
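
# A minimal sketch (assumed numbers, not part of edit.py) of the in-place
# rewrite performed by write_edited_yaml() above: the new YAML overwrites
# the old region and the remainder of the reserved area is padded with NUL
# bytes, so any binary blocks that follow keep their offsets.
new_content = b"%YAML 1.1\n--- !core/asdf-1.0.0\nfoo: 2\n...\n"
available_bytes = 2048  # hypothetical size of the reserved YAML area
padded = new_content + b"\0" * max(available_bytes - len(new_content), 0)
assert len(padded) == available_bytes
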
", ["c", "a"]) if choice == "a": return 1 else: continue if new_asdf_version != original_asdf_version or new_yaml_version != original_yaml_version: print("Error: cannot modify ASDF Standard or YAML version using this tool.") choice = request_input("(c)ontinue editing or (a)bort? ", ["c", "a"]) if choice == "a": return 1 else: continue try: # Blocks are not read during validation, so this will not raise # an error even though we're only opening the YAML portion of # the file. with open_asdf(io.BytesIO(new_content), _force_raw_types=True): pass except yaml.YAMLError as e: print("Error: failed to parse updated YAML:") print_exception(e) choice = request_input("(c)ontinue editing or (a)bort? ", ["c", "a"]) if choice == "a": return 1 else: continue except schema.ValidationError as e: print("Warning: updated ASDF tree failed validation:") print_exception(e) choice = request_input("(c)ontinue editing, (f)orce update, or (a)bort? ", ["c", "f", "a"]) if choice == "a": return 1 elif choice == "c": continue except Exception as e: print("Error: failed to read updated file as ASDF:") print_exception(e) choice = request_input("(c)ontinue editing or (a)bort? ", ["c", "a"]) if choice == "a": return 1 else: continue # We've either opened the file without error, or # the user has agreed to ignore validation errors. # Break out of the loop so that we can update the # original file. break finally: os.unlink(temp_file.name) if len(new_content) <= available_bytes: # File has sufficient space allocated in the YAML area. write_edited_yaml(path, new_content, available_bytes) elif not contains_blocks: # File does not have sufficient space, but there are # no binary blocks, so we can just expand the file. write_edited_yaml(path, new_content, len(new_content)) else: # File does not have sufficient space, and binary blocks # are present. print("Warning: updated YAML larger than allocated space. File must be rewritten.") choice = request_input("(c)ontinue or (a)bort? ", ["c", "a"]) if choice == "a": return 1 else: write_edited_yaml_larger(path, new_content, new_asdf_version) def parse_asdf_version(content): """ Extract the ASDF Standard version from YAML content. Parameters ---------- content : bytes Returns ------- asdf.versioning.AsdfVersion ASDF Standard version """ comments = AsdfFile._read_comment_section(generic_io.get_file(io.BytesIO(content))) return AsdfFile._find_asdf_version_in_comments(comments) def parse_yaml_version(content): """ Extract the YAML version from YAML content. Parameters ---------- content : bytes Returns ------- bytes YAML version string. """ match = re.search(b"^%YAML (.*)$", content, flags=re.MULTILINE) if match is None: raise ValueError("YAML version number not found") return match.group(1) def print_exception(e): """ Print an exception, indented 4 spaces and elided if too many lines. """ lines = str(e).split("\n") if len(lines) > 20: lines = lines[0:20] + ["..."] for line in lines: print(f" {line}") def request_input(message, choices): """ Request user input. Parameters ---------- message : str Message to display choices : list of str List of recognized inputs """ while True: choice = input(message).strip().lower() if choice in choices: return choice else: print(f"Invalid choice: {choice}") def open_editor(path): """ Launch an editor process with the file at path opened. """ editor = os.environ.get("EDITOR", DEFAULT_EDITOR) # Marked safe because the editor command is specified by an # environment variable that the user controls. 
def open_editor(path):
    """
    Launch an editor process with the file at path opened.
    """
    editor = os.environ.get("EDITOR", DEFAULT_EDITOR)
    # Marked safe because the editor command is specified by an
    # environment variable that the user controls.
    subprocess.run(f"{editor} {path}", check=True, shell=True)  # nosec


asdf-2.9.2/asdf/commands/exploded.py

"""
Contains commands for dealing with exploded and imploded forms.
"""
import os

import asdf
from .main import Command
from .. import AsdfFile

__all__ = ['implode', 'explode']


class Implode(Command):
    @classmethod
    def setup_arguments(cls, subparsers):
        parser = subparsers.add_parser(
            str("implode"), help="Implode an ASDF file.",
            description="""Combine an ASDF file, where the data may be
            stored in multiple ASDF files, into a single ASDF file.""")

        parser.add_argument(
            'filename', nargs=1,
            help="""The ASDF file to implode.""")
        parser.add_argument(
            "--output", "-o", type=str, nargs="?",
            help="""The name of the output file.  If not provided, it
            will be the name of the input file with "_all" appended.""")
        parser.add_argument(
            "--resolve-references", "-r", action="store_true",
            help="""Resolve all references and store them directly in
            the output file.""")

        parser.set_defaults(func=cls.run)
        return parser

    @classmethod
    def run(cls, args):
        return implode(args.filename[0], args.output, args.resolve_references)


def implode(input, output=None, resolve_references=False):
    """
    Implode a given ASDF file, which may reference external data, back
    into a single ASDF file.

    Parameters
    ----------
    input : str or file-like object
        The input file.

    output : str or file-like object
        The output file.

    resolve_references : bool, optional
        If `True` resolve all external references before saving.
    """
    if output is None:
        base, ext = os.path.splitext(input)
        output = base + '_all' + '.asdf'
    with asdf.open(input) as ff:
        ff2 = AsdfFile(ff)
        if resolve_references:
            ff2.resolve_references()
        ff2.write_to(output, all_array_storage='internal')


class Explode(Command):
    @classmethod
    def setup_arguments(cls, subparsers):
        parser = subparsers.add_parser(
            str("explode"), help="Explode an ASDF file.",
            description="""From a single ASDF file, create a set of
            ASDF files where each data block is stored in a separate
            file.""")

        parser.add_argument(
            'filename', nargs=1,
            help="""The ASDF file to explode.""")
        parser.add_argument(
            "--output", "-o", type=str, nargs="?",
            help="""The name of the output file.  If not provided, it
            will be the name of the input file with "_exploded" appended.""")

        parser.set_defaults(func=cls.run)
        return parser

    @classmethod
    def run(cls, args):
        return explode(args.filename[0], args.output)


def explode(input, output=None):
    """
    Explode a given ASDF file so each data block is in a separate file.

    Parameters
    ----------
    input : str or file-like object
        The input file.

    output : str or file-like object
        The output file.
    """
    if output is None:
        base, ext = os.path.splitext(input)
        output = base + '_exploded' + '.asdf'
    with asdf.open(input) as ff:
        ff.write_to(output, all_array_storage='external')
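
# A small usage sketch (hypothetical file names) for the two commands above:
# explode() writes every data block to its own external file, and implode()
# gathers the pieces back into a single self-contained ASDF file.
from asdf.commands.exploded import explode, implode

explode("example.asdf")  # -> example_exploded.asdf plus external block files
implode("example_exploded.asdf",
        output="example_all.asdf",
        resolve_references=True)
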
""" from .main import Command from ..entry_points import get_extensions __all__ = ['find_extensions'] class QueryExtension(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( "extensions", help="Show information about installed extensions", description="""Reports information about installed ASDF extensions""") display_group = parser.add_mutually_exclusive_group() display_group.add_argument( "-s", "--summary", action="store_true", help="Display only the installed extensions themselves") display_group.add_argument( "-t", "--tags-only", action="store_true", help="Display tags from installed extensions, but no other information") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return find_extensions(args.summary, args.tags_only) def _format_extension(ext): if ext.extension_uri is None: uri = "(none)" else: uri = f"'{ext.extension_uri}'" return "Extension URI: {} package: {} ({}) class: {}".format( uri, ext.package_name, ext.package_version, ext.class_name ) def _format_type_name(typ): if isinstance(typ, str): return typ else: return "{}.{}".format(typ.__module__, typ.__name__) def _print_extension_details(ext, tags_only): tag_uris = [t.tag_uri for t in ext.tags] for typ in ext.types: if isinstance(typ.name, list): for name in typ.name: tag_uris.append(typ.make_yaml_tag(name)) elif typ.name is not None: tag_uris.append(typ.make_yaml_tag(typ.name)) if len(tag_uris) > 0: print("tags:") for tag_uri in sorted(tag_uris): print(" - " + tag_uri) if not tags_only: types = [] for converter in ext.converters: for typ in converter.types: types.append(typ) for typ in ext.types: types.extend(typ.types) if len(types) > 0: print("types:") for typ in sorted(types, key=_format_type_name): print(" - " + _format_type_name((typ))) def find_extensions(summary, tags_only): for ext in get_extensions(): print(_format_extension(ext)) if not summary: _print_extension_details(ext, tags_only) print() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/extract.py0000644000537500020070000000307300000000000020343 0ustar00wjamiesonSTSCI\science""" Implementation of command for converting ASDF-in-FITS to standalone ASDF file. 
""" import asdf from asdf.fits_embed import AsdfInFits from .main import Command __all__ = ['extract_file'] class AsdfExtractor(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser(str("extract"), help="Extract ASDF extensions in ASDF-in-FITS files into pure ASDF files", description="Extracts ASDF extensions into pure ASDF files.") parser.add_argument( 'infile', action='store', type=str, help="Name of ASDF-in-FITS file containing extension to be extracted") parser.add_argument( 'outfile', action='store', type=str, help="Name of new pure ASDF file containing extracted extension") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return extract_file(args.infile, args.outfile) def extract_file(input_file, output_file): """Function for performing extraction from ASDF-in-FITS to pure ASDF.""" try: with asdf.open(input_file) as ih: if not isinstance(ih, AsdfInFits): msg = "Given input file '{}' is not ASDF-in-FITS" raise RuntimeError(msg.format(input_file)) with asdf.AsdfFile(ih.tree) as oh: oh.write_to(output_file) except (IOError, ValueError) as error: raise RuntimeError(str(error)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/info.py0000644000537500020070000000237600000000000017631 0ustar00wjamiesonSTSCI\science""" Commands for displaying summaries of ASDF trees """ from .main import Command from .. import _convenience as convenience __all__ = ["info"] class Info(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( "info", help="Print a rendering of an ASDF tree.", description="Print a rendering of an ASDF tree." ) parser.add_argument("filename", help="ASDF file to render") parser.add_argument( "--max-rows", type=int, help="maximum number of lines" ) parser.add_argument( "--max-cols", type=int, help="maximum length of line") parser.add_argument("--show-values", dest="show_values", action="store_true") parser.add_argument("--no-show-values", dest="show_values", action="store_false") parser.set_defaults(show_values=True) parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): info(args.filename, args.max_rows, args.max_cols, args.show_values) def info(filename, max_rows, max_cols, show_values): convenience.info(filename, max_rows=max_rows, max_cols=max_cols, show_values=show_values) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/main.py0000644000537500020070000000365700000000000017625 0ustar00wjamiesonSTSCI\scienceimport argparse import logging import sys from .. import util # This list is ordered in order of average workflow command_order = [ 'Explode', 'Implode' ] class Command: @classmethod def setup_arguments(cls, subparsers): raise NotImplementedError() @classmethod def run(cls, args): raise NotImplementedError() def make_argparser(): """ Most of the real work is handled by the subcommands in the commands subpackage. 
""" def help(args): parser.print_help() return 0 parser = argparse.ArgumentParser( "asdftool", description="Commandline utilities for managing ASDF files.") parser.add_argument( "--verbose", "-v", action="store_true", help="Increase verbosity") subparsers = parser.add_subparsers( title='subcommands', description='valid subcommands') help_parser = subparsers.add_parser( str("help"), help="Display usage information") help_parser.set_defaults(func=help) commands = dict((x.__name__, x) for x in util.iter_subclasses(Command)) for command in command_order: commands[str(command)].setup_arguments(subparsers) del commands[command] for name, command in sorted(commands.items()): command.setup_arguments(subparsers) return parser, subparsers def main_from_args(args): parser, subparsers = make_argparser() args = parser.parse_args(args) # Only needed for Python 3, apparently, but can't hurt if not hasattr(args, 'func'): parser.print_help() return 2 try: result = args.func(args) except RuntimeError as e: logging.error(str(e)) return 1 except IOError as e: logging.error(str(e)) return e.errno if result is None: result = 0 return result def main(args=None): if args is None: args = sys.argv[1:] sys.exit(main_from_args(args)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/remove_hdu.py0000644000537500020070000000252400000000000021026 0ustar00wjamiesonSTSCI\science""" Implementation of command for removing ASDF HDU from ASDF-in-FITS file. """ from astropy.io import fits from .main import Command __all__ = ['remove_hdu'] class FitsExtractor(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser(str("remove-hdu"), help="Remove ASDF extension from ASDF-in-FITS file", description="Removes ASDF extensions from ASDF-in-FITS files.") parser.add_argument('infile', action='store', type=str, help="Name of ASDF-in-FITS file containing extension to be removed") parser.add_argument('outfile', action='store', type=str, help="Name of new FITS output file") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return remove_hdu(args.infile, args.outfile) def remove_hdu(input_file, output_file): """Function for removing ASDF HDU from ASDF-in-FITS files""" try: with fits.open(input_file) as hdulist: hdulist.readall() asdf_hdu = hdulist['ASDF'] hdulist.remove(asdf_hdu) hdulist.writeto(output_file) except (ValueError, KeyError) as error: raise RuntimeError(str(error)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tags.py0000644000537500020070000000310600000000000017624 0ustar00wjamiesonSTSCI\science""" Implementation of command for displaying available tags in asdf """ import sys from .main import Command from .. 
asdf-2.9.2/asdf/commands/tags.py

"""
Implementation of command for displaying available tags in asdf
"""
import sys

from .main import Command
from .. import AsdfFile

__all__ = ['list_tags']


class TagLister(Command):  # pragma: no cover
    """This class is the plugin implementation for the asdftool runner."""
    @classmethod
    def setup_arguments(cls, subparsers):
        parser = subparsers.add_parser(
            str("tags"), help="List currently available tags",
            description="""Lists currently available tags.""")

        parser.add_argument(
            '-d', '--display-classes', action='store_true',
            help="""Display associated class names in addition to tags""")

        parser.set_defaults(func=cls.run)
        return parser

    @classmethod
    def run(cls, args):
        return list_tags(display_classes=args.display_classes)


def _format_type(typ):
    if isinstance(typ, str):
        return typ
    else:
        return "{}.{}".format(typ.__module__, typ.__name__)


def list_tags(display_classes=False, iostream=sys.stdout):
    """Function to list tags"""
    af = AsdfFile()

    tag_pairs = []
    for tag in af.extension_manager._converters_by_tag:
        tag_pairs.append((tag, af.extension_manager.get_converter_for_tag(tag).types))
    for tag in af.type_index._type_by_tag:
        tag_pairs.append((tag, [af.type_index._type_by_tag[tag]]))

    for tag, types in sorted(tag_pairs, key=lambda pair: pair[0]):
        string = str(tag)
        if display_classes:
            string += ": " + ", ".join(_format_type(t) for t in types)
        iostream.write(string + '\n')
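
# Usage sketch for list_tags() above: capture the listing in a StringIO
# instead of writing to stdout.
import io
from asdf.commands.tags import list_tags

buf = io.StringIO()
list_tags(display_classes=True, iostream=buf)
print(buf.getvalue())
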
asdf-2.9.2/asdf/commands/tests/
asdf-2.9.2/asdf/commands/tests/__init__.py
asdf-2.9.2/asdf/commands/tests/data/
asdf-2.9.2/asdf/commands/tests/data/__init__.py
asdf-2.9.2/asdf/commands/tests/data/block0.asdf

#ASDF 1.0.0
#ASDF_STANDARD 1.1.0
%YAML 1.1
%TAG ! tag:stsci.edu:asdf/
--- !core/asdf-1.0.0
asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute,
  homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev870}
foobar: !core/ndarray-1.0.0
  source: 0
  datatype: uint64
  byteorder: little
  bizbaz: green
  shape: [9000]
...
[binary ndarray block data (ÓBLK...) omitted]
ŠßÎ?᪤Øfï?\x²éþcß?`’?ǯý©?Ýø $’â?€’½\úž?(·<­¥#Ò?è§£lÒ?¨Ó°ì´0Â?ŠÙ.q9â?¬W^’o/ë?ƒov·zDê?A¤Õ¯B±ç?.üy#ºhê?Ä„f@,Ä?˜ž Rä?ÚPw³ûÓ?[y”·ýÂ?Læ¡Oaà?Dª|]‹Ê?v?nš”áÒ?;M ]«Õ?¸»ù™ê³Ä?Ä¢lzÎ'á?Ø.|ì%¢Ê?tçïWÐÂÕ?æ4t²ïæ?øB_vöÃ?S“µ½”Aã?àœÏ› +ž?jÕœa¢å?åß3ýÛë?àŨI¼?îÔýÓ„”è?TÊó÷Ùä?ð#xq´?|<çêD6Ì?YeÈ!›è?WB0z ï?,ÿÿ +èÁ?üÍ„ƒ·Ú?ªI=&‰æ?ÈZm\Ï“ß?ºu$¢äŽ?øõtZÂ?BíÐ\0Ä?s2H 7ë?dŸ¶ :Ï?¬ýzDüÜ?ÈnÙv¤M·?ôÿhhU÷Þ?ï¼¹FsÄæ?8=Çb²¾¶?ÿ¼G´íeé?9Oí2º¡ï?†…[±Ê`ï?‰O7>ô¡â? ]E€7¹?á–•Ó?âi‹·ÐÜ?lavò­ë?Ó—BpD,å?2H7£ÚÛ?@MÈ¿þí§?¤†Ì"šTê?x´Öæƒw¼?Tk#é3×?fçä áEà?ö =ÎmÛ?yýe;â?JexVþæ?X.¶ö®Zï?³Ìˆ®«àä?› ‘ÖÝ?T©±¦ž—Ñ?ó á¿j`?xÒB^í?_ÑÚ¯è?@Q#u¼?Ðý¬Xu™¼?Q€„ãbRâ?饪ïÇä?à¿ðco´©?• m—å?Háägß)±?ëOX1Aï?èÍ}¥2Ì? ÍžXŒ£?ï ¹fì?èŠHt×SÕ?˜ û±^ZÝ?ØÃ«Uø©ê?Ùk³¶Pã?î²³{Ü?ĘybBQ×?#5®Á”%ì?ÀRuÀËÆ?@1±Cs……?äãt †×?{ýȸG®æ?ÒÀ Ñ´è?£±Çävç?€žùFUØÔ?^ìÅ@Ÿ;Ñ?t ÍpÒ?BÔFÄç? >mäÀ Þ?àUúƒ"î?£œ;·™Fí?ÈÅ}^bgê?^.e6õŠÑ?Σº_±Ý?öòï0מÓ?ÃøZ¡Þrï?}õj–d\å?§Zî:ã?Ü_îz¾×â? Ö)¡‡æ?<ÀtC-}Ä?xiÀz‡²Ø?\ËÕ‡ÚÖ?,4õl¤Ö?®i–I©/Ý?¹|¡aRVé?c< ¬RTæ?w‹Úñqí?ôÈiËôÂ?-„ÉCNÑ?4–µ6«É?X|J†Ù¿?ãV†½°?ÀOÝF²»?0ŒÛVÔì?Ì¡ênÊÒ??ÁŠh–“â?XnÛÞh~Ü?ð3K;R¯°?¬ŠIš½ŠÃ?¤É´.Ú?3è³v%äé?V{p /¼?bºÝTå?øÿ³K.ä?ù»{N¡?¼ÕBÕÆ?A‹¾Ë|á? € ôƒ)à?Œ`×=LYØ?¤E³ …è?Üy… nÇç?è™Z–I·?æ«H£Ó?Ö×ÿ·ùÜÖ?6¡öÔüùÖ?”uÒ6&Ç?|æ&’Ð?ä2¾di‰Ö?/òò9Õ?ùOytËÎæ?¨vw"“ÜÁ? uWÞðÙ?üû©<'qÎ? ÄzœO!ç? ÁÒéV¶á?ÑÜIÄá?°—ík¹ª?)Ì00Ôà?ÿ’«»$éì?ºh¥/cæ?ïžl‚³Aî?@+0wË‹?À—Ô°ÔÓ?œ¤V,Ýpï?k;b1êå?O(÷Óä?°¬?"¬ÁÍ?èd]¥ÖÒ?>‘k}ÿÛ?«»üå?ÐÙÐÐ&¹?hðÀ jÏ·? E (hç? mÚƒæ?µÎ²˜®Eã?f£ – Çç?@* è4Èï?”4­M¤H×?àäH“Ê—³?Ö&Ÿ4ì?“}ôlƒ¶ä? G]»žÒ?ÞG|’å?˜ —ßÏî?¾O«LØ+ä?zu~¸@Ñ?bW·.Ñ?À1*óÊ8º?¶ˆ¤øœÝ?#/4äç?2·°†&Eï?(\+óBÒ?Þ% «wŽì?^ÌrPÕå?0iÒ@2òÌ?ÀÀ`´•Í?tÓE×r×?¬‡1ëeÌ? 5döDjØ?™3èÆ?ƃ”öáÞ?vÒ ªF‹Ð?€x¹ ¾î?¨—haÏ?UO©ýï?þ•=He á?8þ®-yÏ?…íè €åâ?€Mξ{¨?MØ6Ѳøï?÷öK½?;gÆÒ‚ç?þv¼Rð¹ã?§8–„Ñí?&`hyl¤ã?¾´Ûy5Mç?( ŠÝE±Í?{ÿkVÇã?—ˆ&6"î?Pb,¸Ç?bu3ÜE³Ù?j:>Q#•ê?02­ž«Ûî?¢X.bøÜÙ?03H‡´ì?¦£}þ{à?Øj"9í?3¦ÄIà?ÄYÜ!âè?·Õ¡± ï?‚Ïu¨¸RÙ?šÌl1Õ?Òò b á?L^-gÎ?&H^ŸdšÚ?€·d«øu¥?À\£‹Ú?¥E(tÊà?€ù 9›þ½? G‰89ÚÕ?äÄ+ñÿäÔ?íK.dØ?ü$‚V©{Â?à+­“4Ó?ú­ks–ï?l§ã ŽÑ?PM¨J¨é«?pŒ)`ÄÇ?Íy™ë?`âeûèÁ©?=v™\Væ?Ó…õ„Ø?ûbréSIá?øY3<úÒ?·×„«a‚ê?°ñNOÔ?”9uÕ?`´vÕº?$Ò4uô¡ê?(@w ¦»?q=ŸÂ?<ì\ ¥ÖÕ?\^¶x~”ê?&2Ô¨Óî?˜}ùüYJº?  y¸?ílÿ»‘¨ï?LtJºªÆæ?Îz]F=Uê?†šC‚Ÿêè?Šøœ‹¡ì?·)ªžþ`ä?X{Ù;§ZÁ?Z6àõoï?d‡—EËÌ?ZÌÊs]ê?ËJ$B˜â?˜JðláÔ?Ü|Š‘šØ?‚Qæ¢Q”Ö?Oq§Œ!æ?ú.uä›Yá?À©‘A‹“Ò?D¼ê TÙ?äʨé?¿~nx~ê?µø`Wã?|!³¸>ï?Æu¥ °Ü?»R”à¦è?t«x/ÑÂ?æÊLÚºã?³¡ìrgaà? =8ÃQ3Î?€æHè*å³?n ¦S·Íä?xŽo¿ˆlâ?c¾,°B¶ë?ºeôVZÓ?v–®â8Dæ?à ¯âJ”?hÉy¯ðÃ?ÇøˆSè¿å?‹=O v—î?¼>©ˆ[Ë?j«É®ì?©-çáŠðë?þ‚Êšl¿Õ?𢙰ҳ?-2k`Fqí?ãf É›Fì?š‘¹lIî?ò¶ôÓÊÞ? '¦ÒdíË?˜—Õý`¾?ÚP_®è?4QåÑ"Ã?Öû¸:\Ü?ƒ‰¸´‰¾î?—’{GÉ?àx+®ú©×?“ÿ©ê¾Lê?;Þjì§å?Q¨ŸÄQØê?š‡Üy;FÐ? ˜=ÀÔî?%ˆËhÁì?$`3¬{é?Ðd/Q¶Â?XÍï®w–½?,zøÞæ?¿–!Á‚3ì?~5õá?hÞï?oc·Öí?@‡lÈ4þì?fŽÉÞÇtá?rÖâ(вê?·ëÚ™Á£æ?Ÿëd@tá?§ém‡¶é?X$Ç®£À?VŸã¸Qí?Ü•¶ó±?™³©ªÈã?à©;æµ?"ŸôØ´¹Õ? 6Qá~Íî?€ÙIK£>£?þ5vµÄÑ?¸šÔ=“gå?­í4/™ç?пâGqÚ?º2+ªÕué?ÀÈNsÔ?´<í#M•×?øe:Yõá?Ê¿½ÓÜAÓ?ÃÞ‡é¡ç?øØ'/Ô?|"E»!è?q§¤í?<š­@më? §$™9î?ÊWb"Hcå?<©3¯_Ù?›>$šÖè?¤ì Q¾ä?v› #*ç?`Rº¼!ôê?æ“¢°ò}ê?4WufÛ?Ÿú­i¡ƒè?’KÊõ+Õ?¬ŸÜÒV$í?Gî R‡¨â?0[ÜÐÿ—»?2úÏžŒ¨ï?ñ9ú*øšâ?0Jõë­“á?FÉÜŠ¹Ó?ÝÎÄsÕ•ë?ÿ Od®?yýˆO®³?bŸf5Ó?¶ÌbÝ^ë?’WŽ á?Ò™á˜ÕMÕ?<Ã¥Ô{»è?ÖyË@@Ü?öR‹‹ßÿÕ?õûm¬ïÉç?&-ô”BuÐ?†i3?ŸÑ?ðãé½¶?£^;äCî?ê1Û›ÝjÝ?9¦0‚wÒí?1ôR©4ä?l³o 0¨Ñ?L$eµZÁ?`D—ikâ?ö߯·jÛ?JBâ¤#^×?<3,ü{lî?¢G±ÊÖ?1ãÑ: Ó?Ào’:°Œè?àu¨þÈŠÐ?ziÓ ñhÜ?ü¶XŸD<ß?ØJ)GÛ?hu’hS†Ú?ìjÄAwÇî?ç?­š¬_ç?æœrá?ã;?xÒÄâ?È3ŽÖÐ?y:cîá?Ú{ ¯Xé?Dsw/Ö?ÀŸaÇ/ßê?ù1Ž1á.æ?1|Tï«Ùê?ê ³úà?À¥ÑplÒÑ?¼^õq¾´Ï?jÃÜúÓß?TG/š3Šá?ðD]¸×­?VjN\mtê?¬ï7¶ã? ¾óü½Á? Œ„QÔÝš?\TÚ“Ã?eä–ÒŽ ë?LN…ŒlÐ?=êÕé?øêz¥þ»?ÁÇq­m¶?ä«—¡Ý?)‚Ôköòæ?ã)* —ë?æ lâ°è?Æ!„³è?§*˲Ãí?\dV\ÏÎ?B«INE!î?lÖc0€‹Ô?–«ÌŽ9 ã?p>é|Ö ?µ8}Gʦå?Ò±x,r®Ö?“°ÿèLÚ?Z¸»låMÓ?žšçÎBî?ÛZÀ½Oç?M~IqÄrì?̺Ó\©à?4 èŽ/Õ?Oî:ò ’à?Þ§¡í\|Ô? z7( <Ç?€<Öžîç?ñׯ¹Ê?LhwræOË?:³×i\ÉÓ?šlWlÛÑ?Ò¦º˜íÜ?†GZ²1Ü?æ‚9ªÜêê?Å…GWï?ø-0ÔŠï?0«ŸÍM˜´?b+qqʯâ?fõ§ëmë?¤+ÆÀ¨=Ú?jÚ™køé?€•ï³²>Õ?M"!ÏÖï?§#ŸààOä?{&Èýˆ¼é?®q--ñ‚Ð?›à.Râ? 
Ô޳?€“ï…€‚?’”g>› â?<tÅ%Ú?BPà†Û?”\ |ž3Ã?$TEì›à?+pE®#ï?€Â]`mœ{?––<•½è?fàþk§ß?B’ž¶à?Йð‹w®?YØ}œë?cCÍÃ?¦ÞžÙ6å?0¶o¸]®?¹UÅp©?Ä[B­Ñ?P ·é¢5°?\º{V]ã?xR±“É?ÁÁ}V€#ï?Úzî?ÀßCEú4–?€!skå?§Dad½à?¢QÓ?‰˜á†$úî?õ®(6+)ç?ÌŽ‚ÄP¢â?×dú/7á?Cé%™<ôä?¦|´;4ç?Æ>Õë¨Ö?gži®à?æGc¾„á?ðøì¥ï´?&ûaòÿýÐ?òT'ü›´ä?˜™Ð¬‰·?öd«3'Ó?„˜ 3¦ å?›+kØÏ¼î?Ýò™¸/]ä?†R«{âÐ?„ùhoK×?tp‡Œâã?8Ëœç øØ?bC?ôïå?‰±ÂŸ8ä?çÃê¥é?æ ÉÍÐÖ?MP…ÉFá?¬ãž½MgÝ? æFzëß? `þ¿zÞ?\IÿŠýîë?§û¥õdÚ?{fûû?¨ƒRŽ ~½?eM§‘ôá?¶ï·Š‚<ï?Ô¯â’ðå?ZKþâéÂ?¤ŽÖ†Òí?ïÂÎ âdì?xí×·TÙ?dØl*sQÆ?[†XË9á?ᥣ.–Åé?³è‘ûwØï?ØÞøº;ÑÃ?Q·ôì™èá?¸ŸùNh×Î?Ài¸þP?–²ÃÂë&ì? ?{Œ¤ŒÙ?x­¦oµ?S¶E|.ââ? 2¿öXì?iù¾Î–¬æ?°U_KÛ¬?9³gÿæê?àMi—³ê¡?€\«¾o̶?CÊ¡‚L©å?JÛt,§™Ü?V¿Ñýë?á½ §åÉ?èûïÏš×?-”þÑÓå?2¸uQÌÝ?}le€%ãã?Ú~©zjdí?ÜÏ²Æ …Ö?‹fô,‘Ñâ?§¬ˆÒ!â?Ô”Ð]_­Ò?ô7•ä=¢ç?ƒÜ‘!å?ÖMx\»Þï?ÇÕ^†–í?HÝŽúéMÆ?ÌJb³Q¬ã? ”Ø€´·?°QëÉ?yÛ?€K£nÃÛ? NŠC¸?™ú¿¥í?û´0ä?Hé&Fuî?ßtæ è?â%c`^í?Ó$3,´ã?¨x•Ö³?°°à»²»?ˆp.™iÔ?7ŒOÎÑ:ë?–HWš}Ð?†aG-|ß?ù4âîœûé?xiçkŠ"Ý?„™K³ÿ9Í?¯Ø>»Úæ?ë@wܽà?⊹ŸB²ì?ï»îó&í?æPøäÒÛ?Ë“Ì'cqé?ò4,È?ˆª\÷]¤Û?²¶ÀØ]ä?‡¥Ô¦Üñæ?TT1K8Ù?Ȧ«!üq·?Èi Ê?ú\äÌtÙ?ÄjÓs\Í?­œ#0W#ì?HN2Ž&_Ä?H8[VL@±?yüÝQ@ î?ûŽr6%¸í?Þç›IöQä?dEdûÌ5Ü?®p`Ô?8ÇJˆYÊ?ÞLõ¤0î?ÛŽ^r”Ôæ?hw°mQ¸³?JAº±Q%Ù?ÄYÛÝ ŠÇ?rMBýÆÀÑ?–‹{¥à?Ò¹fØ?ôíîÀqÐ?(d¬èª9Æ?„ÍÌêÒ?æÚ#ÓMê?U˜K $Cã?¼RšF¨Õ?Ìå iÝÙ?0K+’^®?°oo6ýÀÔ?ù…•‹,fè?a[g¤ê?X€»Š7ÄÖ?(¼K˜õPµ?X±ï‘ ±?‡d‚@µä?D¶4dôÊÉ?V,@ý½–á?f^]aê? õë°–ãÆ?<«=’Þß?²ßm¬Èì?¶Ê§=(Ž?“Q]Ùx£ê?ÓÝâƒÔçè?ÕûÑÇë? âÚ>ø§â?ƒÝKfãgï?¸]Õ ª„¿? „ÍÊØ?‰¬<¬xËå?枈Õ?Ìøæ?ÂûÊ?²ƒ±•™Ð? î0¦æ?Ñ¡¶1À—ê?öÖg°‰Ø?°é'<;#×?ºÔ}GÝÍÑ?}鯽â?¼(ië?¸”SÄ5Ï?ï²*‡¡pç?1ïó÷ë?ô ?ø„;Ð? u(ß–|Û?%/8Y4ï?€T;ƒ\‹?ñÑèH+ê?=VåçÌ?äÏ>\})Ã?°7`ß¿HÆ?< ]ç¯Ö?¦ë¾”ýƒá?³÷K%ÑÄ?‚ôËâ„Ùî?sO×ð¸<ê?€Ùª:ÓÐ?`½Øç“Ï? U8²å-‘?-¤“©Êä?ð¼uíG+¼?fµèÎGÛ?`d®0’?_ÐnõÍ0ã?¼¾–BN·Å?@ @ÄFªÁ?ÈÜOk¨ú¼?v€iOh–Ø?ˆòÝH‹kÊ?@Nõ´º·?›3ƒ'*ã?”%˜vÔ?·ö]!ä?ú²]¡è?À=„HÖÝ?nŸÂLŸÝ?ÖÕ¨ö„Û?0h¸¡¶žÝ?Ü@´<û”Á?àbžMúàÍ?Ðtñ e–Ô?ku©mé?Û×6ÄNå?Žoã?L¯ÄÝ{ç?n€,œ[.î?éö:·Òë?F¡[$Úïî?€·ÜZŒä? þç'D¶?öÏvÀîç?h4È‚æ?˜… ýì§ë?‚ÛÓJ{é?×?ðFz"í?8n+®‘ç?"!ø¥ìÓ?D’ËØ?ˆ¹¯üáÙ?Ø„¢è‘¸?貺J~°?ÜܵpúÔ?ˆ› ¥ðŒ±?z2€Œÿé?°uÿ&é‹§?\¿ò$‚Æ?”€šý…ùÇ? Ý 妣?ɶ¯ µì? °"c¸ÄÉ?øÂëŸz?!¬ë=,pí?®‚÷š’Ð?Uåãï<)ã?ƒ£÷?ê?ftœcsŒ?d3ý(Ñ4í?ÜØ=ˆ¤Ë?®~mb÷ï?çúímÚâ?ÞÍn7oã? k$âCÝ?£‘B×µè?z uÇ3Ó?ˆ6 £qj¸?ÓÎ3®ÊQé?Üȱ§8Aã?¨3å}wá?½ [©]uè?ô;ôÍ…Ý?˜œ„ô "Å?KŠ“0á?¨4LáA ë?‘Vý8î?‚ ôsÔ?;8fœç?X¾õµAË?ÍÚ?<ÐÒ?Hö4†ÀÆ?Œ>žð0ÆÛ?¬£šã±Ê?}_<ôafê?ÌKøOøâ?uwÉZ¬Ñí?‹çw{UQì?ŒdhÞ ï?h‹:¿’n¶?üx{>5ÎÂ?Ê^Äè.ã?®°@ÛÄ?V<ÁTð1Ú?P»G7á?DžF%öÜ?HÇ»rÏ Ü?æŽó6óþå?¼ïˆ¾‘Ã? ŒMá?J`ׯ7ÛÜ?þ•4hé2Ó?Íz4WÅ?èÄSP¯FÊ?q?#¼ƒ÷é?ø` Ä¿?ü+®vhé?’Èh±öï?l §þfkâ?¤"ŽG·æÇ?ô¢ÉkbØ?ž%lš2FØ?Aݼ{<æ?vÌÀÊè?ÛÃý8Úýî?À€md䔢?(IýÚr¸?>$ûêv.ã?É‚ž¡¯?xNgôQñ¸?°äGÁ°?È£Øß¬é?2LþïNÙ?=0jî5å?#¡Õлžá?rŸý xæ?‰ó‹h$Âè?·I«*òké?-žÄ¿ é?… <¯?ùä?>^8h¸î?àl-ÞÌ?þ³ì"Çì?uÛâ͵Ôä?”:ôOÞ?ºÿß·&Žê?³Y¾Úîžï?¡À z,ã?ûì:?Œá?“Ýñ§¼å?W]¼l\Þè?XOÀ¤´î?jÊøbÐ?ƒ6•åè?ÇÿBž2é?k/5Ùáyí?€Ë{oaÌv?Èä1ÓF´Ö?¨ðL¦GÁÀ?¨¸<æxüÄ?‰¬¶e¡ê?¾N€ò°ß? ŒƒÏ?8q¢&˾ç?Ü62¯1YÂ?sž­?Îë?J±Å©÷×?Ù\×/HÍ?Š£iµÞ?ÐhðjáÎ?P ‚‹ï?s_@ìdä?Þ¿Yɾaç?[Ùmä5ã?ÐúãW$]¶?˜‡§,»?p0ª¬¦?oeS2ØÆí?Ðß´z [¡?4ÍO>¿è?2Dë­ŠÂâ?šhHÙýÜ?ÎQïì=î?PÌÏœ¦ ?ÇUCîëé?ðG<ô Ûã?‚¸ ;ñì?PfÀF|Oã?ä„-÷Tà?h‚á´ºÄ?ÇÊ«ï?„¨$Ñ8ê?+›‹|žôí?:¤v‰zÍì?I% £C ç?Ð’dæ°8Ú?Ž£ÔÒ^ýë?¶!Å•î?J£`×sÿÛ?ÿRß{BZâ?ÌŸWÉÃÜ?4Â5;*æÔ?Èd°<52Ë?Ç,àIÅ?°{g¹šæ?%Õ‚`g:ï?À‡lw9Ò?{)ß¡wç?馸… ã?ŒæØ©¾!Ú?vCïØá?x‡þïdÃ?ÈåV³îñÃ?ÇU¯õ'½ê?Àäø=‡‘?8¢] ÒÜ?Àfq/G™ã?óV‡…²·í?€&rT1‡?"lHÿQæ?p9Z¬Î?\U«²¹È?¬ÍLìJÎÄ?FÜ9w2Ò?Øà{qãÚ? æç¬ˆPØ?¸T‚8 ×?Ð o'æÉµ?Üã(™»ÏÈ?ä²h®Ò?a)À—lä?výpÚ‚0Ù?. HFÛ?¾íÔŒƒGâ?(EX+ŒÕÎ?"QÕøÔ=ï?¶Xq†¹¦å?È.¤aŒ}Û?ã! ý €å?sÂøêBÛã?‰înnä?+ä‡ öï?¨û½‚sÂ?œëÓŒÏ7É?œÞF 2´í?8N)p#UÜ?V‘H2(Dè?§4 å?ù×—‚ç?DbÿðÕ?ÒyØSÞ?ØÎg“à?ÊáEüæ?1˜qiËã?@1L,5™Â?Ì?^®~Ë?Âýl+Û?`’°Týì¬?д7W¹´½?£Kúý‚â?W€¨Q)Ü?iHIbiAí?‚Cƒ/*á?ƒîÙPgê?hŸô3Â_¼?-¦÷Æ_á?È9=°˜¶?þ„8&ÇÙ?l¾ÃanÎ?t··Ô´3Í??­¤Ú?®l ]_Jî?%­º¤çè?_A§jå? 
§`1ßÄ?“®â0 æ?Mm°F9ê?xˆ¸1Ý?™yí9{šá?HÍ‹*ì?¸4S"^è?|×ÂebªÓ?òEV Ÿ¿Ð?p"º]r¥¾?îð˜–é?BWÜ?{Œ¹üáã?棟œ¾éî?‚l£Îð¢ï?°7¯…>²?p^&×Îç?#}îˆ Ý?´k…Žÿ,Ñ?ü~Òµ zÊ?Ô3îÉO¬ï?° UÌ^»?ÀXˆi›¸?ÐÍn B¸?V§A‹êöä?je~‰Îá?M.»ýçÍá?È HZÑÕ? ¼oGæ ê?€ÿPÔp;v?€Ä¸é*“?«cµ·= á?°a±óâÇÓ? ¥ÁùÆ?@‡;ÇDÒ?Ç “@é?&@Ý´`'å?°X}ÿä?ð¯ƒ?ØÖ¨Q–%æ? ií#wíÜ?IMqJià?Ž^ci\ä?:¾i¢!sß?Lþ^qæ?’š=[Ú8á?Ø:95ŒÒ³?ºƒlŸ×?Ø´!—¼ÜÒ?…烼¯Ùí?=%}¯Úðë?®r  ˜â?š®Aƒá?*Daÿxç?8}uî¥$¿?½ÂV4ú~ã?ˆ¡&>½^ì?ïx»¶±ßä?¼‹3=ZÇ?&¢®ágKà?¢»:˜äÍë?;!!N«cï?VÔÉô î?þ^ ,¶á?€c¿ØÔLÚ?S˵ÞÜ?ºìT$šß?°¶Ö bWÎ?p1‘Šb“í?,ë8X5À?Ì弞ŠÕ?cŸ(®.ã?<Òç¾/¸Ä?ŠW!9 æ?í [àñè?<ë+ Î?‰AþçÙ?FÔøëÅÓ?ˆ’Åc %Æ?ói”ëÄì?(›nÞ?gÛõ’ËÒ?Â͉3Ñ?ò–ÈÈáSÔ?©zìïUªç?¸_/¯šÏ±?¸òc&¹?þ!ß2‡9ê?<×hatÞ?ˆâ÷—IÕ?üóFÙtxÂ?+µSÏ·à?ì7gÂ?”­ëÐáÊæ?xš‚a×?;›u°-æ?ã+–ËÆ?c:+°z·â?à©¶À€ÙÐ?ì´à ŽLê?ì‹““{â?¶;ñv šá?ý¨á]è? ÖA0®?oO+UD¿î?~iÛæ¿à?6÷}î?``yÓ®¥?&'2”õè?Å ™ëÔÓé?N/%:Ñzã?hI͌ٻ?ÁHÂy Ãç?ÆÖnãÑ?´› ÈçÝ?Àw˜f÷²?[QÁoKëä?•~ó#[í?¸2θ©·?°A×»Ä?°Ž7Ä í?5pÎ¥ˆ^î? ݺ„ÝØÕ?$3ÒÌzíÓ?¬ åþØÙ?:XNæÒUï?~ì×Óä? ZÏD<׿?¤z*p–¦Í?27,S¼9Ô?1”­ç¿?¡y+;‚Cí?m3´t“ê?¶:får¶×?@óáøGÇ…?0‚“Ø} à?èH¼¬â¢Ø?7Êw÷Y•å? Bùåæ?5XyÏ‹ç?Œ\N±¯-Í?×­¸/ç1à?.R U(Û?@É®¨Ê§?÷¸}¼ýî?ˆº“ðd–ì?4<?Ïߨ?š3î Eëã? úàRš€ç?@ÞünëË? ão1ìç?¢íèiˆµÜ? RôÐQä?ù½ïW¥è? ÚÌ'á–?@ÛÌ“ýãÖ?¶xü 3æØ??õ@ÏIå? !|ÎÌ?á•þ¾þã?’‡fpÕ?—êº\­‚ë?Ð+íUz ¹?pš;!ãÝÞ?£ä‚[>å?@·ö?”b¶?¢ëºyÞà?ðZsè?¸~¨[š³Ê?D?eL¼$Á?D¨¤«T°Ì?X@Šã±?î:ÔnÊ?肨Ô?MòÙÛã?8kò«<Â?l> RùÑ?4§8C/©ê?|(ÇŽ­ ï?€^ÎEÊ? Þ¸žWÈç?P·L€·?h‡m°{Ü?~DÖu¥ä?´ 3©é?ä\¥N{OÛ?À½Õ1ÝÖ?8l‡if¶? çç±=ï?þqúòwÿà?âÿAd×’ì?Œ÷Ð^GÏ?p¸ÁÐ2*À?|+ålôË?Ŧ=7eƒä?ùº&uÜùå?êXg4ÅØ? Ÿ¢¹f¤Î?,ë šQÕ?¿‡PK‚}ï?øF°Ê¹Tí?™D6 ç•ê?Ï»ÏDüç?¥ç¼žÝé?Tà‰p1·ï?bf®ã›tå?¸.ÞWÀ?À¶ký4®Á?ó(Õ §ã?öI«Hî?äŸGh;Ø?@¡«(›ÏÑ?^ȇãïà?ï»^óÜ?!›„uæùç?µ3î< lç?ƒ‘ Ö–Êç?ÊXøo²î?”òZm6ðÜ?Kâ±g@É?êÃÌXFöí?ˆŸÒTÎ?£ðO›a°í?ì&*«®í?ìélåùâÖ?¶Î´ü-ßÖ?ÈZQÜ¥VÜ?D„„½™Ù? :î±iï?¶°A²×Ò?Æcl.f„à?OÑÿW(å?„Ì‹i¼Ï?ÍX‰ƒ'¥ï?ÑÎæÙÇ?¨Nˆí¹‚Ü?„|y}Pjé?aÉ„uçâ?‚>‘h ûè?~¬Ð5î? ¥Ø¼ÚËí?'kŠ¡fÇ?öúëê{î?Çä @îè?ú½7CóÑÔ?@ ø•R?ÔT«ì¢žÜ?üq#¡¯Ä?öÛ(c1ÐÖ?2ta±­è?‚l±gÁ°ã?pw nƒÔ¡?\6ÚO}Ë?äKí±þ*Ô?ü#…'{ÊË?B–ú ÅØ?v¼àþ‡ÊØ?DL°#Ö?ƒæ{¹Ä ì?ºæ ãFXÞ?‡ëÁs£ à?¶Ïß-ç?5É·™î?ä/<ÎKØ?´°çdO„á?¼¸É@Åmã?×:Sù–ßì?`G7ì¶¼?íºÖø.Ëã?Hd®L½?wvyð_ã?ZG}£¯é? "¼œ†”?àIlqæ?dâÏð§í?òžcyä?²}µ©¾@á?¦ÓgTäÄì?A5‘ö¬bæ?¶c'9Dé?º$~óûÀÞ?øëGÌŠå»?Ä—¿púäß?¶·Ïö…ã?ˆÙuc¿Å?¤jgééÍÔ?º@3 !l×?ÚÙáùSá?TŠò¡Ê? $ÿă“?àWJX—ß?àª:43ë?°u+pÊSÓ?¸a•Ä?ØæúYBjµ?`WÍM‡U˜?øO´zÖ%º?¥ ¬B:å?ÑSÔÃÁ?¸©TÖFÄ?Á‹Èñ5Ü?÷\ö½/°â?ô]"Âî ?.<˜Z¯Ò?§ÉÚFÔá?ÐóáêÜÒ?k9¯¾¶Né?bF§û€ªÕ?QFx»˜Þæ?ôÕMÉië?¤ÙŠSÐ?‡E`Éá?=OŒ†…ë?\B@à+QÁ?¤ñ*A\ ß?”§ãæ?¬FgÑgÆ?òilç–YÐ?`V‡JHyË?°,èVÞ?ÊšzÔØ?²òÁLŽÒ? ÅÐó¶Lã?(b½³ ­Ã?|XêBþÛ?.cšÊÐZâ?€ÁiO]Þ?ÀÀ踜¯è?=žnƒî?2AUe]Ù?‚ ÅôPï?ðÝ(¡jeè?Œ—d€`Î?`n-oÛ(¾?òÁQv%Ñ? ÀÁÆáÈè?¼f2I©Ç?˜¼>Lt×? Š‹W‰? ç}Ê@°®?.€[+†Ý?T2§nzèÈ?ˆ!/#Æ?ä£ày JÉ?\n;âÎ?yöˆ(aÎ?>^\+óØ?å–•98â?(,K5tsÙ?¼ÎQ¶PÚ?ômœ/½½â? `ÅAtðÛ?ãÈ@_ë?-ÏJSã?•hÌß?4ˆÀþ]sß?ލ~´V0ç?•Ö[—Äç? 9XE×?B”­àbí?d}QŠíæ?Õ K•ï?pH¶±§XÆ?j1¯ißÞ?̰’ǺÕ?¢LO-NmÕ?’‰%o¡gï?Ã8Ó6—å?£„zuª™è?*^ºòP=à?ï!Úy8™ç?BãÉý³¶?ˆ­;0¾tØ?:³JnÏsê?xŠ_Ä®‡±?OÖòÛví?þPãØ¢ã?[½÷~ë¤ê?b•Ùmnà?À™û„Þã­?8%X ´é? šKòbs?\ªo´ßÏ?æ%ŸNÁûë?a¾šø¹óê?¸E”‡oµÖ?„‚êf%Ü?ᯉ"_üî?Öt”ö†¿â?É®©…P»ë?žMÙ˜?ÛÚ?€½Î6xGÓ?Zg”ºÂ ë?^õ/Huzã?¾\4ùºâ?Îß ‚€¡é?ÿUïÝê¯ç?Ä™}íÕÊ?}ߎ…í?ÊÙ-æÚ? eÛPg­µ?†¢G'ê?‘„¸Í?´b1ä ‘Á?#Ó¸.q‘?ÏËdç?møÂWKà?LËã§ó ê?€¢loÇ?™Õ ì°ã?”àŽÐMá?]í[ ëbè?Lÿt!„Ì?‡kºÃá?:±£îSã?ä:!zÿeÔ?E„yœÔî?i{~œê?ŽG»µ?4Õ?€´Y¤˜?*ÁW¿Î{à?—º:º$Äâ?–bÈøDÛ?˜V¯2"é?8›·¨|Â?YíAìì?Dâå%q\Ø?ºò€ÙÌèÒ?ÖUÂêülç?„?о¥_ì?–ÆË» oÑ?ò$ÔÑ·oè?±¢9]Öï? ²º…\ϧ?X¬ 3¾?ìšiáY*ï?N' â_î?™Ùä? Yª?0su[­Ì?öŽ®`å? [dJ»?NZµk£|Õ?F"—¶¤á?€>G{@­?V–oqy)Ù?¾XÝœ\Cè?€ü»Â?¶‡é§ç?Ä’€ýë?ÄÒ–—å­Ö?À ™Íû|Ð?Qø ÊtÖä?K¦Çáéå?½ÒÄtœí?õýï|¬èì?·âµþ} æ?XÖ4oaáÛ?ÐFk]dæ?ÉX—FÇä?4ácÆÖÒ? X뎊ï?Wu.jzDã?º-½9Ó?4@eók[Ý?–„‡ÑŸSÚ?Ó ТÞà?¨j ,À?‰šª©ìà?h-/­Ê¸?ËV¤Ô'!í?)jÍbä?8üǶҟ?#‘NlÈèá?Zö§Ö§fè?,/zäëÉ?úZ¾á´ç?XÇm\DÔ?‡ðRâb2ï?…Óžî?ï?V¼Þ2SÒ?UÀ9Aº[ë? 
;¤”«lÉ?×LŸu®\é?Œç‡¤é?åÔt•ÞAï?C4 ó]Té?£zàŸç?`8”q;ê?hÌ`º97í?ͼÙôDí?Lÿ}òjÐ?¬NiÛ™†Û?ø5ÀÒçä?X+Í!Ì?*“UHÑSé?T9ó—ÄÊÏ?¼Ã£P Ç×?s‡ë@à?Þn÷O¨ã?KDŽPMié? Ï>Õî?Ë&îÌr$æ?^”¯ntâ?XkæÛê?þ~NAê?¸Y°Ø½?Þq~ªÔæ?À²5e¡?€œñØÆ„?ÊÄïxý|ê?ÀþV‘¦?üµˆ*_ŠÄ?5ü¯Î dá?ãߘRÑê?Îí¡»tüè? J—Ö? '’…Fá?àžâ±]!â?há×Þ?DÑЯҶÌ?* Ô¬ä? 3¼¹üÎ?öF5Ûè?§Ì!î8ç?ØÚè÷¢ç?(ÆÜ«Ó?B<’ºæ?¨0Æ2.zÐ?ë¼~$ná?ìÎRÖË?ÈVá¦È?b@âé×’Ñ?ê:<£¾ê?ôýt…[ÀÚ?ÔM2Ÿý3Á?ž‚2¯öªÔ?iÍ«,_í?ïJ-u6½?ì[´Ø·_Ï?¡|_þí?iÖ$³ëÁä?ˆ4K)÷üç?Hþ"äË3Ü? >!¼EÍ?ªpb¥ÃÕ?½(‰%¥é?ˆ+ÿ.Hï?­8CU§è?t!r`A—Í?øbqË2tÈ?“hÔè?˜„Xø@»Å? f Í„rÅ?|Tû÷è?+é% í?Ào=öMŠ?°”ûôÍç?ª®+g‚Ò?TøÓTë?hR»Ò$©¶?©TÍ5Ÿê?B‚k·n©Ó?Îä³Zh6è?0üè5©#¬?á£Ö»±á?ô¬il Ó?L©ÿ§áï?Lé¼’ôÆ?šº¶ç‹ë?‹1ýƒ¯î?ü€Œ©ûŽä?‹èówã?åà(¸?ê?`ÄÊÖ?ÎÃ"ÌT®î?<œ»ë?"Ño yÞ?Cz´ç?þf!â$ë?ÀÄr˜ßâ?˜~äIÉ{Ó?o\Ý”Äè?øï,—á?Ðþè§í?–ýa·Ü?à› ±fgÜ?k¥¬Ÿ&Ò?´$=ÚmÚ?·>™X\)ç?@“\\”Ã?þŠï×nè?à*ùU„Ø?íïVÒê?V¢µŽ‰è?ìé4ÉÍ¡Ö? þØáÓ?ˆ‰¬ŽÏ?Œ¼hÍÝ?¨¤Ãlä?³Ào‚[ê?’^…x؇â?7âWÊ)é?@…Àós¼?Ô?—N Xä?=¸Cqeå?5Ÿ3bAŠç?l¶guÉ?šµ…JÀà?¥Í¢Â•sæ?ïàç‡úïî? p‘ †èì?Ö“[‚¼Fè?^ûËêñòß?ER<_˜í?‰9kÙ?@1zçÄçº?5V3K­oî?íSÓOŸDï?`Ä$¬¨†Ì?"Ü4â›1Ö?Ñ70"è?@œ½h•?Ä¥  Ø?di‚÷8à?uh(¹ªá?äjÊuBÙ?rÈxÊ»›Ý?¾ýž™Ø?ZVBç?³°è?u· Ìì?ŒÇ ­€Ô?NNǧdxÞ?üÂeýÏØ?\õ UãDç?ò8¤jcê?8,uŸ?!Ö?ùƒ+޲ ï?ÁGiuy?Ú œÌ¹Ø?oõ°ñVöé?Ü€hæùÐ?¢Äí;S6Ó?~ûàSÃ?&‚tFì?¹}ˆHÐ?£™A\A(ë?àÙ÷DÕ?Rm×` ‚?$É„'È?®ÜÌ•ïÊá?(4è‘,úä?fõ÷Cmâ?€<1¯Œt?]£Rt?lÀ±ì­®ä?yÊw΄‹ë?”âóÊüˆÄ?+IÅàä?šž‘ô–Õ?웤±QÃ?ARk÷í?P°N=C£?,!Gç³3Ì?Øžzrž(å?Ò$Ûs¤â? {ôæ÷-Ø?AIƒ¼ä?€Caç?¤\Ÿ#Û Ê?r“ÂC–»í?c*ç‰Dï?Š-ÕThTÖ?¿ƒ³éä¡?4ÚkþÄÜ?ñeŠ)÷í?fO»cJ¹Ö?îIYâ?¨ 2?oÄÈ?`– —?¨ÿ;:qÞ?¸!ÉI«´Ð?ÑZŽWý=í?`¹Îhöµ”? ‚Ï­U²á?^ÿ”¾±1ß?â»òÛ×?@úŒ.=׊?*­[ø§ê?Ø®ÎâŽê?F ®iÖ?Bã$Sõï? #¾eVçÃ?à–ø3ý¬?¸r€+ë×?¸‰KE¡»?‰¿~Ã^Eá?VyY«}Ø?˜~ÔU?åê?<6¬»}ùå?e UËä?”Öy×-Î?Î>¶ð›å?í~Ó4®™í?]¸«ébâ?þÊm…Ò(Ò?Î9kWoíä?ª_ßÌmrÚ?äž €VÀ?zp€W#å?ŸílôßÇá?HtzE”ãÇ?ÝŽ“A,.ê?@+RB¨?Åv’t~Ìç?Ô%s;lä?pIxøTBÇ?€ ;û«öy?x|ç{Ìå?B“ÎI\ï?P#4 «$¶?ºû™,ÚŽã?Xåó“pÂ?ÉžKuö7ã?´Ûv¾$€Â?63Ì ßÐ?Ú'ó-èä?€+ó"k?®gœðÛè?¹¬!{ã?Ì5“ô-¦ç?cå´¯³â?p’s3rÜÝ?ŠÛÕWï´î?0åk!íÃ?x+:IÌ?àÔt€!·?©¤L’wê?pö|ö†³?˜®x$‡´Ë?Öc¶±3.Ñ?À¢*nâ–­?D,îYÙ]Á?üo}mnÎ?0ËÇà!Dê?¹tWíæ?€€0p©°Œ?¼’ž¸ôä?@ ¶VÛ½¾?h#á³¢[Ä?á?Uê|¬è?sf­~áìã?ø’&ªš1Ý?ð¸T¼ÂŽÈ?@XÞ2˜cè?Ìçä.?Ç? )ãšRÙ?€oÏA;#—?ˆ:_¼NÜ?4Ù¥»Õ?Р¤ýØÎ?Ì ÜvjÄ?Ь„£83ê?v,ПЉÛ?ÊKŽ_<«Ö?qyÎÜ?“êiRõã?àÃxÝx½?¬U…ÔaµÕ?ø¦ÍFˆãÈ?€iæí4º¢?„¹²ò!µÎ?Ímó•äGí?{Yã˜zòæ?e}û÷î?Ó0¬Øç^â?à3€ i¯³?ˆïí3›è?˜'3î#¶â?'È¿Òa'â?•%ŸR;ê?ÞÍŠb×`á?¨6<…}‹ß?À1†g²?Hz aö>Æ?°bç_´½·?ôv–1Бß?*êAkŸŒë? ¤,ä|í?f‚6ëjÜæ??Sã•Ï×?äP"[NØ?„ }øöÀ?,œc fþÅ?€S7¯ùò¥?4+Ô KÜ?b¨‚¼:ç?ÒN†bñ•ß?`ùduÎ+´?0Ít@Ïž¸?<ÐåËy¦Ð?€ËÐþ4V˜?ºÿøô¢5Ù?Èö¤X·?ÐZ‰Ÿ Ã?޾"sÒ?RÞÑ“Ñâ?\R%zØ?h¯®1/ºä? ólJç?¸Ýp‡¬RÉ?¤u7`IZÌ?šV:Ä Û?àxÄì°oè?ø,»ezoÛ?8‡Í ¶aÕ?Îò7>vì?h­LêØ»?.ZÊÆï?!š÷Z°åì?Ø ÙÄe³?ˆéüðWíÇ?€.:JÁŽ?PŸq˜è©°?Œ¨… Éÿã?±âí©ï?èFO~§Ã?©¤–éè?d‡®µžÏ?jû â?ähîz¾ÛÕ?vô%«áÕ?oB éfØ?G÷éVÜ?PC÷ ¬Ò?¾:sÈýmæ?¨ ˆ;Ÿ×?QÉ„2æ?ð0Í03Ã?­ÕëÛóº?Äà\Ü?Û?¸7ÍlU®?¨ŽíigØà?NÝj×?’¼otå?ãøl¾á?Ýv$Ö†à?P«Ó9GÌ?Cö•zUâ?À¸_d!Èë?.Èmêß?¸àè-€×Ã?^V`å"!Ù?§¢f¯Òì?(÷ƒ%Þ?ª¹ÚbZÛ?PÖ¬í?bêÐU„aï?¼-Sõ}BÈ?H5Ëyb¢Í?À)Í—iʾ?:•â— cÜ?X¶š8É?œ£–ç?ÑýFAmà?€òÔò"uÔ?h“D—Ò?øôÞËBç?U)Îù¦Õâ?Þì–›ãíÕ?ö>áÅÐ? —Q©@í?ØWçiXê?¼hM.x:ì?U+jÒ?ÜeÀ(½ì?…ÚHƪ ã?à\PLFó—?(çÂ@Å‚µ?i@ƒ^á¾?èÁä¯Ufê?>,ÜýlÒÓ?š JÈñÔ?¾•)áÒ?¹à†.î?¡«~P§2ï?0œ†U—µ?Ôû‡8LÙ?8¿ÃàÊ{?ù™1²-ë?ß;0a;ê?Xq|ç,å?È›ÖÛŒà?H×7ÉÀÇ?²q4…'‹í?¬öçÑÎ?¼ «â7|Ó?x¥£×é)¼?¶˜|C¬ Ò?D‚G—eÇ?h^û+ Ë?øïÓ8HË?{1ÊÚ?D2N}í? ,K—­©?©76gè]æ?P·°¶BÅÜ?(Úkl£æ?(< ¾bä?²iýÏEÔ?ºÁ„ñ·Ô?Ž€®üá?ˆø’ŒsqÐ?ö(ÁL©Þ?»÷ã0Üí?@kyÄ­{º?6aycÒ?¨‡çJÉ0Ï?0œ¼AO¸?R© Ñ”?/PiTžÁ?pÆ‘iâÃÊ?¹xmßΪï?œëñõ;Ú?csLYý‘ã?À ò…’?<¥JÜè3Æ?H#n ×? ÍDÐÑ•‘?ðñ,,R¿í?ào#àò¤?”ùMGßà?²ÿLèÓ?¸ÜxëØÎ?¨}¹û‡â?ú—PCÎ6Ò?:Olc×?$ü/6Öì?…ù2{câá?p È×Aµ?é¦L×Wã?ˆ.>*ï.á?œ”ÄÒë8É?v€Ècß?¬y’¡4—â?0€PËP€Â?,/DÕ?N_MÔ$Ú?ÒdÄ÷þ÷Ó?^é·ÿï Ü?‡/+ A¸â?ÀzxË?®ÂÅ×ß?¬0d½Zá?æÅæ&¸EÕ?F&ì?Z1çW^þé?"5Ûà-ï?.ƒ#¬tç?yPã?§>â"ê?4t®'âÞ×?³ñÇusò£?ðqŠ“ª?¼mOþ¥Õ? {áQ£‹§?(¦§RÑ?ðŸt®=¬ä?H&2¸£ôÝ?êmŽõ[Ñ?˜lÖbƒŽÁ?ˆpš'ÆÁ?h?a\¬ê?æií®ØÜç?¹‹®`aî?þ µsHÝ?¦ÕÓ8ì?;h#×£å?6@í]CÏä?P Á0üÃ?h˜¡Ú·ìÈ? 
:Ñ”I?¸o8‘Þ?(ŸÉ­IÈØ?qN_|é6è?ª ˆÔòè?JÔÒKßßÕ?îX í@Ðä?“j-p½äì? .,(ûlæ?TrçÚgvä?Ð;”ަâ¦?Êh½Zdæ?V¤UÛÊê?ò§u²ÑÑ?"PTÔ¬Ï?F \±CÕ?¢xrƒÕ?¬:ÑnM<Ü?t˜5`Çá?ÊWƒ4í?³Xu‘´Èê?_ÝË üá?bæÉŸ3(æ?Üøg-—ŒÎ?´û{TÔ?'+Þ݃è?¨ëj¹ç¼?€[(çõ³?†’KõêÓ?ìšJ#üÐ?#ñú[e?aä9=¾?…0ÚóJÅê?œU‹“\Á?xáÆÇ?ˆ¹ŒãZà?dÁ3»ör?`…ë¸Ô?ÞC8fòØà?Àä^nò„? •Àõ/n«?I÷ÙUB[ï?Ìþ-ë-üÛ?æ–£ß?—–MÛ*Ùì?”¹4âŽà?×bÞU81ë?ç¾—Výã?`63m«FÈ?(§ÐÄ»Ì?25ÃIƒß?“\Ðòã¤í?¨áÈß:¤Ë?¨ìu'¥Å?×,[Ú†ë? pß—ïê?»¶O¹Zê?õuQ|KUê?„‚Çìéà?ÖHb°R\Ô?$ž1½÷ç?,:„8÷!Æ?€1—[c)u?¬®k ÝÓ?x —OE¸Ó? %úM~"è?X„ˆ2¬Ì?xƒóö¤«Å?@ò­u­-‘?Mw3âµ?â?ñŒBÏÔØä?Œ§¦Æ/Ü?Í#9¤Åûè? †^t‡]á?*í¬À¸Ü?w‰Ö}­é?(âq2ÐRÜ?Íxí°Î‰ä?GìKü¨1é?gÕ—°—×?ä¤hyRÐ?ŽA6ŸÔ?XÞŠ—ªÒ?€ !”@™? )†ƒî?ÙJ'YûÃí?ÂqaäØ?r.dê`?›Óç6®‡í? 3°ð6ä?X<{é¾?Ø‹D2`Ð?NSWNÌï?Ž#°.*¶ì?œ’Ñ"}*À?_˜ýûÛÍ?7 l‡óê?hÊîå/té?M/à? âavâ?@_Jþ°å¸?^by,óÙî?Èœ‚b9åÊ?~˜ „]Þ?ã@ñ[Ø~ã?BªÑ=YÈÛ?p§íwà©?"{xÆ4Õ?EQøÈNâ?þiZÈžàÝ?ÈÝßX*Ð?ÂtGÊå?øNNj8,Ü?<ÆÛqQÜÜ?ËÚ&YÉä?`óö”»?vZ-÷hê?`Ñ' ß?2·®êtÁ×?€µ$+É?¼Ø¶ºš;Ò?XXt6ånÓ?HõñM´í?òØd˜á?Oq]Œoÿä?ˆ$ÇãûùÅ? ÎªÉLM»?¼1îS.{î?ë óßUË?AÅxÕAâ?ÈY®7IÄ?*úF{Ú»ç?<ü‘*­ËÃ?”§íѾsÏ?¦â N$Þ?ê–Ý£EÔ?¬ÂËyCÉ?oÊŽÝ™ã?TJeÓÉæ?`r™$\¡?VÑËšRÛ?u[¼?ãa3:ð*ê?Ðÿ–÷pÓ?€THl|QË?¨ù»$óê?Ï4Ë'«€â?`¥á2Ë«?Kd¤á]¹ç?vŸÜí`ç?`/XŠ¢Øç?˜OÀ¸Æ?/ØTooŒê?Ça)Ÿgî?Ô-T‚•êÇ?î¼R:ÁÜ?lU+µ‘Å?.O,“í?xÜO„œ‹æ?­Ž¤ä]Ç?ˆ6y?KDß?¤:hâÅ?˦Oe¢Då?Sò‡Á ï?xÄöqhè?òÑñê(›Ð?Úš~ºª¾ï?]OOSÿê?ÅgçQà?e¿®« ™á?ö^1=8Ýì?v!²ª¶?„^(Júî?’·FxÈã?²ú% “èì?¤‚G⬙á?Àä\-]Ê?záÐ:…Ò?r@sÿâß?ÜËnRßâ?œ)ßÝJïÈ?ß æ{hï?¾CÿÛÌúî?è˜ÑhÀyØ?žyÇ({pÝ?lâ^­Þ&æ?7i(÷ Yì?¸~ë/ytµ?œÄ0¦åvÈ?héç]ט³?ôú€# .Ä?ù«þL â?.“©ýÜ? &ˆà?àßGÊ8¯?ÓÊJÅÁà?»× Q,ç?^eœ|A’æ?hŽ·Àãå?|ô-ó:KÃ?hJ‡×ù×Ã?ÌšêaaÛ? xØaÇ ´?4-_ÂÍ?Øæûži;Ý?ÞÖc ö$Ô?*Ê|T¯Ó?ø_¢•(½?샭ù®tÀ?)ºW ,î?B©mVì˜ß?¨Ö9 IIÁ?è8ªÚ?Âù0Dûâ?p?ÙG˜Æº?÷H/û˜eï?¤¢t]¶¾Ý?kn¬ò›à?Â^O‡ó/ï?ØŠ>à êÕ?·ŽTz$!à?¯±T×ä9ä?ªÉü’§kè? .-]µš? adF‚î? ©'·Qä?ð}þälH³?¨E‘&â´?®ƒÁuÑ?o`ŠGˆÆã?xªRÒϱ?胜֙’Ñ?8ê;BoÝ?d ½£z7Ñ?hœ/z-³?É®ˆAå?Z?”èÒ?Ÿž«šé? •0n“”?©÷„Xj5ê?2àóÔoÞá?€`ƒôV¥?^-­;“:Ú?7ûšÖâ?€G×’Ðev?/Åõ{­â? …óü¦Õ?ØGYMû|Ö?@@)GÃ?ذ>N^'Â?ió~îní?aI¦ëâã?6XnL²Ô?x;뿤Éß?l0óß?‘®¾¤`í?ó½°ô÷×?¢#Ý$µÓ?ÕuïÚî?´BŸûúÒ?aCš£½å?ÿh÷/9~à? 9?Znå?Z ¤¯ŠÃ?PqJqšá?8…ø‚ö~À?´N á3ÌÌ? €×!ªé?aä¨Eoç?-ºj rç?#ASDF BLOCK INDEX %YAML 1.1 --- [359] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/block1.asdf0000644000537500020070000023512500000000000022411 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev870} foobar: !core/ndarray-1.0.0 bizbaz: red source: 0 datatype: float64 byteorder: little shape: [10000] ... ÓBLK08€8€8€\¨¿7Z‡N%­ò><üІ«Éñ̯?¥MÎmÖØè?¨ãŠJDbÑ?ÒBã³+jÖ?F€>:cáÜ?Žgü¾‚Ù?°§v ÿ~¨?ôûé_òCé?`íæŠÉÜî?aœ,­På?'˜„·Øí?À›S­ŽPß?Õ“¦v2é?dq‚˜ië?ðçn¿‰FÎ?@MèqñZŒ?5ÿ^Ö¢Óï? ¸Ô$ðÇÀ?%±J‚·í?Q†©0Iä?\ÌÓÝàÝNÛ?zæ§Î=µÒ?øÅš­`Òí?P“Ys!í?9›&Ø? Ó˜§ø/ë?ì ø Õ¤Â?åÝÇT-Ý?l, ­Õ?*)YRß?=m2±xè?`œ™c@ž?ʳå¦Õ?&Dï¶Þê?D¬¥A›·? “ÿhY?À?:‰çl:Ú?²B.:ùÙ? ò|‡Ò˜â?±fÒ@À-ä? ÷îG%Mä?º’iFçæ?µ|Ϙ˜Ù?Ë|8Rª„ë?žÙÏ^B¶ï?w¶²Îî?÷)Fix é?ÇÏ6Aoä?Ä¡¢Þï?¼c8鱯?·ǘøÏî?à™ÌõDþâ?tô¼ê#€Ê?«WÎ-l`â?ÂIyûç?:VtúE×Ô? åogØ?0üÖ»Â?ît&hVjê?h…’lðøÙ?ÕJ6°?bô\Ø?÷‰Gü±?bÔƒCú-ß?¿€½Ddæã?ù#…_ˆì?æÓ¢pŸÚÙ?²ÃôýOrç?ÆE·ŠˆÙ?hé£ ß?œéë]@yß?ŽæhŸC?í?ÁúM¿Åæ?æÃmsä?‹@,…¾lï?( ‘­±*Æ?@«iÜð ¶?FH+J»ê?~94öÀ ë?ˆl'dõÆ?@? y2ë?gá7æŒÖ?!ý|a„‹à?Øçiƒí?°Œ3 »?ãt BCê?¹PMì…è?ìÁ¡pº4Ç?abÖ‡ä?"¸ 2CïÐ?ø|ù¢‚à?`¤®q µî?°Øò (É?²®›dlÕÒ?fîô…:LÞ?>øÝ‹’èâ?¨§¿Ä`¶?ɵ­5/ä?2” Q‹Ö?*Ô’žy&Û?è;¨yw¾?ˆ/åïD=º? '"NHé?—”N*î?ô¥ ]2°?ä7ó¸¹?Vã¦J(µé?%êW¦Çæ?¸Ôd“"È?j0éUú–î?Ê UP—cÖ?s%Ná?öÄ»Lë?hä–hÞ‡Æ?[¹¾Îè?! Œ!øç?µ@r®¨Uä?]X'Åáæ?–¶Jóô¯Ü?É9tè?¡{xoLvå?PPEpT Í? øGàWÊ?ÉÀ)¸î?m ®,ÃÇã?®uèâ?dN6ü¸¾â?ºÙÎæòè?@ö\r¬5Œ?›‘â…Èî?áંê?i¬øb­¬æ?v! 
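For orientation, the entry above shows the on-disk ASDF layout these test files exercise: a YAML tree (here a single !core/ndarray-1.0.0 named "foobar" holding 10000 little-endian float64 values in binary block 0), followed by the raw binary blocks and a trailing block index. A minimal sketch of reading such a file back with the asdf Python API; the path is the repository-relative location shown above, and asdf.open is the library's public entry point (the commented values follow from the header fields, not from executing against this dump):

import asdf

# Open the test file; the YAML tree maps "foobar" to binary block 0.
with asdf.open("asdf/commands/tests/data/block1.asdf") as af:
    arr = af["foobar"]   # array view backed by the binary block
    print(arr.shape)     # (10000,), per "shape: [10000]" in the header
    print(arr.dtype)     # float64, per "datatype: float64"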
Íå7Ëôá?æY 0šWá?À–x™íÀ?€û¦ï9q?cóÁ_»¤ê?æpVâSí?)ÂÓYéÜ?0·'ND^¿?Šé׃/¨á?Üð¡ š¡è?üâQ[ê?hÏLOt±?€ýfhhØ?›Gpî±Òá?¶”eÉ1Þ?ˆBŸ¹kaí?Ô³‰oÔ?2%@ Yë?Då !î? ào”6á?à㉇[ÒÀ?ØLÊò=¹?à,°ºÚ?Á?!$´ã?uÝŒ]Íã?<Ú—s±tØ?F0a¦‡?Px×[¼‰¿?€zeÇGs?<½,εùÆ?Ù[ÕD\î?¦Ã3_P¡é?%¿J‹®Äî?HÔ4Xâ?`iü³Ï?¢ÀrfX¼â?€Ç¨Û]¤ì?ö6Bï5ÇÜ?ðY¡ÆÅº?½r39è?²êÁ4Ví?äD+ÄBØï?¬†—â?DÑ'Ýá?H_§_Bä? eêÅ[å?0Hé?@uKnd–Å?ï C+|ä?Êâ6è«Þã?0ÉâI¶«é?Ð,¯ÓE6Ù?(û¥:0Ù?6†tAÛ?3"‘ÀMç?7Ÿm©á?®öoÏ4ç?dÇåþ.ˆ?j'[Ô?€ Oqrª?är‚oàõÙ?Ô㟧øWç? &Ö6oÈ?àÙgŠÚ?óõu)Þè?dþû’žBÅ?HaNüÀMÅ? Óì’© ?˜h 톓ß?TuŒI®à?k@þ`T-ì?Mt®kÐóæ?ŒHX¯? («ä?¬-žÜä?$%*Ê:ç?p¸Lrd»î?0æ×ìé4Ç?í¶ 5Û?x6pìÝ?x´ñ2ÆíÊ?¾8ìô™ä?ûÂôÅ9ªè?Ô-P²¸Ü×?úͽM"0Ý?T“à !é?#Y‡#ÔDí?À-˜î¦‹î?˜¹Ü÷«ì¶?ù~ r¾ï?Ä’ÖЬBÁ?…ì2áËä?FÎ=éÑØ?\ôÛmOÌ? tNV;²? Æ —Ó? ‰JJš?!o#Xöªà?0ïÿ_!Î?üÄl6LÇ?â¤`2A?¤‰ÛŽ ©é?·sØÙ2Äí?Ä/³ÿùÛÖ?üASÓ?·ÊŒüNià?ÍÌàø,?L­™—ˆíÍ?¸îþF—8Ù?Eù‚u7Êæ? $KÉF“­?¨PìrÁ1Õ?Ȥ¹MĽ?4´;f0iÀ?ü>›vê?àÒ`Ù¯?çAZ¸ñRì?y†ë³ÒÚë?‡S‚Pè?òÉÈj÷ŠÜ?„ˆ”ñûšÓ?ïÕVš4Õ?#6ìŠ@¤?PÃN·ÀÓ?‚;‡î¦Ó?8Qëy†¶?­ÞT&Wíå? ¹Š]Kœ?ˆ]ð‡‰·?¬ÂÏhë?À$J҆ͧ?OWJÞ?|Ï|ÈmÄ? *v—HâÀ?®>„4®ï?¸ÀáeƒÒá?(l™‹è?‰N`à?ÉýШ²¥í?pJæ&üÈ?aç?Í­íŸøë?O_$Aí?¨-é¯;•Å?ˆœN?jÞ?¾ )”^Ýì?xõžÑQcÕ?]ÚøOõxâ? Õ™ m§?@ŸLù‰€? Rùr?À?ñýìóë?Ž÷{þ¨¬î?@2>Ù½”?rü¢*;{ì?˜´‡Am¤¶?PØíÙmþ´?ä£BúÖ?g=àFúûê?¤ÞÝ·Èé?çò°‘WÈ?è|"÷^2µ?@¢Å´f²?l“‰€,Ú?ŵ†[Qàä?[æ»oÕ?2–ÁÖ?êÁ‘š 6Þ?TQÊ ³êÉ?`£NßQø¤?Ü_÷äÁ÷Í?À_Vám^?æ‹2:á?Š岉Ø?lIÌøÚ1î?H/'íuké?¢âtê?€É}£¬¦?*±P ÿñî?õš@^ ?²$ù[æ?.7M‰íÓ?†HkbQÝ? Ö÷Qÿ°Ö?ø¤i@ŒMå?üOgëtÏÔ?Ü\¡Ù¸Â?n¾ovÐ?KÿˆÇµÔè?øT_Pbë?CF•Ý(Wî?”––¬vâÑ?ã&»8»ê?J"¬2¨ã?*åé?^÷üE\^î?‹ Iìà?`ÌœÛ"—¬?lj6RWFî?z\¿ëÜ7Ñ?°ÿ/Åé?2‘a_mÞ?©U²ï¯Ð?6ˆ‡Ù·Ó?$’ÄËX-Ý?4KþaÓ?`JÈE©ÈÙ? ìf¡Ýî?©U˼Öaç?ˆ4¨´?h‘nKOoÝ? 5P;vÞ?èì^fܼ?Ô¶QŸÇFÙ?ÊžOê?ÀûH£“?JŒ¼¡˜ä?ׇ2¢q á?°ŸóR`­?ÃPÕ¬Ò?XNQ] Zá?fýü8å?ŠÛ­ççáî?uJ+ZÛ‰é?è"2½X×Ñ?Tÿ®W•Ûè?XßF ©ßê?pW‡”©?mì´‡çï?Ìÿ ø(3Õ?¤÷'^”pÒ?¿º‘ Ðûë?h;Ç7á?`&8H¾È?¨æûþZLè?1$ú¢Þõã?2d@Š&ï?ÞÀ2SïðÖ?pÎ;ó»©?C3èe‚„é?xÞ>|¬ë?;ãý^W‚æ?öyá,”ç?Ø_Ãxkæ?<(VÂÄûÇ?èAZÙ°êÓ?’ºMLÒê?Y­ø1nËá?m7[þ…ì?3ß(⥃å? 9ÃT +Æ?@Bº‚³?À|Ý{PÄ?·q ûÐ?¦²:+7íê?€ÿVyjºÊ? È2‘,4Ø?þÚ«¾Ì?«ãn/œ&ï?(û +æÈ?@$uuêä‘?®Ò~âá?¸P,æËuµ?`¦Êš¾Õ?äžAúðÞÙ?qÐd‡Ý?î+úGªUß?àòÂý/Ä?TË.0˜ Ã?W1ÑUhfä?‚À–Š™rÝ?nFŠƒ*ï?ŽC\/(æ?]` lûî?¦ð„<Ä?¢X°È«$ë?$%Ù!جÄ?24âT&Ð?/vC“ÔKâ?1í\ƒ3Âä?ކ/4Þ?ÁÝM›–ê?\‘É ÷ Ý?¨ }‡éNÈ? T6Ñæ?”.cŒs¢Õ?DÍ0 À?¨¬%&¹€Ã?ÛË5?ì?ÇôÙê å?’ }$G4ß?ÆôC—Ù?`®ùø¤?¾2²íܲÐ?Ô<‘SLö×?”Õû¸P‹Ó?8Êl2Ó Ç?÷EôÑ5é?šÍâ±”ì?ßSxlBç?¶§÷DÖ?p+¶ÚÚÙ?''$€&áä?6è¢%ÛÒÓ?¨t7å¬êâ?Äÿêx„oÞ??`ô¼¸dà?¸}—©›¾?ÜS¥Yëbâ?ä%ϻߔç?—>Øï©Øæ?Ñ׋4á?2M8ó0Ö?8 d|¤ÇÇ?cëŸá?_“Ú²M¶?STRÃê?d}ÖÔ$Ö?¯=‹üÊâ?àÚ¨ƒ!ÿÖ?»PuÚTì?Ç͹㭲æ?hóû<ÂâÑ?LK³î}Ê?--¬6Æè?Äãn6ð#ß?1÷aÒ•›è?Ð&Ó×I±©?Z/&‰Ø?Ö?|yÎ3*Ë?t‰Ì ïÒ? i.ïÐ?ƒtè1U%à?|¶¾ÌJÏ?ðÜ&±\Å?B§°¼€ÑÛ?R‹ÔdÆÓ?²Ê;žBè?Ö¨ÁÃöEî?øæßÓÀ?VŸpGϼï?WH“w Iå?îªÁç7OÓ?ûí$žå?Pi'sÑ«î?Ù9¾2=î?zÈrJG™?ø¥4QU’Û?ÆGw«ñ­Ý?Àž}¼TÇ?©•Ò’Úíç?Œ¹³z£3ë?º8Þp½aÓ?ˆ°]æÁ?ZÎ;_oé? úzÈ[Úà?pÚØ,`l¤?¬43Ã¥î?˜‘­ZÇ?ãõž ©„ç?ð´fÒ‰åå?úN8øTyá?(µÅB«ì?jñY¡â?À„8äçÅ‘?¸Ü 6…ŒÛ?bÕœÒÁPÔ?ü¹I Óí?䔋`¯„Õ?¦È´õÕ?ÂþFn$tÐ?N Q4@ß?X~µ”å`å?šÜ=£\Ñ?hjO­|©Ú?ïÈ 2eÑ?vÆ PÙ1ß?þêæÛIÛ?P°®SÍxÁ?P~Åë««?2Êêá? ÓÈJ´?.«º…þ™ê?ÙC‰ò í?×Õ±~ÞÁ?ˆF§¶?n–æîë?Ðú‡ùÔØ?ÔfzÛ€Ô?ïsS,‘ùä?)c‡óÏå?«z,ú¢?Ök"ûÅÑ?A±ÄÈÏ à?K†æÛ9ûá?Ì©ÜèÁ? ùÙåùã?­§E Âå?ãì é¹ì?À‚UÚ?Î?(Ÿžñ—XÖ?þ¯p*é? û‡Ù]Gã?`%,Öjè?‚£Jw}ì?œEÇÉ«Ý?.r5HgVí?|ž(Ut<Ô?R|B2çÜ?Ö¤‘®“uÚ?“™øÈæ>å?wùÏ#{©ç?ØuéáÔ?%y½kÈúè?мñ,à?Š/IÁÝç?níâ ¾Ü?~NA’ãŠÞ?¥x3Á!è?„Rkç?©±h÷«Žè?(<××áY³?¼ÕtòÒµï?5*Æ…Wœá?l­Þ׉OÛ?¯¯Â¶Í®â?x‹þôØâ?¬H*åüÝ?8Éã?è)Êß¼Ä?æ$éokç?yOWsì?°&‡û «?à(]ÃÐì?H=ÇôŸî?o+žÊ?М^ì£uË?‡_ð'îYç?H(ö“1Ã?¢žšGá#ç? ú+ɽwÇ?sÒ<æ?3"«ˆà?¼¯b¨Î9à?ÊM|—ÿtä?KÄIs5Ä?¤§tã?ôr{½Â?y«Û²½í?܈žØ?ÄÝ?b¥\l= Ó?І›dÆÚ?~ïÇÏØêÐ?ªÌçu„Ëî?à§F¼Çtš?Men>CÐã?”Ï~î&è?ðKÑÆ?™ý½9èì?€a°ÂÅÅ?Ø—Q2Œ÷Ô?Ø µÁ¡lÆ?txûXÃÑ?H $Uî?¨%ÇW=]Û? U¹uß?ÇÙ"éYä?ÚPü8$ä?Ì jÃ*Ñ?ßÀ¶dá?À9Ãÿï8ï?¯˜ZùØEã?Ns¢Â³`ì?.ŽN|ùÑ?ˆ±ÛÔÃÕÒ?è /Ôø»?XëšU_Á? 
Ä·ž'u”?½ÊC*4Jî?;xë¡Èzå?/û«øw7â?Û}?¨ÏÈ?:bçtï?á±ö‡–hã?KwJ×<ì?Høi~Ö¦è?QÏñšé?”×qFô–?óÊfgß?ÐúßgO±?³p«êÃ?kîz[ô¥í?€ƒ¤Uº?Å· O€œä?ý¶ù^ì?„c·7ÓÜæ?1‰£Q•Då?Úo®·›ÃÑ?`¨æ"‰Ë—?ܼ-LKÕ?Öª… ÎqÞ?çÉzD(!é?xÒUÌî¾Õ?fÒ3.Ú?HÊ܃áh½?¼O'6þ™Ä?â‚–·(.Ù?5Cs¤Žé?¼?‘ë—ä?N%‘MÛç?Ø„‡ó4Ì?6=Š]ïÙë?€-§f.¿?RöþkIÒÙ?vYx±óaÕ?9ÐÌPåî?Ð&E­SÕ?c¯6öŠì?ÈÓÇØ´?ã¹Ý8ë?H¹V…ó¢Ë?k­¢½Xã?DtåÎ<è?ä‹ðïkàâ?Éðg“\eâ?óŒg| Õé?Ú¾wÅ”ðØ?œµ *µiÍ?t™9àÔÞá?$ª…ä¾<Æ? ýC±{à?lx|™[?Ïù œ=à?¸¹†±„§á?3×Ä·óå?; ×9é?- 5ì?ðj‹­ëŽÚ?¬Ã\UÔÌ?ÀHJ;ÕÕ©?à¢%P™å?¼ðoéÇä?Œ$”R<ï? úì½»í?P±n€”J¶?Ъõ³-i¶?Ôã‡;nsè?ˆŸýȽÂÞ?bÞ€¯kÕ?€4Bã"š¦?lf¥—e0æ?²*]ÚùAÒ?ìEíBßË?$·†¯3]ã?qÃùÊä?ªu.<á?ؘI®–nÃ?èôÐJCûÆ?ˆÕL‚ŒHê?qƒ *Éä?Á¤=ô óï?î}„"í?P ;*¨¢?$ŸgŠ£Å??·CÛBì?@Ýáë&Þæ?aW™wå?>8™Åµ?ø`ÒˆSÖ?F}åø˜ï?óp9’¿Væ?@åëFl“?H#yÇ‚Å?zt˜×¬%Ó?N¢vt@Ô?܃hjËÛ?̧7ÿê²à?0Â+3¹?l–VÓïÝ?ðÎw7Ä?ìpã}ªµÎ?À;_¬†?á<œ¬ÀŒå?ðà (NWË?èO-Šræ?9¦Úêí?p#gÕZNÚ?²yÍ— É?¤aæÔnì?» Ó7Já?Í¿ðzÇ?ä®BáTBÍ?2Ü%ÙÏØâ?­G¾äóÿè?e£‡áèç?®jú—¼Û?9.´Sî?ؼM@ÌÜ?¼IújÐ?@ªŠpÒ1¡?86K CÚ?ô&fÁ?®qp€O¯ä?iî´ôtí?n^={Ý? Äglþþî?Ná{@~ê?8tìÞ[¼?98‰nÅí?ªrãr\?¸˜ôëž_º?´IäÝ-|Û?òhkîë¡Ú? [̦"kâ?gl#ã?|¯àŸíê?8Ð¥AÛ³¾? ÃáWIªÞ?‚ª dZï?äv{å#Õ?¦‘isAâ?À'¿ã‰Ì?–Q˃é?ØŒodÄÉâ?UÞ(?ëä?ðµà÷Ö±?Œ'AéÉ?}Ñà˜çjä?|«7Òã? O ›¶™?mM†“üë?@Ûz8s‰¹?.ËC]ëIÚ?x1ËåKå?¨æ=G»×?¸p ?Ú?Ç÷ÈÁ>å?ò°yþaˆÔ?d5.dõÜÂ?:£Žû AÙ?pPÜr;¿Û?‰<–YqDï?>¼¸D„UÓ?lUcô-í?z5_‚Ñ?˜B¤Iú¶é?¾zÎõ""ç?0†ñÍO2Ï?´ÊKÅ&§Ã?d»Ó EXî?Am<¤šã?8¨¢€'-Ð?‚jp(Õ?ºÉÔ|YÙ?0ö,0 ¨?ÖGS_ë?ûèÖ9Ðê?Ð(ͪå]á?èå…*Bî?ë›!ÿãÝê?Ë[ØbpË?n\wWÀÐ?°dfüȸ?¯{ש»?´éŒdï?Ž‚é‘âä?^¨Û8¢æ?Ü˳jÉ?T0yzù½Ê?2zhsPá?`ÞõD|à?´rÊÀÃ?›UL+fní?Ìç7ør>æ?Qœ´Öµ?ùEƒ+Èöâ?xó ¡M¡½?Pp’CGçÌ?¹fDu¯úë?@*„­ÔŒÑ?µ„taVÿä?€vÎjD ?Àò]Ÿ¼\²?ZŒ»Þ›#Ó?IÈiÛë?ò+ìG¾Õ?ˆÅ»Âè?¼Š¥×?SéÀag)â?qíöy é?@ -d†È?°@ŸÃÖŸ?Ï|_xÝ?`éPM7•? •¦f?ˆ†USo²?ðÝ·FõÀº?4q$éjã?œ*õúÔ?€€ºIr¤Ø?β}¡–Xß?]û•z¥î?ÈÜ·òMÏ?Æ«ÆÍžì?@P˜û.è—?‘hØ>ã?èâyéuþÅ?…ótqê?’;ˆ½šÖ?übbž¥Ò?šr[ýçÐ?§¢v£E»á?Ÿ%Æ ‹æ?:ö¤˜Sá?ŒÞåraÍ?žu³Úà‚î?°º#Æ=Ű?l•Å)_Ø?²1žœà?Ç€ 3¸•á?ÓX6,=Ëï?°¹„«¨W¯?‰Ãüi×Ü?â˜`C•Û?Ðze—'È?hÈ6Ç·?T¥ýb}!É?T{°˜ÍÈ?Р­ÐÓ?´:tA¬á?MöôÇ+Éà?´&„šß?G#.Ë6’á?€&üHÛç?vx¸OtíÓ?~]&®‘5ê? Ü{ì%Ë?n†øÕ?¨ô+òà?¯ô‚µcê?€ÕØÚ?ÂE¯!ÈyÙ?sùî'òÛ?ð 8\„Ë?{Ó“MˆŽê?YH¦ ½,è?2ÑÿZ|Õ?Ÿv¾öå4î?Ðò7¥U Æ?Ð@ïå?Xƒ6&ø(»?˜0¶ á?Mäa7è?´™á÷ÿ¬Ò?pìeø!¾?ÎÆWß·î?@ÊkËO¢Ï?R;{ –ê?†NŸ•{é? ñ‹g=zÔ?&QLï?Ä¡°ËÊÀ?€=àSÀ^à?oyiíéí?„ÄôÍ2í?¾ÔªÆíë?ØéϺ¤?`û³>Ylß?ÄVGäâ?ÎÄä2gÒ? nSÞ§ ?þ1a'¿Â?Ÿ *sS[í?@ìËoÇm”?“[Ÿˆ#sã?ÄÒ“qñå?4øî²Ø?(ÏÛøëÎ?>t ãnÙ?\¨Î_r¼Ö?`cE=¼Ä? !ÆöÀ à?…s¸òSâ?PQ•µ±¾¡?¦«5ßcÜ?bFÆË(^ì?ð·äfrª?àŠ›á~Ê?&ÿ»¶NÇí?Éé.†qç?%ŸQŠRFé?¼¸<¢ªä?€ åTn3Æ?v]~ oä?$à ‹—ãã?{.¿™û±á?ü´e* ÅÇ?Z%øË¹aÞ?ý•zBÕôå?•e\ lì?…|Aʵ?(Ç߯2ÙÁ?1Ðè>+Èí?\I•€‰$×?¸cvÏÖ?hš5@»óì?>v Xãâ?ðÝWMQì?\\Áã±$Ç?‰"2Ù8œá?ÂIàP©Aî?úT-Ž ç?)´ ¦ýøè?¬dÈÄ”×?·_Ù³E ä?ån%P†Há?Lç%~FxÉ?þ Ô,Z§ä?+%_ß?’o£@Òä?×ñÛ¹§Ï?b$¥¥ Û?¬™ Pä?.+p<&vÐ?ÀWŒN‚¬?dö«vSì?Ol&»ì?lÑšW˜‚é? H”L7sì?n÷Ò³wVÜ?Ñ:ÈN›ë?ëZG&®Vë?rJEê?‚=µö!â?•GÇîñî? 8)sÏè?(ƒ‹âµ?Ð-ÝnÌ]½?°O.ªB ×?¯= j1ã?’uNíÔ?ZºZˆå?ð|R-q±?â”.µÌë?À5ÓÑ?ÑA,îônï?í,·æ?gØÏ ðyé? PC»˜˜?¡ ýyä?ÂjO›>Ñì?`9O#¡î?¨ìCÄ)î?¢ÍÏËTã?À' n³?8?éùÙ?€ º[w/?EÌÓ”ƒâ?Àc8Ú…Ç?ׄíŽ]ˆå?PbˆFÚ?@¢ÜÇ~ÂÙ?¤Ì4´lbÌ?ÀÉ_o•sÞ?òû5]¦Ú?˜‘â¬ÑŒå?61¯Q-Ñ?4-X^‹Ù? 
ê]ƒ …‘?à™jYïѶ?, jS?RÚ•gÖ?`iA ª¦?~SêóÔç?,}©¬ùÄ?Z¡ á?ûÆE­çƒä?1Ö •—í?[=³€r—ì?,.ç¿ð×?xöÐâÐç?<[¡Øøã?; i³–ç?Œ~†vûà?Ç"FAöuä?Dí¡(:¹î?ß“d(/æ?3¸8J™æ?·˜3ÓEˆï?öž·!Íì?"XhüU,ë?ø¢ùJÔ°?Â~›ÎÜ?A+’ òé?Œ;Ójë_Û?ëoIÒâ?L q°¦ê?W u:Å?8°GßȘ?À,g®!¢?’”ú÷‚³î?´Ày¨üÇ?äÆŽ+JÂ?6Bùàíì?°±öãô¸?zp‘¬Óß?³û­ÅAï?„AÿGûì?Øü³õ#Ñ?NAÂ4)Ãâ?§Lø”íï?H´£(Ê?ˆÉ¸ËÏäÄ?|^Óù‹pÁ?ð¡wäEl®?eC¨µ¼Zí?÷ð›ûç?cq‡Ÿn›è?l…aÛ+¢Ú?h˜íýLê?ðÍW\J²²?b å+ÿí?°Ð$O3ת?á4_+¦ï?dÌôäêÓ?™‡idÓœé?°Ã92ÍÍ?@ò/@£z?JðŸå?¾®ÿ!›Ôá?˜¤ø˜2WÏ?hG·šÖ?0k™aõã?h!Gdá?D ßÅâ?ºâß´óè?ë³Ï¹ðî?ø£2éÄ?r˜mµ1iÞ?2DëGSÑ?QZêÆ=æê?æ9u6Ù?Fè}O„xè?bµÉ%EÞ?ìtÇõoÐ?0>8ŒB\Ê?àÕÞAaˆ´?z}âY˜Û?øáµ™mß?Úò8Z§üé?2m™!D‰á?̈'V»Ì?ó˜sG™eé?yÅ^#­ç?Øpº ÃÏ?æ^ý0êá?2°7>øøÙ?Ù2óÍ?é?}øzã?6îõË šé?^~‰—ßËë?q,¢Ø9ê?Ô¨}æ•aÁ?rsʹ?4 Vì!å?0mymeå?~°r<( ×?`‹ÿ˜£˜¨?ðÌ{>­è?>”—ž í?·Ó®têÔ?`‚ü¿Éàä?`C³"£˜É?Y€< 5é?88ØÆÓ?ÉvBbóÀà?1“³¼ê?š…¯N€Þ?”«¢²)Ùß?Jnýïç?{Œ§q‚à?-K>—:±ç?ÄÇ Má!Ù?”ÞÔµŸ}Ð?Ë?ã‰ûç?wJÞü–Ûè?hªð•Ëçä?(Q~—såç?–äé÷Ô?[|H¡„ëê?Aa¼CðZë?T'Ô=HcÃ?£ˆAHNØì?|Šœá×È? ‘è¶Ø?_ÝôQŸæ?†°q¿-Ô?0%ë™yì?0MëSúÎ?ù0 Îã·é?e©[x.¤ã?bÀü˜2åÒ?Ÿñ‘æ[ì?YK½‘Uç?ËxL+ÞŠæ?#]ç&(á?¾>ê‚o;æ?Ò#½MÖ?SW Ñ£å?r/£6p}í?\ãôbÔ?U[åjòå?Êéån¢WÛ?0þf$4¯?üÌÂàuÅ?h¦˜8?-Ù?áÉö¿ªï?(‰÷zT°Ó?ïÏ®kPê?û ®±Å³í?À Ùûð½?XÝTÓIëÝ?œÜ³O]È?+üfNÔžì?~õ£ ‡ ì?̱·%KÖ?”ƒßÝé?hÕ*©bç?*÷cJEÉä?Q~…â<ì?_É&Ú&Ìá?jönI~é? Y?ùdÒã? ŽŒì¤?ä?ŸÐƒ«°÷á?ºiw§ËŠÜ?„175äÒÊ?ÝnüOxá?n€ç$ÞÝÖ?QB†%_ ç?øZÑs`ëç?Ö!4`˜ë?´å¶—RÍÁ?5Vçc4Åà?JMhD[=Ø?Ëzhû=;ã?8«/Ø€iÏ? ¾²1·¾æ?PÞXž1õÝ?º#ïµÙ?ÎjTÎß?vqf>[Ó?æÄø£NöÜ?sÌš ã?h˜Fšâ_Ò?Ìï x­è?„"«žt3Ð?LF°h=¡ã?°º±¶\ÃÌ?Ð/ÂeMÁí?/C½Sxòá? ʱùÀï?ñ²ÿNë?pæïl ðµ?~Ž~úñóï?͈õ>ØÉï?¨‘‚€²?WkŒ:¶Õé? "“Õ”ê?•vU­ÕÎé?ˆ•éeÖ?𘿠iÓª?öáÀ¾Õ?Ü7׉2‹æ?×MŒvkî?”’ÎÀâ?ÇFŸÇU0à? {ùbÇïà?”Tðg€Ö?fO÷P¸ï?ÅÛèýîzá?²] ™Y‰Ô?º, òÝ?ŠPUî?nàÝVñí?a/‘€6ë?êå¤å?íg…£ehâ?\“Ù”€ˆÞ?ZÜúºå?ŠlYî?äR!Á?L©R™ãÄ?`q‡eÞÉ?ŸoRÄá?0õßhÕá?“9rC«Uã? <¿/¢? Ak÷¥$Í?Ưb9Þ?êÓ¥ºØ? …2r ã?À Ü@ÔŽ?ˆ¼?èrm`Í?ŒyØcSÐÆ?€%œ7…Õ?x•«ÛÕ/Ó?•L ÎÕè?¸Ç(mÚ?dâžµ¥ä?ûÏw›rè?âdõ/HŽÕ?T}Û*•ÖÞ?œ.ËÞ?@;¬Âhµ?ø±ÙžÓ#Ñ? Óv†#˜³?Ò3WÇÿ í?$®ñt~Ìç?܉)8®?aŠÎPtá?P›p‹í"¦?:÷KYiè?ԨɋÄòÏ?æ•·Û‘á?l:0 ʦÐ?;³žKë?8©UìÂà³?à*¼2¨Ðª?‰âUÏ’ë?{Ñu rí?4¾ "’î?ÔžS\ÇÓ?núÿLÕ?ç 'gæ?´t|”Ì®Æ?‹ŽÕôhè?üeší/Ëå?T¢•³ñžÝ?šØxàf,Ú?@¯á´<€¢?hŠÛ„øÏ? Ñh,-¢?ñ‡f>è?ê4Ý,Œä?N¥g½Ô?‘D™*±æ?Æñiì.¹ë?Üst…Ø?ø*‡þ½Í?ÎdB}îÚ?Ū‡çdÀï?e…¨û.Â?_4x“Zfë?Ô‰«»Ö?amâz4é?vf XgÚ×?è¥þŒ8ˆÜ?¤›B-Šï?6Õ2^cè?®AJ4ƒé?̸eëiÇ?!ñ™¡¹.î?º»›œàä?@–¼Ù 0í?D'ââÉ?dOÅNyŠÍ?ÄÖØ¡°ÇÈ?8ôA·Ýî?äTžchÍ?€ÙhÎk†?FŸ³GÆ”í?˜ËíÛ³Ú?Ô-q* Õ?ƒÆ};¥&â?wA´;6qç?@×øZð?ïð,-wâ?àL„^c(Ã?Æü€™6±Ò?¨-ÃTø·?Þ•Ö€ Ší?g¿>×úþë?Um,ÂÃé?R=±1øûï?€nÏçÓÙ?PØŠÏ2lÇ?@ÒýïE‘›?ÆGo/gþî?²XjÛïï? ¡yà»?´IÕ¾Ç?oùÿèr(ì?ÞŒ¹¥ãqã?üVrájÓ?[;¨0Ñâ?’LHhÓ×?ëA‚>ê?äx>¬@Ò?PŸô£e*Ô?“Ml¾¦ç?Pö2—Ø?%ÈþêÛå?î$8 Û?̸…¡îíÁ?þ}»»Ð?T"l5Ó?h›qâ«Ê?寘è?¬šùöê?+‡9w’3ã?=#{€ì?àÎ¥Ä%JÒ?0MåJ ¼?U[tÕßâ?H$±K5?ul- ¶î?{#UæÇ?Ibk®Ãâ?„ºá§QÇ?´-M2•ÉÑ? ƒ‚ȆÌ?̸Hø|Â?F¿äŽÚä?—¨ØqW'è?d9· 5hà?V`16aë?|ίÂù±Ü?¦Ú÷©Cmî?pªt÷þÊ?ÎJ½Ÿfï?@ÍÅ–™?ðžTè?Þ?xügJX¸?3?X=è?0ƒœé?0º”¤ ?”IïðÊ?Ž+oÅËNä?šbðoa ï?è²,‘ž4Ê?ò°5ÎP!Ò?͉½©}ç?Aÿ'Qyæ?ûœký9¡ä?ɉ0^çå?!þ¼.è?ب} öûµ? js=(ãã?@­Þj»¼?Ä_ص0Ýä?‡YJOºä? 'ýì€ã?ÕEwI\|æ?@-Ñ‡× ?槃ld‘ì?–¥Hï|AÒ?ð¼•k^ ¨?Æ'i‰ÜØ?;ŠÄítã? ÛüI°¢·?Tæ¡­~Ê?ÍFî@§ à?¤»ûf2Þ?À’"·±ñœ?b<](ç?êYÙ9rê?<]˜à˦Í?VFŸ!Õ?Ô¬ç}õî×?ðc¾vÞÚ?ûí+ T·?Š8p ƒÞ?\ü—µ]Á?Æ£Up)×?–¼VE£?Þ?pÉþ <™­?B?h @^á?i¹†çj¦ï?x}#cY#è?ab¹9°óè?PÕ¿òëÖ?°WçÀYßÛ?È55 Uo¶?¾¢5JÓ?à£SÐÉî?ô[1DHì??LVˆ‡é?ð'IéÊÆÕ?xqJË4aÓ??nÜÑ?лa㑨Â?@C›“uý”?2Ÿ»¨ 4í?±Ù²­ëßã?ÞuúiÖé?tÝ"™•öà?J§ª~77á?´ú™seÎÓ?ÖxµÒ…Ü?¶¿mÒ6,×?2ä‰nòá?QTçèé?}ºAˆôË?Ì[æµÝ Ð?<)?U¹\ë?4©/úíåÄ?ÆøRDÂâì?@¸M!b„?ùí˜n¿ä?Úˆb¥\Û?ü%Ž¢qà?Œ&ÚÓCÌ?tÌb(D í?O¾õXâ?YŽ“îñ¸ä?Ì’蔚Õ?0¨›Ü? W¼Ì¢ì?KÌáaôç?HÍÜÐ^ªÚ?ð˜.cÑ©?øÔ ‚—½?¥=éýï?(~DÇ&~Ï?Ïû®ÑÉí? øÙ¥fuà?p‰—sã¿ì?й²Ð=æ?ÃÂÐQÖ?ÈÞªCÝ?——•MÈé?|ÜÙ7³oä?Hôm>ÇÄä?eÚ2 Ïnç?d›š*„wÇ?ªcõí„ðß?÷óˆóåã?rò_9íÏ?W-b„¾Âá? Ú½MMQ¡?6’¥`ÓÐê?ÂxÓ1 ×?þ(ÏbðfÜ?@\#è~ç?sR çDå?ˆ7ýkH;ç?Êk¿¿­Ú? a¡˜Xê?P:븴Ü?Ý‹#Ôè?Qd0 ʃê?ŒN½¹ÑÄ?æŒaoƒí?¹Í2¶ŽÕ?9\fªT è? 
yý¶#¨“?PÀ9ë<‡¼?(Í1èƒÓ?„0É]È?B·»¥†Õ?ƒ}O£_æ?`^îöÊ?ñDDi ¾ì?êJÄØiuÖ?^Q½XâÑ?øƒ:ü[¼Ë?F¢Q±\”ì?å„ õà?·µÿõwë?…°syyï? =w…óà¹?XDFœÙÍ?nŠIµuæ?~Ză¡¶ç?J`mÑbà?tøÍËêï?BŸö (Ö? Ô+Â{Æ?‡a¼Ìæ?j33«ÚÖÛ? ryžYÒ”?¥K›IÉ?·%Á¥Šì?(Ñ~ )ê?*N‡v¾Û?´1oòà?„ æ‡Å?-ícˆ)Õî?ðàu:jº?àí4:¸×?vúïaíÞß? †ª1Ø«?N&ÇÜ? æ/ˆ1ê?‚ÃÓÝ?¦ˆ!°Úz|ÓÖ?Ð??f§?⬹¬ä?¡Õi>±"é?Ú¢-F»ë?€’±¡Å?XÃÙRûÖÀ?ò ú™Ù?ÑxŸýËá?Lk8²ÀÀ?½÷áI Âá?:°[ŽP»Ô?,s!´ &î?Wûeº‹Óì?º c©"è? ­`óé4™?`ò€é~«‘?”÷üP„èÇ? )|í~.æ?F®kOÍÕ?86ªÊÔ™è?øãLW±¸?Îå1.è?k@ŠI§?û¢Ž£›Ð?è©?Þ{Û?tZ·«æ?¸X°Km‚Ç?à©!al¦?øF~#€Ù?’n“7ñâ?Ð8NùÇ«?pôçnèW¯?Á Ó×Ýæ?ž]ËóÖ?ú³ä êgè?ƒÀÓ Vwì?Ø&€RH Í?@˜çjñÃé?]…æv/5ë?ÈB¸^Ñ?7¯o->ûá?0%:+Iú»?š€ n€¹Ý? 3´DqÄ? r}:Æ?0P„:¼Ë?‹ MùœÕà?îÐBpÕ?Ö¿Üb(kí?Pç„gÃJÃ?eÑi°Á:ä?Àø€Ûd?]ÓÓ§ææ?-ü3Ó`©è?4Øyï±Ø?üL`é?`ƒtÉÎVë?J«囊î?úÐΫi/Ò?,WýjhEê?~fzYî?Ÿx&wqà?©‰Uð¯é?¼'°‘½RÌ?Æ@dËúgâ?øŽòf‚uÅ?ÐZܘwe­?ŽUÎןÕ?`²ôΡ?É[§®¼Ý?OÝ]“ê?  9£¬gÕ?îáà?6mÎ)â©Ð? ê+:Ãê?0‹« FûÁ?É¥—ãCßä?صU(lË? ¯`ï? >ƒ–‘?ö_,ŽH‹Ý?v(—ïà?³¤ã?zò.˜üeÓ?r¬8aví?À6J´¬›?..8úäÞ?Q–Í¿«ç?ŽŽœÎVÙ?d×D’§yã?àU·Ä?nT§÷ zÔ?-¼]Aë?XM8±.º?xÍrFÈ?äLUZÙ?ÖO°a.æ?DÚ»3È?²«‰uò„â?ÐÿÄf¤Ú?ª·µ©%Ý?€äb·³±ã?(r#îw³?ÌÕSO[0Û?YÕ€à?¾¦yUv3ï?4# $y¨?ð‰]ËÑÕ?-ky´úë?¸{^ݤÃ?Ffþ×ø’Ü?×yª Ï?Ê¢2ègê?×m`nÎ?¶ÕÄAùß?()ÊpDîÌ?ÞDSŽÑ?`‹KÃ? ¡3Ûé?`VÇ­MÜê?¬2«iò§Ô?3Ô¶¡í?f„êúpÚ?z¦CþÓþÜ?ß/lBï?Lù¾ÂVë?ƒ\6ݦæ?|Ýýp±ë?¼þsŠ5=ì?`qÍ´ý®?“Š,Ì^è?âZþ5U ë?@‘X¼Ã?ÀǦ‘žž¼?lˆ­=6wÙ?Pö³Ç£º?vYÈ$üÞÝ?6ôÃô å?*—¨kÑî?iÁz;Ý?ú×ßkdÝ?*Ø$g‡ Ô?6Ců°ï?-Ì‹|w‡é?lBú6VÐÉ?¿ìÁ0`è?€Þú?r?rSü}é?«b‡j¶?À˜þóïîÆ?ÔÁ·´½>Â?9kå Í è?º\ÐWÒ?´)døWä?bLFxÚ?¬ƒ“—Åç×?`Õ6’Ö Ê?ÀçÞ0\ë?Išl.î?ýáhej æ?˜ïGqdí?n‡„ròrï?¢$¦üs\Ù?è=Hƒ˜³?¨ßú\ìê?bò;R7é?£éh‘†9à?ÂGÇËï?bƒT.Ô?Hj; ¿õç?€‹RT~Öï?s0kcçÌà?’xb2CÛ?¾]  °·Ó?‚úåN {Ó?¢9aá"–å? ßÌä?à{¶ Ów®?TA@¯¹oÞ?†•ïhœ–ã?(½?fm?p6¶BBݶ?ÏNxã?xíT ¹¿ê?pq>ü­×?€áÕ8\§?ê,Ñê*ˆÙ?tÜG<–à?*ÚNùÞÔ?ªÚ㣋ï?4 Y^Ù?øy^ã)È?8>ŒªÈÇÎ?²¤“× ›å?sÇáäÂæ?¸ ¢ËÞ ä?f+EMfJï?YSàºî?ƒ`‡(Çã?“ps»ùç?€ž£ã'œ?ëÍàqœä?4‰`u—Í?€åÑRü·?ÚZ;(“Ð?(ÌsÏh?öÆí›HÛ?)ù¤º#©è?6Â/·æÕ?ËÆE9ˆá?{òùqÁšé?äl톎ðÒ?üÔl éË?±h­þuÞ?ž$ „eá?`¾îBLÃ?LOáÙE²Ú?ƒ §Ê£á?H–Iº4’?% 9 °ì?°¿àï†ï?ðqH ÇÙ?ÃB¤lÄç?˜>,Êää?¬xÁ×Èà?@êʦ[6“?@³ÞRXÅ©?HøÄæV?Àn¡3H¾?ðÕˆ¬iG·?¢ïÛ¹ßmÜ?0he‹eá?bóöfë?$ãa@ Î?ŒÁâAQEå?°€Y;¬Éº?8©4æ?€Ð½úFôÈ?£d̓ç?䓇‹HÔ?ˆÄ©·ôî?(êœI˜ž±?UœžÔÿží?ž*åÇ[æ?Ž ÇIÁ?‚«`ìçPÞ?mù­ÿxé?ßÒ>ƒFå?•tEi8Uá? æR'Gë?ÑðÃeöÑ?pãžô¡?Òyb]àõØ?I>aæ?Ò¨GÛî?× ‚ é?OjmxYôé?ÁPc£8›ì?®«dÿà?Òp%€Ü?–ƒk"%å?¼n"wò×?„ý™YÙ>é?ø¯aÁmkÊ?¬_™mlÃ?dxLÓÛÒ?ÀØíHdà¡?X‘¯«ºˆÅ?1³ŠÑ³é?ýªB÷wâ?쀉›Bí?^>ÓPâ?\ìÒ‚` ä?ÂŒ1™î?Ði)wÀá?TkÙ¡ûá?+¨âYä?-ÿOØgçî?#ìäI­ã?Þó1þ{uã?èBÒ~´sé?á[÷,º?0К¬;uÌ?$S@Æ#æÕ?@Èž²½IÑ?$ÅÉËíöê?Ô &÷ië?Òåw"í?’ÊYûá:ã?(¶W@²?äÕºÛ=Ââ?’ëØGÍrì?@»:U§?£I§È±å?£n¥—ì?*xìÖHä?NÞ¡FÖ?PÛüêŽpÀ?œF+r‡Ó?t(¶ã£Ñ?‰¿èÀéÏ?à›tjÚ·?È6´>_Ö?¸˜P&Câ?¨ïŠ.þÁ?Þ< <äï?Ê\=èIÚì?Ú2ÓD•îä?è’ j§å?Ù¬ë:¢ëé?ØPJ–ý(²?Æ71¢‹Ù?4å]"Æ?¤†éÉ-0Ï?@!ZqÜ?n rÌÔ?ˆ½öxQäê?$Ø ÛÃOé?:³¿þFµÚ?N$¸¢áî?&–V$sFã?ýòÄ'Æ¥? §à/Çê?s4¶àOZê? eôôÁé?l½¾Ä&õÞ?sÒ¸õš˜à?°ÎF°Ä?¿<\x´?°qHÿ¢§¯?$+´üQãØ?¸d¡ƒ!È? t”°2?ïÁW0§Âã?¤†àõ-òä?ÔܘõycÕ?÷?¢ùí?gæ¶«êié?eñîÏè?ž×ÅGß?ZâÈ¢í?ŠïºÆiÓ? &¹¡Äµ?€õlaà?€ o=W˜?,×ʵæ?x:Ä ÕÇá?œEèXµë?ÚjNÛ‘ç?¨hUïc»Ä?g× Xëâ?ê¿D3/æ?Ø|ßœÕ Ó?â覘íÕ?Dƒ$ƒàÌ?Ƶ }ãMâ?R<ìgÁÛ?É×ö NÅê?Vèù’g—Ú?þq”z†ß?Ü=«Àséå?X*‹{7>Â?8kÐ÷£ŠÃ?àL|…°Ö?ÌE.ºUÓ?ýµHôR×î?ÈsKAÃ?Há™j[Ú?F é(vã?lãô‹XÆ?âéI­iÚ?vg‰côŸÔ? ™)„#¯? š·XwLâ?/ôù‚ŒÝì?dÿÉ*râ?ÎU-àŽ´Ý?ÒÑu÷î?¶¥4yÙ?2kÄ´,üí?(ôªVpUÔ?b(ÛÈç?géÀïÃèâ?äð"n×kÄ?˜Ê˜„?À?æ¿I.&˜Û?EýT?\! 8Åæ?@†—ÿ¥©?]v×öCæ?ÀSô¬‘à?øÀ–¬Á?£vŠ~5nä?‚À/0…?¨“Ëûò¸Ù?ʱBÆüï?ž0ÂR°ËÓ?èncë>o³?€ç½gÙ%ì?Ø›‹»þË?Ð ×~ ?Ò5Œubß?üuÝêÔÖ?•ÁV)d¬ç?Ð#ýÍÙ¯?T”5xIÙ?ðÄ7ÎýíÌ?–³+†Æ¡ß?Š0Vm²rÜ? | ²uÙ? È–ú§Â?àgê¸?ð…{Œýä¸?ÕÆu>iªä?´BÊÃ?ÅØgϼ‚ì?Àhlt5mÞ?0#‡ã_ë?ÀíîÌÕnÌ?òÿæhÝ?„`uï˜Û?h\ïä?¤9™qâ?@Ég¶?-ZF ï?JNüFÛ²î?Wgx!‡ïë?Ø‚}&écÕ? 
LæÅƒ]é?®ùhœÏë?màâÎËâ?Kßq5®¸?q¡•í?†Ñà¸v8Ô?™@¥T‰á?Tñ…° Õ?€'IÛ{s½?ÜHkUјÊ?2äçÍþtï?`\ðC…·?>/Ÿô,4æ?ØßDëq¯À?¬/!¬mRÙ?òó# ¤ñã?´l.Oæ?Â57ŠËÇÕ?T€Æ®Ã?\€A”ëÒÎ?ýõ«³ë?1ÙºGàlá?|ä6å=Ñ?…nÖ—ÉQæ?|¥év@VÚ?ñŒRï?íÆ)Œv\ä?íQ°©]Kâ?P~uTAÕ?ÉãÕ£±Šê?„²#˜Þ?¸.-°sÇ??<ùDÚ?îP¨,ã?Œ7(ô·?Dïzòî?‹+Þê?S‹ÒbGå?š ÙÖÈÜ?wEä[è?À`FË)‘?i»§ö¿½?Ó-h¯)òâ?ŠŠpñî?¶ŽrÔ@}á?Ù/ïï?ðœY±?–Àëã¡Ð?¬`%uuÊ?Ü ø¸NJé?0 ZO–á?PRP︪?üÕò„¤Ç?x%9}·?PMK‡•¨Ò?$´0œ»é?ÓFh{î?ÝT9$ Tí?•«ÆÃ¿râ?ªvè*]lØ?ÔÜ÷ |Ù?%=;>ÂNì?šG›P/€ë?T?ÿ7nÙ?`¥˜Žéçá?NÙUÃ{BÕ?Šy®Ìtºß?àêרQÞ?[‡Ãçã?/}“Âì?LÐ)ù¡Ã?ä!Î…9Á?(_¹yôhØ?yi¢~@Üê?Оb¨è?Œ*‚É¿ ë?=`‚d±lä?'»ª·î?lš´7\ðÆ?|)ʺ©kÈ?Ìö[w ©í?ôŽ L’À?—¨ÈÎ ê?rT*[üvÐ?u{ØöÿÍ?¶8Qsé?ðX}›5Ó¢?˜ÆT(0³?«DÍ9¢¥ï?œ[2 ,Å?²-¹°ZÔ?Zû÷aè?ØÈiÝÿÆ?`òÉW ¡?¨ÙÊû¹@Ô?`¢*¨î³?äwed<Ú? TÂ(²ã?¤X€K°æ?•‰ô6âZî?R;Ìâºæà?8õp)è?ªF÷ûí?$‘¢Ë®Ý?$Í‚íÅì?8à §zÎ?0‰EL'Ì?Õ%!Âé?_j]gë?þxOnç?•-á%É—ä?¬Á”¸¦ãÍ?;ᮈÛ?£%°ºé?ëÿbzï?húšÉšFâ?Ý s§ÿï?¢ö•ÓxÆ?|p¾º"Ñ?جäTßUÊ?É”C·gÁé?CU°Mæ?j‡IIyË?V¿]ûÛHÑ?xÓFŒÏ±?R€|æ¥<Ó? £ªÀMã? .h!ÿ ì?ãâ:Äd“å?¬á‰é€¹Ü?6:nÓÉç?¶¶Æñ½Ôß?ü;§ø€Ö?ï¯&õX=à?ôò0wèŒâ?xœK °w¶?Èu¶‡¤³?^6küÖ?¹Í…üœá?­S~î[³ä? ±ý•lî?Å8²Nôä?T0ùDï?S†%¢ç¢ç?àþ½V®?Fµ“$~›Õ?Ò{YÙÓ? ;ôõmc—?Cl] ±ë?©u>ù$–ä?gH°X±›ê?V“ÀÓ°æß?[ã£;K€ç?‚œOõVØ?èJ`óùã?0›açî}í?ô·k9qé?ÞØ²a˜aã?pŠ!˜Ïà±?äαéÇ?HŒ«þï?øâZ´,íÍ?(/Ñ7·«ß?¼ÆZˆ§íÕ?ŒÒ¿ ŠÕ?Ê YYø-Ú?üI©qñÅÅ?˜…íÝÿæ?8:”Ê?Ö¹×€À¿æ?̤å¶Ã?¯T²Lé? >lH{å?ö_CÍÉí?Î~&°ÖÝ?ÔPIŠgè?zíÔ“èKë?ô%h·:Ý?pœL¸lQé?†2Zš1¬ï? wà˜x¼?:â¬Ñ“å×?u#*6î@ï? ¶#ÎÕ?×…DØ„”î? N›øß?fÊÙ?€U(Œ ²ç?P¡3ï?¿U÷ú0£è?øOVÔë®Ê?çàYZ’â?BÂDš½zê?ª–\Õà?g%bÉOKà?–+&Tû~Ø?é‡ä«Ý?PžxöVã?pŠ'^Þƒ ?(Z¦»À¤´?c^t_Ù?ÜÁÞÄ?Ù‘Û½¸é?) —dŒâ?ÏRH Ä}?ÌÉÃÔbÂÌ?”_ë2X¶Ê?ßêv9Dnà?ÂÜw3då?È´šZe°?D '_"Eà?³Ó?Z3yá?Љ÷Þâ¸è?Зà§´à?#­ AF‚ê?ZªÈ 4å?l¸Áqè?<ÝHÕ?v)‡6Õ"ï?@àñŸ`o‘?§pGkýºë??<m?m?±—¹åMâ?ïcJû½Êé?¤zæ¡…é?D—ÇŽßß?ÀéÁÆÒ?O\2¾bä?¡±¯à…þé?Íú )ãmå?ó„±ÿì?óã¨QSá?€ÕU‚ƒ/Ö?Ü5hºÐ?}J¡DzŸì?íù®Úì?Ó»€‹MFä?`ªÐæ ¯?€Ĩ‡ ”?@Ý<ÉSøŽ?˜{ "C‚¹?°ÞË^ ’Õ?˜KbcQë?øÞKE•ë?¤Ã€?¬¾Ú?Ah=v˜á?CO="»ê?½võ}ƒâ?nö=RaÛ?ˆPL4×?Áêÿ»æ?]bà)â?,¡[>Рí?¤ÑüÜ(Û?‚ra"A°?`À·b¢¥?º"{þ][á?Ð?¾ÑÎ?¨–!“)»?}ë–·Ãæ?*÷ònrfç?hÌ—ßîë?–¨æ­xÓ?x£E?•Îë?q?£ gã?çÉŠ15²î?Kêã›+Íï? >F[×?>ß âMÝ?p»'rê¡ì?ØÈ9TÜ?¿“£7rýï?:ó‘ÚpÒë?¡ðÐ"Í?‚äo ÊÚ?àÛ~Ï‚µ?"Œ¬›•Ý?ç’ëÍ2ä?ì«?Ûþ´?E}E0MÝæ?ݸ? ÇÄã?%â&«„ßî?˜4”-_D¾?°ØÐXÜ?ȹÔw‚²?Ô”ÉRƒé?Nqª‹À„Ö?4µJyÙIÆ?‚ŸqGMÞ?¦vë…;&ë?+K^ÃMã?’™Adšà?œÿžg,Ý?ù½¿ƒS¥?|­¶|qÉ?ŒË§ÓÝ?ú­I]©sç? ÐWÛMÔ?n™t«6Ø? ‘2Ç>Ë? ¾ÁP‡üÀ?‚\¬äDÔ?@OšÊf6?¹»›}.\â? ­Õä?Š%œ-àºá?l—* _Æ?ÈvÐŒ"¶?+ð@„Ìã?ˆhX1šÛ? Ÿ(Ä?¬‡åIuÏà? Ø«¾ò»Ñ?ÉúãÇÐÑë?"§b¢ÖôÙ?‚H+yðã?…¿¦Ý¡§?ené;žRá?šî•sÔ?ð”¤]ć¬?§$ê—ší?ඃ†Ô?PèÌ·dÔ?%c´¿Râ?^ˆfˆsí?|w™Ÿ×Ô?BÏæo(©Ö?ôð3½PÞ?K]2žÐïè?b9y7¡ÆÒ?~´ˆ*`0å?‹´­Ù?lØ7ü#Ó?€79”%‰¥?$ö5ì#Ç?/Íà;ë?Gÿ òqá?rPwµ¶À?àOαèŸ?€‘Æaë/r?ª$,ǸZâ?|ïí†Þ#Ô? ¶³óÕóæ?¨ƒq„F©ï?¬ CÝÕÀà?P@IU’’´?°\Aõ*¡?m %¢j¦æ?Ä4ÍMržÛ?‚²ž©è©î?u›i!a˜á?ȯL‚"â? vÏ}Ð?Â8+@ûá?pû¬Ë§²·?ÓÇK¦ŸŸà?xE&ëb×?¨äƒ#®Í´?Ja2Bæ?Hù§²žØ?à@ÛhÕ™?ŽäÄuÑ?yëg…Óê?jÜ5‰R½ê?_Xvp*é?Z¥ß?zlØ”4?Ô?ø_Yϰ4??Ùœ”ßã?~ñ ŒDÕ?DçX×Õ?€C°×™p? ãp·oÒá?àñ]Aá$ž?`Ú¦ŠÜË?îÄ.àlHê?˜ˆâKçì?pઠ± å?:òš¼äï?ü·ÐÔ?ø*`>ø¹?;]&¹¤Øæ?` dèç”?ðÀÁl¦?PÅü/´ç?4DËF9ï?,QÁ“oì?P¬`îŽü¾?£$µæŸÝä?h.‘V¾@·?Φš¸Cƒë?ðaËZÊ„²?nõ%j"îß? 9‹ÇN¶?´£ÔÉOã?èÛÆîEì?à={ªØ?&ÙcŒà?úð*æûæ?¡û5¨Šë?!D´ƒµ"ì? Ám³ÍÞ©?ð×›žØ¥?ˆlØwÓ?€Gù õ¯¿?ˆšœ”´?ÿ†a~¤?#V%(ã?€Gü+3ûÀ?ÀÔdœå§Â?`tk:¢Ï?$¢ÖUÈ?Bn¸;Ê›í?Ý5YŒ8˜í?š=r¯‘Õ?€ÁSÒ„ß~?{Tü~Vï?âùOçî?P²-XŸ×?;=N¬Ð?ô¼0”¾Å?ù0ÇvÚ¿í?ÀðX¥\ƒ?ÚYŠÄNÕ?ôü¤Ù÷Ò?8ÚðÉÚá?‘`”“ˆí?Àê>Ϧ¥?ÐXè‹ú¤?vR"•Ö?_W­}à?ÖðSBÒ?ÈuDŠê?€sd¡‡ß?ü\¥mà¦ê?¸õP2 .Ø?ÊK¥º½^ß?q-Tã?pZ5‡PžÉ?üHsÕjäÜ?~ba€Ê?Iõ×Ü4™ê?Î$7¿¥‹ä?Öùßf_‘Ú?½†FÑûÆ?8gãºÂbÕ?b5/ÁXë?œÁ®Ú°Ý?.·XŸŠCÞ?2 rÞ'ê?Ì"¶QÕÑ?A:Ôlèeæ?îP]DÅ|ä?nØbÅl3í?©5Jfu|à?2ˆMú [Ù?®!¿Š@é?Xåø2:ÿ¶?é[—ïÇßï?á}`lG ?ȶòÖ|Ö?RòcÊ„Û?Tú€ þxÜ?Eê¤Lïç?Œ¿5O¥ æ?¦XÉÝÂåå?q¼ Àðí?ØÈÿá1+Á?¢º´SÏÛ?.’.Jë?×½2į?ÀTåD£?KÁ¦}í?µHŸÁ4ì?ƒ_ûnjâ?,“± #ýÕ?* [ÔW8Ý?åPµ|gì?¶•ÏÇa/ë? TÒÜêÂÃ?\î_]‰HØ?i9ˆ÷‘Xì?Ä]úU+é?ßH3K/´ï?†‡Òf²ä?¯Ÿ 5ê?ÎÃñ.€ÖØ? 
Pˆ§»Îá?MŠgi3‚è?(®à[\Ä?æé PúÇç?Æ|\áè’Ý?àa|3ÆdÒ?êPaÑ?Z®w ·ã?Èzh¶°ŠÍ?UŒ;õwûä?*È™v´gÑ?øí 5U»Ð?Öî•G ´æ?0í°y³­¯?ÀÒ-ä@?ƒü÷9ïÛ?ø¨äß±?¦TƾXÌá? ›`÷Ä·í? Sá®SÚ”?lQ L­¸Ù?€Hñ®¡¾?j¹×³jsè?óÓÞpâ?š0VFß?À”1½³?ý/©ü.ì?–u_÷§?Ù?˜kj Á?$?¨rä¤Ó?J%Qs“8â?,ÍKb6cì?Yõ²CgTì?¨1LyÇ?¦ùÑ4Þ?€Ÿ†&äáÙ?h{¦%…£Á?Ì„&óñé?LaGí*Í?”n/MJÓ?°‡+p”Ã? @ÛÓÑRÛ?ÿ` oÔî?r¿šRRî?zî|¯? ÍrQÀ?Åý5&Ú?ÓPkÃTå?! Ô|f|á?ºÏ>U7ì?ÿÞg™ç?2sÈï?Ö¯—eá?³³90&7æ?þOŠí*Ð?«iÛ YÑæ?îS¤ˆb[î?X[ZhÞïï?X@›QÀ?¨„Ç•ÓgÆ?¥‘uzIé?•HPˆHî?JÌ/RAWÞ?–4Lª++á?tÎ|§®|å?€­r%ÇÇÀ?I短FÚ?’Íð¦,Ì?uçlEÜ?·XѸgç?ÏÖDãà?@Cq“é?”u€Í§Ü?‹]M "aì?Œ“ïWÜê?FÉcß×?À—x+’A³?&À¶8AØ?ûʆ피å?üD4à×Ç?$ã`E½Ù?"êÕD6`à?H7JkÀ–Ø?¯Ý95~â?ðÓ,nMàã?œ¾Tþ@^ä?¼¦éUì?žøR³—í?¨]¬:ë?%w£¼ í?ÒUEö?ç?+·D³-ë?Æû®maç? R›n3åÌ?düãñ»?ìí OÛ‘Í?Õ%öuÖk?¬VÛ‡×?“±Wq´ì?1Ã:C#ç?'ìÂ_ë?Œ’í'›žÕ?4»]qVà?pVUï?OFU"Xoè?7cK`5Ìì?LƼ”!Ñ?d¨Z5M^å?d€@€X-â?XL·çüâ?,Dxï¢Aá?dXªùïÑ?f¼U‹°Ìå?ªR¦rfÚ?Veõ8æÐÙ?œj<âA@À?.2sð+â?Ün—-€¬ß?õ˜‘«?µë?x$t»(Ø?pㆩ ÑË?@–Ä5°–?”iV+¦ÏÇ?ùËÕ¼ÄÑ?¼ò:šNûÆ?ˆíî~eæ?|øÍ¼ÏkÌ?†˜ÊÔ6ÉØ?’jØë¢²?aŠÕŸ»lî?mçL kï?bÍ5Õ¨@ä?ø‘[‰Qñ½?¬´»É?b¦æ- Ô?¼K°Ká?*ÚL>Ô?>²T3cSÞ?äÑ ¡!¿Þ?xàI Œæ?ô¼NK›!ë?){F_²Ä?ÓHc8;ì?DAS!aµÞ?`l€¼8GË?x%†èbd²?XSG4å'à?RvÛŸñà?}åÃɶ?jXŠô9Ð?œ%g'4ÅÂ?H)9‡­±?¼ ºŽMtÃ?žXäçà?¦„²ÓÞ?sO92Oè?ȃ9V·êî?`-tÊ)˜?¸2b}‘ Û?¥¦ŠÊAí? –z³Sà?ptûÁˆÃ?0³—i–Õ?@]Õ;j-Š?"ìFÕß(Ú?ø ïë‰UÝ?8Ȧ·Ü?8mþÞâ?œnÇ]‰bÇ? ’žØÁ?]b:Û„ë?ÍÙYs î?\ú_ìÝ?Ò?~WâÅÉÕ?[´\h÷ æ?Ø\ãëMç?ù?*8(¨î?t‚uV¿Õ?Ðü|[Z¶?]P^rCéä?Ðð·ç? ïíÿH(À?äFv:•Â?ÐOŒ¿q¦?Ã÷êªL è? è´›;ªá?7Cí®Òá?ZŠrXšè?`Á9úqùÇ?,X~ëU¿Þ?8)ìõ„Â?8$$h~н?X˜•½ROï?ÞyK¦ï?ß Ïê?°âÁ7íÄ×?xb;Zj;Ú?ªítn:šà?jT±5÷já?ôs'¢)åß?ÊÒ ñ‡«Ü?Öœ;‘Ëß?PÅ;^·?eîæÑ©ê? $ü@ÀŒÌ?Fè°\óî?0Ž—í©À?–²×3zÎÕ?e šÔÝn?À;ÂjÓ?Å®%7³á?êKN!Ìí?|–†Çu%Ì?®_.*à?Ë%¢Â­eç?ûÕ4Sì?Ê4ÊxQQé?âa=ªJ€é?®‰¦Uûæ?(iïn×é? ËhƒXzš?P·{æÏ7Õ?Dp»ŸcÕ?lTÄ]×Å?²ÁÐ\’Ý?Jés÷ù±å?AM¨¤D3ï?áZ1Ì}µç?ðe¨”ÍÔ?T¶# EÓ?4¥LÔbÆ?²hö‰ôúÝ?Ò þ«e­å?P_žN4†Ã?"d¨üÐ?4 ÀÂ.Û?fHøÈÔ?g¿=Íàã?±Éªç?Œ%!ϪÐÂ?0 —æ-£? ó\{÷å?P7}5üÑ?ˆcÇ€þË?M(jìc.à?Lᜎá?x™aó®Uï?À敱É?çB Ÿ½æ?R>WêÍÐ?èÞœ!Ì ì?‚º¨N‘Ü?°×òZWÓ?¸XÐôq–¸?d$žo3á?¼ÕYW:kà?;fÕ;×?솥w¤Ü?cûÀš¶_è?Pé2Ñ ñß?…† ïýËá?Z¼#‡Cyé?Ö°GöH-â?x~Ÿ É×?º le&è?ÐÑ [¿Ê?´"zÃÄ?=™jLúê?Ý.(…í?–i+vÖ?°h.uõ·?ö4Ûk× Û?°•ÔÙ6iÌ?øj-HG Á?­ãb»øÓ?±˜âÿãå?Ñ1Šqgãá??§£ŒNì?nMÑ8wßÝ?ÜŸµ1ï?8ù9 ³Þ?ð‚f}ƒµ¨?ø]Ç*ä?xMUä[Í?Z_¿‘wÕ?Z€X+ZÚ?ïOø¦¦éã?„¦¨ü#ÕË?Î$,aß?Ø;ÇDaáå?â*“ÙlÝ?H'ë$שÔ?€:hoøÊ­?/8áÍ"=ì?üw…þ±ã?Åî Úã?2|‚©ü™Ý?éÊ^LEê?íëq¿¤ æ?DªÇ:W|ë?l¥JOdÊ?´î}ÃÂÙ? ÝQó¥¶?ž¤·É`?è?\(ÈíNÜ?0¦ÑQ¯¦é?”ÃÚò…Ú?¸™wŸÙ?”¢%väØ?xåà1(Ö?@ÙòìÞæ?çÙ@¹/¤å?x÷¢KÛ?þ‹¡>Œì?1d‘sNÍí?,²Å¼¬ Ï?#ASDF BLOCK INDEX %YAML 1.1 --- [359] ... 
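# Note (added for orientation; the offset value is taken from the file above): the trailing "#ASDF BLOCK INDEX" document is the ASDF block index, a small YAML document appended after the binary blocks; each entry (here the single value 359) is the byte offset of one binary block measured from the start of the file. 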
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/blocks.diff0000644000537500020070000000042400000000000022476 0ustar00wjamiesonSTSCI\sciencetree: foobar: bizbaz: > green < red datatype: > uint64 < float64 shape: - > 9000 < 10000 ndarrays differ by shape, datatype and contents ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames.diff0000644000537500020070000000233600000000000022502 0ustar00wjamiesonSTSCI\sciencetree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 > frames: > - reference_frame: > galcen_coord: > dec: > unit: > deg > value: > -28.936175 > ra: > unit: > deg > value: > 266.4051 > wrap_angle: > unit: > deg > value: > 360.0 > galcen_v_sun: > - unit: > km s-1 > value: > 11.1 > - unit: > km s-1 > value: > 232.24 > - unit: > km s-1 > value: > 7.25 < galcen_dec: < unit: < rad < value: < 1.0 < galcen_ra: < unit: < deg < value: < 45.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames0.asdf0000644000537500020070000001013700000000000022565 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev858} frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: ICRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4_noeterms} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: galactic} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: galcen_coord: !wcs/icrs_coord-1.1.0 dec: {value: -28.936175} ra: value: 266.4051 wrap_angle: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 360.0} galcen_distance: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 5.0} galcen_v_sun: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 11.1} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 232.24} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 7.25} roll: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 3.0} type: galactocentric z_sun: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pc, value: 3.0} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 3.0856775814671916e+16} - !unit/quantity-1.1.0 
{unit: !unit/unit-1.0.0 m, value: 9.257032744401574e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 6.1713551629343834e+19} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: GCRS unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: equinox: !time/time-1.1.0 J2000.000 obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 3.0856775814671916e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 9.257032744401574e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 6.1713551629343834e+19} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: precessed_geocentric unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames1.asdf0000644000537500020070000000732100000000000022567 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev846} frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat, blurg] name: CelestialFrame reference_frame: {type: ICRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4_noeterms} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: galactic} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: galcen_dec: !unit/quantity-1.1.0 unit: rad value: 1.0 galcen_distance: !unit/quantity-1.1.0 unit: m value: 5.0 galcen_ra: !unit/quantity-1.1.0 unit: deg value: 45.0 roll: !unit/quantity-1.1.0 unit: deg value: 3.0 type: galactocentric z_sun: !unit/quantity-1.1.0 unit: pc value: 3.0 unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: obsgeoloc: - !unit/quantity-1.1.0 unit: m value: 3.0856775814671916e+16 - !unit/quantity-1.1.0 unit: m value: 9.257032744401574e+16 - !unit/quantity-1.1.0 unit: m value: 6.1713551629343834e+19 obsgeovel: - !unit/quantity-1.1.0 unit: m s-1 value: 2.0 - !unit/quantity-1.1.0 unit: m s-1 value: 1.0 - !unit/quantity-1.1.0 unit: m s-1 value: 8.0 obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: GCRS unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: equinox: !time/time-1.1.0 J2000.000 obsgeoloc: - !unit/quantity-1.1.0 unit: m value: 3.0856775814671916e+16 - !unit/quantity-1.1.0 unit: m value: 9.257032744401574e+16 - !unit/quantity-1.1.0 unit: m value: 6.1713551629343834e+19 obsgeovel: - !unit/quantity-1.1.0 unit: m s-1 value: 2.0 - !unit/quantity-1.1.0 unit: m s-1 value: 1.0 - !unit/quantity-1.1.0 unit: m s-1 value: 8.0 obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: precessed_geocentric unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] ... 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames_ignore_asdf_library.diff0000644000537500020070000000221000000000000026555 0ustar00wjamiesonSTSCI\science> tree: > frames: > - reference_frame: > galcen_coord: > dec: > unit: > deg > value: > -28.936175 > ra: > unit: > deg > value: > 266.4051 > wrap_angle: > unit: > deg > value: > 360.0 > galcen_v_sun: > - unit: > km s-1 > value: > 11.1 > - unit: > km s-1 > value: > 232.24 > - unit: > km s-1 > value: > 7.25 < galcen_dec: < unit: < rad < value: < 1.0 < galcen_ra: < unit: < deg < value: < 45.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames_ignore_both.diff0000644000537500020070000000000000000000000025043 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames_ignore_reference_frame.diff0000644000537500020070000000014700000000000027233 0ustar00wjamiesonSTSCI\sciencetree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/frames_minimal.diff0000644000537500020070000000041500000000000024204 0ustar00wjamiesonSTSCI\sciencetree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 frames: - reference_frame: > galcen_coord > galcen_v_sun < galcen_dec < galcen_ra ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/simple_inline_array.diff0000644000537500020070000000005100000000000025242 0ustar00wjamiesonSTSCI\science ndarrays differ by contents ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/simple_inline_array0.asdf0000644000537500020070000000075700000000000025344 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.5.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.6.1.dev2+gef67341} history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: !core/software-1.0.0 {name: asdf, version: 2.6.1.dev2+gef67341} array: !core/ndarray-1.0.0 [0, 1, 2] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/data/simple_inline_array1.asdf0000644000537500020070000000075700000000000025345 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.5.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.6.1.dev2+gef67341} history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: !core/software-1.0.0 {name: asdf, version: 2.6.1.dev2+gef67341} array: !core/ndarray-1.0.0 [0, 1, 3] ... 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_defragment.py0000644000537500020070000000252700000000000023211 0ustar00wjamiesonSTSCI\scienceimport os import numpy as np import pytest import asdf from asdf import AsdfFile from asdf.commands import main from asdf.tests.helpers import get_file_sizes, assert_tree_match def _test_defragment(tmpdir, codec): x = np.arange(0, 1000, dtype=float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(100, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') out_path = os.path.join(str(tmpdir), 'original.defragment.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args( ['defragment', path, '-o', out_path, '-c', codec]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original.defragment.asdf' in files assert files['original.defragment.asdf'] < files['original.asdf'] with asdf.open(os.path.join(str(tmpdir), 'original.defragment.asdf')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 2 def test_defragment_zlib(tmpdir): _test_defragment(tmpdir, 'zlib') def test_defragment_bzp2(tmpdir): _test_defragment(tmpdir, 'bzp2') def test_defragment_lz4(tmpdir): pytest.importorskip('lz4') _test_defragment(tmpdir, 'lz4') ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_diff.py0000644000537500020070000000442000000000000021777 0ustar00wjamiesonSTSCI\scienceimport io from functools import partial import pytest from asdf.tests import helpers from asdf.commands import main, diff from . 
import data as test_data get_test_data_path = partial(helpers.get_test_data_path, module=test_data) def _assert_diffs_equal(filenames, result_file, minimal=False, ignore=None): iostream = io.StringIO() file_paths = [get_test_data_path(name) for name in filenames] diff(file_paths, minimal=minimal, iostream=iostream, ignore=ignore) iostream.seek(0) result_path = get_test_data_path(result_file) with open(result_path, 'r') as handle: assert handle.read() == iostream.read() def test_diff(): filenames = ['frames0.asdf', 'frames1.asdf'] result_file = 'frames.diff' _assert_diffs_equal(filenames, result_file, minimal=False) def test_diff_minimal(): filenames = ['frames0.asdf', 'frames1.asdf'] result_file = 'frames_minimal.diff' _assert_diffs_equal(filenames, result_file, minimal=True) @pytest.mark.parametrize('result_file, ignore', [ ('frames_ignore_asdf_library.diff', ['asdf_library']), ('frames_ignore_reference_frame.diff', ['frames[*].reference_frame']), ('frames_ignore_both.diff', ['asdf_library', 'frames[*].reference_frame']), ]) def test_diff_ignore(result_file, ignore): filenames = ['frames0.asdf', 'frames1.asdf'] _assert_diffs_equal(filenames, result_file, minimal=False, ignore=ignore) def test_diff_block(): filenames = ['block0.asdf', 'block1.asdf'] result_file = 'blocks.diff' _assert_diffs_equal(filenames, result_file, minimal=False) def test_diff_simple_inline_array(): filenames = ['simple_inline_array0.asdf', 'simple_inline_array1.asdf'] result_file = 'simple_inline_array.diff' _assert_diffs_equal(filenames, result_file, minimal=False) @pytest.mark.filterwarnings('ignore::astropy.io.fits.verify.VerifyWarning') def test_file_not_found(): # Try to open files that exist but are not valid asdf filenames = ['frames.diff', 'blocks.diff'] with pytest.raises(RuntimeError): diff([get_test_data_path(name) for name in filenames], False) def test_diff_command(): filenames = ['frames0.asdf', 'frames1.asdf'] paths = [get_test_data_path(name) for name in filenames] assert main.main_from_args(['diff'] + paths) == 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_edit.py0000644000537500020070000002222000000000000022012 0ustar00wjamiesonSTSCI\sciencefrom contextlib import contextmanager import os import re import numpy as np from numpy.testing import assert_array_equal import pytest import asdf from asdf.commands import main @pytest.fixture(params=asdf.versioning.supported_versions) def version(request): return request.param @pytest.fixture def create_editor(tmp_path): """ Fixture providing a function that generates an editor script. """ def _create_editor(pattern, replacement): if isinstance(pattern, str): pattern = pattern.encode("utf-8") if isinstance(replacement, str): replacement = replacement.encode("utf-8") editor_path = tmp_path / "editor.py" content = f"""import re import sys with open(sys.argv[1], "rb") as file: content = file.read() content = re.sub({pattern!r}, {replacement!r}, content, flags=(re.DOTALL | re.MULTILINE)) with open(sys.argv[1], "wb") as file: file.write(content) """ with editor_path.open("w") as file: file.write(content) return f"python3 {editor_path}" return _create_editor @contextmanager def file_not_modified(path): """ Assert that a file was not modified during the context. 
""" original_mtime = os.stat(path).st_mtime_ns yield assert os.stat(path).st_mtime_ns == original_mtime @pytest.fixture def mock_input(monkeypatch): """ Fixture providing a function that mocks the edit module's built-in input function. """ @contextmanager def _mock_input(pattern, response): called = False def _input(prompt=None): nonlocal called called = True assert prompt is not None and re.match(pattern, prompt) return response with monkeypatch.context() as m: m.setattr("builtins.input", _input) yield assert called, "input was not called as expected" return _mock_input @pytest.fixture(autouse=True) def default_mock_input(monkeypatch): """ Fixture that raises an error when the program requests unexpected input. """ def _input(prompt=None): raise AssertionError(f"Received unexpected request for input: {prompt}") monkeypatch.setattr("builtins.input", _input) def test_no_blocks(tmp_path, create_editor, version): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"foo: bar", "foo: baz") assert main.main_from_args(["edit", file_path]) == 0 with asdf.open(file_path) as af: assert af["foo"] == "baz" def test_no_blocks_increase_size(tmp_path, create_editor, version): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) new_value = "a" * 32768 os.environ["EDITOR"] = create_editor(r"foo: bar", f"foo: {new_value}") # With no blocks, we can expand the existing file, so this case # shouldn't require confirmation from the user. assert main.main_from_args(["edit", file_path]) == 0 with asdf.open(file_path) as af: assert af["foo"] == new_value def test_no_blocks_decrease_size(tmp_path, create_editor, version): file_path = str(tmp_path/"test.asdf") original_value = "a" * 32768 with asdf.AsdfFile(version=version) as af: af["foo"] = original_value af.write_to(file_path) os.environ["EDITOR"] = create_editor(f"foo: {original_value}", "foo: bar") assert main.main_from_args(["edit", file_path]) == 0 with asdf.open(file_path) as af: assert af["foo"] == "bar" def test_with_blocks(tmp_path, create_editor, version): file_path = str(tmp_path/"test.asdf") array1 = np.random.rand(100) array2 = np.random.rand(100) with asdf.AsdfFile(version=version) as af: af["array1"] = array1 af["array2"] = array2 af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"foo: bar", "foo: baz") assert main.main_from_args(["edit", file_path]) == 0 with asdf.open(file_path) as af: assert af["foo"] == "baz" assert_array_equal(af["array1"], array1) assert_array_equal(af["array2"], array2) def test_with_blocks_increase_size(tmp_path, create_editor, version, mock_input): file_path = str(tmp_path/"test.asdf") array1 = np.random.rand(100) array2 = np.random.rand(100) with asdf.AsdfFile(version=version) as af: af["array1"] = array1 af["array2"] = array2 af["foo"] = "bar" af.write_to(file_path) new_value = "a" * 32768 os.environ["EDITOR"] = create_editor(r"foo: bar", f"foo: {new_value}") # Abort without updating the file with mock_input(r"\(c\)ontinue or \(a\)bort\?", "a"): with file_not_modified(file_path): assert main.main_from_args(["edit", file_path]) == 1 # Agree to allow the file to be rewritten with mock_input(r"\(c\)ontinue or \(a\)bort\?", "c"): assert main.main_from_args(["edit", file_path]) == 0 with asdf.open(file_path) as af: assert af["foo"] == new_value assert_array_equal(af["array1"], array1) assert_array_equal(af["array2"], array2) 
def test_with_blocks_decrease_size(tmp_path, create_editor, version): file_path = str(tmp_path/"test.asdf") original_value = "a" * 32768 array1 = np.random.rand(100) array2 = np.random.rand(100) with asdf.AsdfFile(version=version) as af: af["array1"] = array1 af["array2"] = array2 af["foo"] = original_value af.write_to(file_path) os.environ["EDITOR"] = create_editor(f"foo: {original_value}", "foo: bar") assert main.main_from_args(["edit", file_path]) == 0 with asdf.open(file_path) as af: assert af["foo"] == "bar" assert_array_equal(af["array1"], array1) assert_array_equal(af["array2"], array2) def test_no_changes(tmp_path, create_editor, version): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"non-existent-string", "non-existent-string") with file_not_modified(file_path): assert main.main_from_args(["edit", file_path]) == 0 def test_update_asdf_standard_version(tmp_path, create_editor, version, mock_input): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"^#ASDF_STANDARD .*?$", "#ASDF_STANDARD 999.999.999") with file_not_modified(file_path): with mock_input(r"\(c\)ontinue editing or \(a\)bort\?", "a"): assert main.main_from_args(["edit", file_path]) == 1 def test_update_yaml_version(tmp_path, create_editor, version, mock_input): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"^%YAML 1.1$", "%YAML 1.2") with file_not_modified(file_path): with mock_input(r"\(c\)ontinue editing or \(a\)bort\?", "a"): assert main.main_from_args(["edit", file_path]) == 1 def test_bad_yaml(tmp_path, create_editor, version, mock_input): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"foo: bar", "foo: [") with file_not_modified(file_path): with mock_input(r"\(c\)ontinue editing or \(a\)bort\?", "a"): assert main.main_from_args(["edit", file_path]) == 1 def test_validation_failure(tmp_path, create_editor, version, mock_input): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["array"] = np.arange(100) af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"byteorder: .*?$", "byteorder: med") with file_not_modified(file_path): with mock_input(r"\(c\)ontinue editing, \(f\)orce update, or \(a\)bort\?", "a"): assert main.main_from_args(["edit", file_path]) == 1 with mock_input(r"\(c\)ontinue editing, \(f\)orce update, or \(a\)bort\?", "f"): assert main.main_from_args(["edit", file_path]) == 0 with open(file_path, "rb") as f: content = f.read() assert b"byteorder: med" in content def test_asdf_open_failure(tmp_path, create_editor, version, mock_input): file_path = str(tmp_path/"test.asdf") with asdf.AsdfFile(version=version) as af: af["foo"] = "bar" af.write_to(file_path) os.environ["EDITOR"] = create_editor(r"^#ASDF .*?$", "#HJKL 1.0.0") with file_not_modified(file_path): with mock_input(r"\(c\)ontinue editing or \(a\)bort\?", "a"): assert main.main_from_args(["edit", file_path]) == 1 def test_non_asdf_file(tmp_path): file_path = str(tmp_path/"test.asdf") with open(file_path, "w") as f: f.write("Dear diary...") with file_not_modified(file_path): assert main.main_from_args(["edit", file_path]) == 1 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_exploded.py0000644000537500020070000000307000000000000022673 0ustar00wjamiesonSTSCI\scienceimport os import numpy as np import asdf from asdf import AsdfFile from asdf.commands import main from ...tests.helpers import get_file_sizes, assert_tree_match def test_explode_then_implode(tmpdir): x = np.arange(0, 10, dtype=float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') ff = AsdfFile(tree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. ff.write_to(path, all_array_storage='internal') assert len(ff.blocks) == 2 result = main.main_from_args(['explode', path]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original_exploded.asdf' in files assert 'original_exploded0000.asdf' in files assert 'original_exploded0001.asdf' in files assert 'original_exploded0002.asdf' not in files assert files['original.asdf'] > files['original_exploded.asdf'] path = os.path.join(str(tmpdir), 'original_exploded.asdf') result = main.main_from_args(['implode', path]) assert result == 0 with asdf.open(str(tmpdir.join('original_exploded_all.asdf'))) as af: assert_tree_match(af.tree, tree) assert len(af.blocks) == 2 def test_file_not_found(tmpdir): path = os.path.join(str(tmpdir), 'original.asdf') assert main.main_from_args(['explode', path]) == 2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/test_extension.py0000644000537500020070000000070600000000000023106 0ustar00wjamiesonSTSCI\scienceimport pytest from .. import find_extensions @pytest.mark.parametrize("summary", [True, False]) @pytest.mark.parametrize("tags_only", [True, False]) def test_parameter_combinations(summary, tags_only): # Just confirming no errors: find_extensions(summary, tags_only) def test_builtin_extension_included(capsys): find_extensions(True, False) captured = capsys.readouterr() assert "asdf.extension.BuiltinExtension" in captured.out ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_extract.py0000644000537500020070000000173000000000000022542 0ustar00wjamiesonSTSCI\scienceimport os import numpy as np import pytest astropy = pytest.importorskip('astropy') from astropy.io.fits import HDUList, ImageHDU import asdf from asdf.fits_embed import AsdfInFits from asdf.tests.helpers import assert_tree_match from .. 
import extract def test_extract(tmpdir): hdulist = HDUList() image = ImageHDU(np.random.random((25, 25))) hdulist.append(image) tree = { 'some_words': 'These are some words', 'nested': { 'a': 100, 'b': 42 }, 'list': [x for x in range(10)], 'image': image.data } asdf_in_fits = str(tmpdir.join('asdf.fits')) with AsdfInFits(hdulist, tree) as aif: aif.write_to(asdf_in_fits) pure_asdf = str(tmpdir.join('extract.asdf')) extract.extract_file(asdf_in_fits, pure_asdf) assert os.path.exists(pure_asdf) with asdf.open(pure_asdf) as af: assert not isinstance(af, AsdfInFits) assert_tree_match(tree, af.tree) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_info.py0000644000537500020070000000202200000000000022016 0ustar00wjamiesonSTSCI\sciencefrom functools import partial import pytest from ...tests import helpers from . import data as test_data from .. import main get_test_data_path = partial(helpers.get_test_data_path, module=test_data) # The test file we're using here contains objects whose schemas # have been dropped from the ASDF Standard. We should select # a new file once the locations of schemas are more stable. @pytest.mark.filterwarnings("ignore::asdf.exceptions.AsdfConversionWarning") def test_info_command(capsys): file_path = get_test_data_path("frames0.asdf") assert main.main_from_args(["info", file_path]) == 0 captured = capsys.readouterr() assert "root" in captured.out assert "frames" in captured.out original_len = len(captured.out.split("\n")) assert main.main_from_args(["info", "--max-rows", str(original_len - 5), file_path]) == 0 captured = capsys.readouterr() assert "root" in captured.out assert "frames" in captured.out new_len = len(captured.out.split("\n")) assert new_len < original_len ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_main.py0000644000537500020070000000063700000000000022021 0ustar00wjamiesonSTSCI\scienceimport pytest from .. import main def test_help(): # Just a smoke test, really main.main_from_args(['help']) def test_invalid_command(): with pytest.raises(SystemExit) as e: main.main([]) assert e.value.code == 2 with pytest.raises(SystemExit) as e: main.main(['foo']) if isinstance(e.value, int): assert e.value == 2 else: assert e.value.code == 2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_remove_hdu.py0000644000537500020070000000163700000000000023233 0ustar00wjamiesonSTSCI\scienceimport os import numpy as np import pytest astropy = pytest.importorskip('astropy') from astropy.io import fits from asdf.fits_embed import AsdfInFits from .. 
import remove_hdu def test_remove_hdu(tmpdir): hdulist = fits.HDUList() image = fits.ImageHDU(np.random.random((25, 25))) hdulist.append(image) tree = { 'some_words': 'These are some words', 'nested': { 'a': 100, 'b': 42 }, 'list': [x for x in range(10)], 'image': image.data } asdf_in_fits = str(tmpdir.join('asdf.fits')) with AsdfInFits(hdulist, tree) as aif: aif.write_to(asdf_in_fits) with fits.open(asdf_in_fits) as hdul: assert 'ASDF' in hdul new_fits = str(tmpdir.join('remove.fits')) remove_hdu(asdf_in_fits, new_fits) assert os.path.exists(new_fits) with fits.open(new_fits) as hdul: assert 'ASDF' not in hdul ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/commands/tests/test_tags.py0000644000537500020070000000112600000000000022025 0ustar00wjamiesonSTSCI\scienceimport io import pytest from ... import AsdfFile from .. import list_tags @pytest.mark.parametrize("display_classes", [True, False]) def test_parameter_combinations(display_classes): # Just confirming no errors: list_tags(display_classes) def test_all_tags_present(): iostream = io.StringIO() list_tags(iostream=iostream) iostream.seek(0) tags = {line.strip() for line in iostream.readlines()} af = AsdfFile() for tag in af.type_index._type_by_tag: assert tag in tags for tag in af.extension_manager._converters_by_tag: assert tag in tags ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/tests/test_to_yaml.py0000644000537500020070000000155200000000000022536 0ustar00wjamiesonSTSCI\scienceimport os import numpy as np import asdf from ... import AsdfFile from .. import main from ...tests.helpers import get_file_sizes, assert_tree_match def test_to_yaml(tmpdir): x = np.arange(0, 10, dtype=float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args(['to_yaml', path]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original.yaml' in files with asdf.open(os.path.join(str(tmpdir), 'original.yaml')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/commands/to_yaml.py0000644000537500020070000000362000000000000020333 0ustar00wjamiesonSTSCI\science""" Contains the command for converting an ASDF file to pure YAML. """ import os import asdf from .main import Command from .. import AsdfFile __all__ = ['to_yaml'] class ToYaml(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("to_yaml"), help="Convert an ASDF file to pure YAML.", description="""Convert all data to inline YAML so the ASDF file contains no binary blocks.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to convert to YAML.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file. 
If not provided, it will be the name of the input file with a '.yaml' extension.""") parser.add_argument( "--resolve-references", "-r", action="store_true", help="""Resolve all references and store them directly in the output file.""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return to_yaml(args.filename[0], args.output, args.resolve_references) def to_yaml(input, output=None, resolve_references=False): """ Convert a given ASDF file, which may contain binary blocks or reference external data, into a single ASDF file with all data stored as inline YAML. Parameters ---------- input : str or file-like object The input file. output : str or file-like object The output file. resolve_references : bool, optional If `True`, resolve all external references before saving. """ if output is None: base, ext = os.path.splitext(input) output = base + '.yaml' with asdf.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to(output, all_array_storage='inline') ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9349585 asdf-2.9.2/asdf/compat/0000755000537500020070000000000000000000000015776 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/compat/__init__.py0000644000537500020070000000000000000000000020075 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/compat/numpycompat.py0000644000537500020070000000026000000000000020722 0ustar00wjamiesonSTSCI\sciencefrom ..util import minversion __all__ = ['NUMPY_LT_1_7', 'NUMPY_LT_1_14'] NUMPY_LT_1_7 = not minversion('numpy', '1.7.0') NUMPY_LT_1_14 = not minversion('numpy', '1.14.0') ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/compression.py0000644000537500020070000002427600000000000017431 0ustar00wjamiesonSTSCI\scienceimport struct import zlib import bz2 import numpy as np import warnings from .exceptions import AsdfWarning from .config import get_config def validate(compression): """ Validate the compression string. Parameters ---------- compression : str, bytes or None Returns ------- compression : str or None In canonical form. Raises ------ ValueError """ if not compression or compression == b'\0\0\0\0': return None if isinstance(compression, bytes): compression = compression.decode('ascii') compression = compression.strip('\0') builtin_labels = ['zlib', 'bzp2', 'lz4', 'input'] ext_labels = _get_all_compression_extension_labels() all_labels = ext_labels + builtin_labels # An extension is allowed to override a builtin compression or another extension, # but let's warn the user of this. # TODO: is this the desired behavior?
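# For example, if a user-loaded extension also registers a compressor
# labeled 'lz4', all_labels will contain 'lz4' twice: the loop below then
# emits an AsdfWarning for the duplicated label, and _get_compressor()
# resolves the conflict by preferring the extension over the builtin.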
for i,label in enumerate(all_labels): if label in all_labels[i+1:]: warnings.warn(f'Found more than one compressor for "{label}"', AsdfWarning) if compression not in all_labels: raise ValueError( f"Supported compression types are: {all_labels}, not '{compression}'") return compression class Lz4Compressor: def __init__(self): try: import lz4.block except ImportError: raise ImportError( "lz4 library is not installed in your Python environment, " "therefore the compressed block in this ASDF file " "cannot be decompressed.") self._api = lz4.block def compress(self, data, **kwargs): kwargs['mode'] = kwargs.get('mode','default') compression_block_size = kwargs.pop('compression_block_size', 1<<22) nelem = compression_block_size // data.itemsize for i in range(0,len(data),nelem): _output = self._api.compress(data[i:i+nelem], **kwargs) header = struct.pack('!I', len(_output)) yield header + _output def decompress(self, blocks, out, **kwargs): _size = 0 _pos = 0 _partial_len = b'' _buffer = None bytesout = 0 for block in blocks: block = memoryview(block).cast('c') # don't copy on slice while len(block): if not _size: # Don't know the (compressed) length of this block yet if len(_partial_len) + len(block) < 4: _partial_len += block break # we've exhausted the block if _partial_len: # If we started to fill a len key, finish filling it remaining = 4-len(_partial_len) if remaining: _partial_len += block[:remaining] block = block[remaining:] _size = struct.unpack('!I', _partial_len)[0] _partial_len = b'' else: # Otherwise just read the len key directly _size = struct.unpack('!I', block[:4])[0] block = block[4:] if len(block) < _size or _buffer is not None: # If we have a partial block, or we're already filling a buffer, use the buffer if _buffer is None: _buffer = np.empty(_size, dtype=np.byte) # use numpy instead of bytearray so we can avoid zero initialization _pos = 0 newbytes = min(_size - _pos, len(block)) # don't fill past the buffer len! _buffer[_pos:_pos+newbytes] = np.frombuffer(block[:newbytes], dtype=np.byte) _pos += newbytes block = block[newbytes:] if _pos == _size: _out = self._api.decompress(_buffer, return_bytearray=True, **kwargs) out[bytesout:bytesout+len(_out)] = _out bytesout += len(_out) _buffer = None _size = 0 else: # We have at least one full block _out = self._api.decompress(memoryview(block[:_size]), return_bytearray=True, **kwargs) out[bytesout:bytesout+len(_out)] = _out bytesout += len(_out) block = block[_size:] _size = 0 return bytesout class ZlibCompressor: def compress(self, data, **kwargs): comp = zlib.compress(data, **kwargs) yield comp def decompress(self, blocks, out, **kwargs): decompressor = zlib.decompressobj(**kwargs) i = 0 for block in blocks: decomp = decompressor.decompress(block) out[i:i+len(decomp)] = decomp i += len(decomp) return i class Bzp2Compressor: def compress(self, data, **kwargs): comp = bz2.compress(data, **kwargs) yield comp def decompress(self, blocks, out, **kwargs): decompressor = bz2.BZ2Decompressor(**kwargs) i = 0 for block in blocks: decomp = decompressor.decompress(block) out[i:i+len(decomp)] = decomp i += len(decomp) return i def _get_compressor_from_extensions(compression, return_extension=False): ''' Look at the loaded ASDF extensions and return the first one (if any) that can handle this type of compression. `return_extension` can be used to return the corresponding extension for bookkeeping purposes. Returns None if no match found.
''' # TODO: in ASDF 3, this will be done by the ExtensionManager extensions = get_config().extensions for ext in extensions: for comp in ext.compressors: if compression == comp.label.decode('ascii'): if return_extension: return comp,ext else: return comp return None def _get_all_compression_extension_labels(): ''' Get the list of compression labels supported via extensions ''' # TODO: in ASDF 3, this will be done by the ExtensionManager labels = [] extensions = get_config().extensions for ext in extensions: for comp in ext.compressors: labels += [comp.label.decode('ascii')] return labels def _get_compressor(label): ext_comp = _get_compressor_from_extensions(label) if ext_comp is not None: # Use an extension before builtins comp = ext_comp elif label == 'zlib': comp = ZlibCompressor() elif label == 'bzp2': comp = Bzp2Compressor() elif label == 'lz4': comp = Lz4Compressor() else: raise ValueError( "Unknown compression type: '{0}'".format(label)) return comp def to_compression_header(compression): """ Converts a compression string to the four byte field in a block header. """ if not compression: return b'' if isinstance(compression, str): return compression.encode('ascii') return compression def decompress(fd, used_size, data_size, compression, config=None): """ Decompress binary data in a file Parameters ---------- fd : generic_io.GenericIO object The file to read the compressed data from. used_size : int The size of the compressed data data_size : int The size of the uncompressed data compression : str The compression type used. config : dict or None, optional Any kwarg parameters to pass to the underlying decompression function Returns ------- array : numpy.array A flat uint8 array containing the decompressed data. """ buffer = np.empty((data_size,), np.uint8) compression = validate(compression) decoder = _get_compressor(compression) if config is None: config = {} blocks = fd.read_blocks(used_size) # data is a generator len_decoded = decoder.decompress(blocks, out=buffer.data, **config) if len_decoded != data_size: raise ValueError("Decompressed data wrong size") return buffer def compress(fd, data, compression, config=None): """ Compress array data and write to a file. Parameters ---------- fd : generic_io.GenericIO object The file to write to. data : buffer The buffer of uncompressed data. compression : str The type of compression to use. config : dict or None, optional Any kwarg parameters to pass to the underlying compression function """ compression = validate(compression) encoder = _get_compressor(compression) if config is None: config = {} # Get a contiguous, 1D memoryview of the underlying data, preserving data.itemsize # - contiguous: because we may not want to assume that all compressors can handle arbitrary strides # - 1D: so that len(data) works, not just data.nbytes # - itemsize: should preserve data.itemsize for compressors that want to use the record size # - memoryview: don't incur the expense of a memcpy, such as with tobytes() data = memoryview(data) if not data.contiguous: data = memoryview(data.tobytes()) # make a contiguous copy data = memoryview(np.frombuffer(data, dtype=data.format)) # get a 1D array that preserves byteorder if not data.contiguous: # the data will be contiguous by construction, but better safe than sorry!
raise ValueError("Unable to obtain a contiguous buffer for compression") compressed = encoder.compress(data, **config) # Write block by block for comp in compressed: fd.write(comp) def get_compressed_size(data, compression, config=None): """ Returns the number of bytes required when the given data is compressed. Parameters ---------- See `compress()`. Returns ------- nbytes : int The size of the compressed data """ class _ByteCountingFile: def __init__(self): self.count = 0 def write(self, data): self.count += len(data) bcf = _ByteCountingFile() compress(bcf, data, compression, config=config) return bcf.count ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/config.py0000644000537500020070000002761100000000000016341 0ustar00wjamiesonSTSCI\science""" Methods for getting and setting asdf global configuration options. """ import threading from contextlib import contextmanager import copy from . import entry_points from .resource import ResourceMappingProxy, ResourceManager from . import versioning from ._helpers import validate_version from .extension import ExtensionProxy from . import util __all__ = ["AsdfConfig", "get_config", "config_context"] DEFAULT_VALIDATE_ON_READ = True DEFAULT_DEFAULT_VERSION = str(versioning.default_version) DEFAULT_LEGACY_FILL_SCHEMA_DEFAULTS = True DEFAULT_IO_BLOCK_SIZE = -1 # auto DEFAULT_ARRAY_INLINE_THRESHOLD = None class AsdfConfig: """ Container for ASDF configuration options. Users are not intended to construct this object directly; instead, use the `asdf.get_config` and `asdf.config_context` module methods. """ def __init__(self): self._resource_mappings = None self._resource_manager = None self._extensions = None self._validate_on_read = DEFAULT_VALIDATE_ON_READ self._default_version = DEFAULT_DEFAULT_VERSION self._legacy_fill_schema_defaults = DEFAULT_LEGACY_FILL_SCHEMA_DEFAULTS self._io_block_size = DEFAULT_IO_BLOCK_SIZE self._array_inline_threshold = DEFAULT_ARRAY_INLINE_THRESHOLD self._lock = threading.RLock() @property def resource_mappings(self): """ Get the list of registered resource mapping instances. Unless overridden by user configuration, this list contains every mapping registered with an entry point. Returns ------- list of asdf.resource.ResourceMappingProxy """ if self._resource_mappings is None: with self._lock: if self._resource_mappings is None: self._resource_mappings = entry_points.get_resource_mappings() return self._resource_mappings def add_resource_mapping(self, mapping): """ Register a new resource mapping. The new mapping will take precedence over all previously registered mappings. Parameters ---------- mapping : collections.abc.Mapping Map of `str` resource URI to `bytes` content """ with self._lock: mapping = ResourceMappingProxy.maybe_wrap(mapping) # Insert at the beginning of the list so that # ResourceManager uses the new mapping first. resource_mappings = [mapping] + [ r for r in self.resource_mappings if r != mapping ] self._resource_mappings = resource_mappings self._resource_manager = None def remove_resource_mapping(self, mapping=None, *, package=None): """ Remove a registered resource mapping. Parameters ---------- mapping : collections.abc.Mapping, optional Mapping to remove. package : str, optional Remove only mappings provided by this package. If the `mapping` argument is omitted, then all mappings from this package will be removed.
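Examples
--------
A minimal sketch; ``my_mapping`` stands in for a hypothetical mapping
previously registered via `add_resource_mapping`::

    config = asdf.get_config()
    config.remove_resource_mapping(my_mapping)
    # or remove everything registered by one package:
    config.remove_resource_mapping(package="asdf-astropy")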
""" if mapping is None and package is None: raise ValueError("Must specify at least one of mapping or package") if mapping is not None: mapping = ResourceMappingProxy.maybe_wrap(mapping) def _remove_condition(m): result = True if mapping is not None: result = result and m == mapping if package is not None: result = result and m.package_name == package return result with self._lock: self._resource_mappings = [m for m in self.resource_mappings if not _remove_condition(m)] self._resource_manager = None def reset_resources(self): """ Reset registered resource mappings to the default list provided as entry points. """ with self._lock: self._resource_mappings = None self._resource_manager = None @property def resource_manager(self): """ Get the `ResourceManager` instance. Includes resources from registered resource mappings and any mappings added at runtime. Returns ------- asdf.resource.ResourceManager """ if self._resource_manager is None: with self._lock: if self._resource_manager is None: self._resource_manager = ResourceManager(self.resource_mappings) return self._resource_manager @property def extensions(self): """ Get the list of registered extensions. Returns ------- list of asdf.extension.ExtensionProxy """ if self._extensions is None: with self._lock: if self._extensions is None: self._extensions = entry_points.get_extensions() return self._extensions def add_extension(self, extension): """ Register a new extension. The new extension will take precedence over all previously registered extensions. Parameters ---------- extension : asdf.extension.AsdfExtension or asdf.extension.Extension """ with self._lock: extension = ExtensionProxy.maybe_wrap(extension) self._extensions = [extension] + [e for e in self.extensions if e != extension] def remove_extension(self, extension=None, *, package=None): """ Remove a registered extension. Parameters ---------- extension : asdf.extension.AsdfExtension or asdf.extension.Extension or str, optional An extension instance or URI pattern to remove. package : str, optional Remove only extensions provided by this package. If the `extension` argument is omitted, then all extensions from this package will be removed. """ if extension is None and package is None: raise ValueError("Must specify at least one of extension or package") if extension is not None and not isinstance(extension, str): extension = ExtensionProxy.maybe_wrap(extension) def _remove_condition(e): result = True if isinstance(extension, str): result = result and util.uri_match(extension, e.extension_uri) elif isinstance(extension, ExtensionProxy): result = result and e == extension if package is not None: result = result and e.package_name == package return result with self._lock: self._extensions = [e for e in self.extensions if not _remove_condition(e)] def reset_extensions(self): """ Reset extensions to the default list registered via entry points. """ with self._lock: self._extensions = None @property def default_version(self): """ Get the default ASDF Standard version used for new files. Returns ------- str """ return self._default_version @default_version.setter def default_version(self, value): """ Set the default ASDF Standard version used for new files. Parameters ---------- value : str """ self._default_version = validate_version(value) @property def io_block_size(self): """ Get the block size used when reading and writing files. Returns ------- int Block size, or -1 to use the filesystem's preferred block size. 
""" return self._io_block_size @io_block_size.setter def io_block_size(self, value): """ Set the block size used when reading and writing files. Parameters ---------- value : int Block size, or -1 to use the filesystem's preferred block size. """ self._io_block_size = value @property def legacy_fill_schema_defaults(self): """ Get the configuration that controls filling defaults from schemas for older ASDF Standard versions. If `True`, missing default values will be filled from the schema when reading files from ASDF Standard <= 1.5.0. Later versions of the standard do not support removing or filling schema defaults. Returns ------- bool """ return self._legacy_fill_schema_defaults @legacy_fill_schema_defaults.setter def legacy_fill_schema_defaults(self, value): """ Set the flag that controls filling defaults from schemas for older ASDF Standard versions. Parameters ---------- value : bool """ self._legacy_fill_schema_defaults = value @property def array_inline_threshold(self): """ Get the threshold below which arrays are automatically written as inline YAML literals instead of binary blocks. This number is compared to number of elements in the array. Returns ------- int or None Integer threshold, or None to disable automatic selection of the array storage type. """ return self._array_inline_threshold @array_inline_threshold.setter def array_inline_threshold(self, value): """ Set the threshold below which arrays are automatically written as inline YAML literals instead of binary blocks. This number is compared to number of elements in the array. Parameters ---------- value : int or None Integer threshold, or None to disable automatic selection of the array storage type. """ self._array_inline_threshold = value @property def validate_on_read(self): """ Get configuration that controls schema validation of ASDF files on read. Returns ------- bool """ return self._validate_on_read @validate_on_read.setter def validate_on_read(self, value): """ Set the configuration that controls schema validation of ASDF files on read. If `True`, newly opened files will be validated. Parameters ---------- value : bool """ self._validate_on_read = value def __repr__(self): return ( "" ).format( self.array_inline_threshold, self.default_version, self.io_block_size, self.legacy_fill_schema_defaults, self.validate_on_read, ) class _ConfigLocal(threading.local): def __init__(self): self.config_stack = [] _global_config = AsdfConfig() _local = _ConfigLocal() def get_config(): """ Get the current config, which may have been altered by one or more surrounding calls to `asdf.config_context`. Returns ------- asdf.config.AsdfConfig """ if len(_local.config_stack) == 0: return _global_config else: return _local.config_stack[-1] @contextmanager def config_context(): """ Context manager that temporarily overrides asdf configuration. The context yields an `asdf.config.AsdfConfig` instance that can be modified without affecting code outside of the context. """ if len(_local.config_stack) == 0: base_config = _global_config else: base_config = _local.config_stack[-1] config = copy.copy(base_config) _local.config_stack.append(config) try: yield config finally: _local.config_stack.pop() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/conftest.py0000644000537500020070000000237400000000000016720 0ustar00wjamiesonSTSCI\science# This contains pytest fixtures used in asdf tests. 
# by importing them here in conftest.py they are discoverable by pytest # no matter how it is invoked within the source tree. import pytest from asdf.tests.httpserver import HTTPServer, RangeHTTPServer @pytest.fixture() def httpserver(request): """ The returned ``httpserver`` provides a threaded HTTP server instance. It serves content from a temporary directory (available as the attribute tmpdir) at a randomly assigned URL (available as the attribute url). * ``tmpdir`` - path to the tmpdir that it's serving from (str) * ``url`` - the base url for the server """ server = HTTPServer() request.addfinalizer(server.finalize) return server @pytest.fixture() def rhttpserver(request): """ The returned ``rhttpserver`` provides a threaded HTTP server instance. It serves content from a temporary directory (available as the attribute tmpdir) at a randomly assigned URL (available as the attribute url). The server supports HTTP Range headers. * ``tmpdir`` - path to the tmpdir that it's serving from (str) * ``url`` - the base url for the server """ server = RangeHTTPServer() request.addfinalizer(server.finalize) return server ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/constants.py0000644000537500020070000000121100000000000017076 0ustar00wjamiesonSTSCI\scienceimport numpy as np ASDF_MAGIC = b'#ASDF' BLOCK_MAGIC = b'\xd3BLK' FITS_MAGIC = b'SIMPLE' BLOCK_HEADER_BOILERPLATE_SIZE = 6 ASDF_STANDARD_COMMENT = b'ASDF_STANDARD' INDEX_HEADER = b'#ASDF BLOCK INDEX' # The maximum number of blocks supported MAX_BLOCKS = 2 ** 16 MAX_BLOCKS_DIGITS = int(np.ceil(np.log10(MAX_BLOCKS) + 1)) YAML_TAG_PREFIX = 'tag:yaml.org,2002:' YAML_END_MARKER_REGEX = br'\r?\n\.\.\.((\r?\n)|$)' STSCI_SCHEMA_URI_BASE = 'http://stsci.edu/schemas/' STSCI_SCHEMA_TAG_BASE = 'tag:stsci.edu:asdf' BLOCK_FLAG_STREAMED = 0x1 # ASDF max number size MAX_BITS = 63 MAX_NUMBER = (1 << MAX_BITS) - 1 MIN_NUMBER = -(1 << MAX_BITS) if len(relevant_tags) > 1 and not hasattr(delegate, "select_tag"): raise RuntimeError( "Converter handles multiple tags for this extension, " "but does not implement a select_tag method." ) self._tags = sorted(relevant_tags) self._types = [] for typ in delegate.types: if isinstance(typ, (str, type)): self._types.append(typ) else: raise TypeError("Converter property 'types' must contain str or type values") @property def tags(self): """ Get the list of tag URIs that this converter is capable of handling. Returns ------- list of str """ return self._tags @property def types(self): """ Get the Python types that this converter is capable of handling. Returns ------- list of type or str """ return self._types def select_tag(self, obj, ctx): """ Select the tag to use when converting an object to YAML. Parameters ---------- obj : object Instance of the custom type being converted. ctx : asdf.asdf.SerializationContext Serialization parameters. Returns ------- str Selected tag. """ method = getattr(self._delegate, "select_tag", None) if method is None: return self._tags[0] else: return method(obj, self._tags, ctx) def to_yaml_tree(self, obj, tag, ctx): """ Convert an object into a node suitable for YAML serialization. Parameters ---------- obj : object Instance of a custom type to be serialized. tag : str The tag identifying the YAML type that `obj` should be converted into. ctx : asdf.asdf.SerializationContext Serialization parameters. Returns ------- object The YAML node representation of the object.
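Examples
--------
A delegate's implementation can be as simple as returning a plain
dict (``Point`` here is a hypothetical custom type)::

    def to_yaml_tree(self, obj, tag, ctx):
        # obj is a Point; the returned node is serialized as YAML
        return {"x": obj.x, "y": obj.y}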
""" return self._delegate.to_yaml_tree(obj, tag, ctx) def from_yaml_tree(self, node, tag, ctx): """ Convert a YAML node into an instance of a custom type. Parameters ---------- tree : dict or list or str The YAML node to convert. tag : str The YAML tag of the object being converted. ctx : asdf.asdf.SerializationContext Serialization parameters. Returns ------- object """ return self._delegate.from_yaml_tree(node, tag, ctx) @property def delegate(self): """ Get the wrapped converter instance. Returns ------- asdf.extension.Converter """ return self._delegate @property def extension(self): """ Get the extension that provided this converter. Returns ------- asdf.extension.ExtensionProxy """ return self._extension @property def package_name(self): """ Get the name of the Python package of this converter's extension. This may not be the same package that implements the converter's class. Returns ------- str or None Package name, or `None` if the extension was added at runtime. """ return self.extension.package_name @property def package_version(self): """ Get the version of the Python package of this converter's extension. This may not be the same package that implements the converter's class. Returns ------- str or None Package version, or `None` if the extension was added at runtime. """ return self.extension.package_version @property def class_name(self): """ Get the fully qualified class name of this converter. Returns ------- str """ return self._class_name def __eq__(self, other): if isinstance(other, ConverterProxy): return other.delegate is self.delegate and other.extension is self.extension else: return False def __hash__(self): return hash((id(self.delegate), id(self.extension))) def __repr__(self): if self.package_name is None: package_description = "(none)" else: package_description = "{}=={}".format(self.package_name, self.package_version) return "".format( self.class_name, package_description, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/extension/_extension.py0000644000537500020070000002512300000000000021257 0ustar00wjamiesonSTSCI\scienceimport abc from packaging.specifiers import SpecifierSet from ..util import get_class_name from ._tag import TagDefinition from ._legacy import AsdfExtension from ._converter import ConverterProxy from ._compressor import Compressor class Extension(abc.ABC): """ Abstract base class defining an extension to ASDF. Implementing classes must provide the `extension_uri`. Other properties are optional. """ @classmethod def __subclasshook__(cls, C): if cls is Extension: return hasattr(C, "extension_uri") return NotImplemented # pragma: no cover @abc.abstractproperty def extension_uri(self): """ Get the URI of the extension to the ASDF Standard implemented by this class. Note that this may not uniquely identify the class itself. Returns ------- str """ pass # pragma: no cover @property def legacy_class_names(self): """ Get the set of fully-qualified class names used by older versions of this extension. This allows a new-style implementation of an extension to prevent warnings when a legacy extension is missing. Returns ------- iterable of str """ return set() @property def asdf_standard_requirement(self): """ Get the ASDF Standard version requirement for this extension. Returns ------- str or None If str, PEP 440 version specifier. If None, support all versions. 
""" return None @property def converters(self): """ Get the `asdf.extension.Converter` instances for tags and Python types supported by this extension. Returns ------- iterable of asdf.extension.Converter """ return [] @property def tags(self): """ Get the YAML tags supported by this extension. Returns ------- iterable of str or asdf.extension.TagDefinition """ return [] @property def compressors(self): """ Get the `asdf.extension.Compressor` instances for compression schemes supported by this extension. Returns ------- iterable of asdf.extension.Compressor """ return [] @property def yaml_tag_handles(self): """ Get a dictionary of custom yaml TAG handles defined by the extension. The dictionary key indicates the TAG handles to be placed in the YAML header, the value defines the string for tag replacement. See https://yaml.org/spec/1.1/#tag%20shorthand/ Example: ``{"!foo!": "tag:nowhere.org:custom/"}`` Returns ------- dict """ return {} class ExtensionProxy(Extension, AsdfExtension): """ Proxy that wraps an extension, provides default implementations of optional methods, and carries additional information on the package that provided the extension. """ @classmethod def maybe_wrap(self, delegate): if isinstance(delegate, ExtensionProxy): return delegate else: return ExtensionProxy(delegate) def __init__(self, delegate, package_name=None, package_version=None): if not isinstance(delegate, (Extension, AsdfExtension)): raise TypeError( "Extension must implement the Extension or AsdfExtension interface" ) self._delegate = delegate self._package_name = package_name self._package_version = package_version self._class_name = get_class_name(delegate) self._legacy = isinstance(delegate, AsdfExtension) # Sort these out up-front so that errors are raised when the extension is loaded # and not in the middle of the user's session. The extension will fail to load # and a warning will be emitted, but it won't crash the program. self._legacy_class_names = set() for class_name in getattr(self._delegate, "legacy_class_names", []): if isinstance(class_name, str): self._legacy_class_names.add(class_name) else: raise TypeError("Extension property 'legacy_class_names' must contain str values") if self._legacy: self._legacy_class_names.add(self._class_name) value = getattr(self._delegate, "asdf_standard_requirement", None) if isinstance(value, str): self._asdf_standard_requirement = SpecifierSet(value) elif value is None: self._asdf_standard_requirement = SpecifierSet() else: raise TypeError("Extension property 'asdf_standard_requirement' must be str or None") self._tags = [] for tag in getattr(self._delegate, "tags", []): if isinstance(tag, str): self._tags.append(TagDefinition(tag)) elif isinstance(tag, TagDefinition): self._tags.append(tag) else: raise TypeError("Extension property 'tags' must contain str or asdf.extension.TagDefinition values") self._yaml_tag_handles = getattr(delegate, "yaml_tag_handles", {}) # Process the converters last, since they expect ExtensionProxy # properties to already be available. self._converters = [ConverterProxy(c, self) for c in getattr(self._delegate, "converters", [])] self._compressors = [] if hasattr(self._delegate, "compressors"): for compressor in self._delegate.compressors: if not isinstance(compressor, Compressor): raise TypeError("Extension property 'compressors' must contain instances of asdf.extension.Compressor") self._compressors.append(compressor) @property def extension_uri(self): """ Get the URI of the extension to the ASDF Standard implemented by this class. 
Note that this may not uniquely identify the class itself. Returns ------- str or None """ return getattr(self._delegate, "extension_uri", None) @property def legacy_class_names(self): """ Get the set of fully-qualified class names used by older versions of this extension. This allows a new-style implementation of an extension to prevent warnings when a legacy extension is missing. Returns ------- set of str """ return self._legacy_class_names @property def asdf_standard_requirement(self): """ Get the extension's ASDF Standard requirement. Returns ------- packaging.specifiers.SpecifierSet """ return self._asdf_standard_requirement @property def converters(self): """ Get the extension's converters. Returns ------- list of asdf.extension.Converter """ return self._converters @property def compressors(self): """ Get the extension's compressors. Returns ------- list of asdf.extension.Compressor """ return self._compressors @property def tags(self): """ Get the YAML tags supported by this extension. Returns ------- list of asdf.extension.TagDefinition """ return self._tags @property def types(self): """ Get the legacy extension's ExtensionType subclasses. Returns ------- iterable of asdf.type.ExtensionType """ return getattr(self._delegate, "types", []) @property def tag_mapping(self): """ Get the legacy extension's tag-to-schema-URI mapping. Returns ------- iterable of tuple or callable """ return getattr(self._delegate, "tag_mapping", []) @property def url_mapping(self): """ Get the legacy extension's schema-URI-to-URL mapping. Returns ------- iterable of tuple or callable """ return getattr(self._delegate, "url_mapping", []) @property def delegate(self): """ Get the wrapped extension instance. Returns ------- asdf.extension.Extension or asdf.extension.AsdfExtension """ return self._delegate @property def package_name(self): """ Get the name of the Python package that provided this extension. Returns ------- str or None `None` if the extension was added at runtime. """ return self._package_name @property def package_version(self): """ Get the version of the Python package that provided the extension Returns ------- str or None `None` if the extension was added at runtime. """ return self._package_version @property def class_name(self): """ Get the fully qualified class name of the extension. Returns ------- str """ return self._class_name @property def legacy(self): """ Get the extension's legacy flag. Subclasses of `asdf.extension.AsdfExtension` are marked `True`. Returns ------- bool """ return self._legacy @property def yaml_tag_handles(self): """ Get a dictionary of custom yaml TAG handles defined by the extension. The dictionary key indicates the TAG handles to be placed in the YAML header, the value defines the string for tag replacement. 
See https://yaml.org/spec/1.1/#tag%20shorthand/ Example: ``{"!foo!": "tag:nowhere.org:custom/"}`` Returns ------- dict """ return self._yaml_tag_handles def __eq__(self, other): if isinstance(other, ExtensionProxy): return other.delegate is self.delegate else: return False def __hash__(self): return hash(id(self.delegate)) def __repr__(self): if self.package_name is None: package_description = "(none)" else: package_description = f"{self.package_name}=={self.package_version}" if self.extension_uri is None: uri_description = "(none)" else: uri_description = self.extension_uri return ( f"<ExtensionProxy class: {self.class_name} " f"package: {package_description} " f"legacy: {self.legacy} " f"extension_uri: {uri_description}>" ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/extension/_legacy.py0000644000537500020070000001753700000000000020515 0ustar00wjamiesonSTSCI\scienceimport abc import warnings from functools import lru_cache from .. import types from .. import resolver from ..type_index import AsdfTypeIndex from ..exceptions import AsdfDeprecationWarning class AsdfExtension(metaclass=abc.ABCMeta): """ Abstract base class defining a (legacy) extension to ASDF. New code should use `asdf.extension.Extension` instead. """ @classmethod def __subclasshook__(cls, C): if cls is AsdfExtension: return (hasattr(C, 'types') and hasattr(C, 'tag_mapping')) return NotImplemented @abc.abstractproperty def types(self): """ A list of `asdf.CustomType` subclasses that describe how to store custom objects to and from ASDF. """ pass @abc.abstractproperty def tag_mapping(self): """ A list of 2-tuples or callables mapping YAML tag prefixes to JSON Schema URL prefixes. For each entry: - If a 2-tuple, the first part of the tuple is a YAML tag prefix to match. The second part is a string, in which the following are available as Python formatting tokens: - ``{tag}``: the complete YAML tag. - ``{tag_suffix}``: the part of the YAML tag after the matched prefix. - ``{tag_prefix}``: the matched YAML tag prefix. - If a callable, it is passed the entire YAML tag and must return the entire JSON schema URL if it matches, otherwise, return `None`. Note that while JSON Schema URLs uniquely define a JSON Schema, they do not have to actually exist on an HTTP server and be fetchable (much like XML namespaces). For example, to match all YAML tags with the ``tag:nowhere.org:custom/`` prefix to the ``http://nowhere.org/schemas/custom/`` URL prefix:: return [('tag:nowhere.org:custom/', 'http://nowhere.org/schemas/custom/{tag_suffix}')] """ pass @abc.abstractproperty def url_mapping(self): """ Schema content can also be provided using the resource Mapping API. A list of 2-tuples or callables mapping JSON Schema URLs to other URLs. This is useful if the JSON Schemas are not actually fetchable at their corresponding URLs but are on the local filesystem, or, to save bandwidth, we have a copy of fetchable schemas on the local filesystem. If neither is desirable, it may simply be the empty list. For each entry: - If a 2-tuple, the first part is a URL prefix to match. The second part is a string, where the following are available as Python formatting tokens: - ``{url}``: The entire JSON schema URL - ``{url_prefix}``: The matched URL prefix - ``{url_suffix}``: The part of the URL after the prefix. - If a callable, it is passed the entire JSON Schema URL and must return a resolvable URL pointing to the schema content. If it doesn't match, should return `None`.
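A callable entry might be sketched as (illustrative only)::

    def url_mapping(url):
        prefix = 'http://nowhere.org/schemas/custom/1.0.0/'
        if url.startswith(prefix):
            return 'file:///usr/share/schemas/custom/' + url[len(prefix):] + '.yaml'
        return None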
For example, to map a remote HTTP URL prefix to files installed as data alongside a Python module:: return [('http://nowhere.org/schemas/custom/1.0.0/', asdf.util.filepath_to_url( os.path.join(SCHEMA_PATH, 'stsci.edu')) + '/{url_suffix}.yaml' )] """ pass class AsdfExtensionList: """ Manage a set of extensions that are in effect. """ def __init__(self, extensions): from ._extension import ExtensionProxy extensions = [ExtensionProxy.maybe_wrap(e) for e in extensions] tag_mapping = [] url_mapping = [] validators = {} self._type_index = AsdfTypeIndex() for extension in extensions: tag_mapping.extend(extension.tag_mapping) url_mapping.extend(extension.url_mapping) for typ in extension.types: self._type_index.add_type(typ, extension) validators.update(typ.validators) for sibling in typ.versioned_siblings: self._type_index.add_type(sibling, extension) validators.update(sibling.validators) self._extensions = extensions self._tag_mapping = resolver.Resolver(tag_mapping, 'tag') self._url_mapping = resolver.Resolver(url_mapping, 'url') self._resolver = resolver.ResolverChain(self._tag_mapping, self._url_mapping) self._validators = validators @property def tag_to_schema_resolver(self): """Deprecated. Use `tag_mapping` instead""" warnings.warn( "The 'tag_to_schema_resolver' property is deprecated. Use " "'tag_mapping' instead.", AsdfDeprecationWarning) return self._tag_mapping @property def extensions(self): return self._extensions @property def tag_mapping(self): return self._tag_mapping @property def url_mapping(self): return self._url_mapping @property def resolver(self): return self._resolver @property def type_index(self): return self._type_index @property def validators(self): return self._validators def get_cached_asdf_extension_list(extensions): """ Get a previously created AsdfExtensionList for the specified extensions, or create and cache one if necessary. Building the type index is expensive, so it helps performance to reuse the index when possible. Parameters ---------- extensions : list of asdf.extension.AsdfExtension Returns ------- asdf.extension.AsdfExtensionList """ from ._extension import ExtensionProxy # The tuple makes the extensions hashable so that we # can pass them to the lru_cache method. The ExtensionProxy # overrides __hash__ to return the hashed object id of the wrapped # extension, so this method will only return the same # AsdfExtensionList if the list contains identical extension # instances in identical order. extensions = tuple(ExtensionProxy.maybe_wrap(e) for e in extensions) return _get_cached_asdf_extension_list(extensions) @lru_cache() def _get_cached_asdf_extension_list(extensions): return AsdfExtensionList(extensions) # A kludge in asdf.util.get_class_name allows this class to retain # its original name, despite being moved from extension.py to # this file. class BuiltinExtension: """ This is the "extension" to ASDF that includes all the built-in tags. Even though it's not really an extension and it's always available, it's built in the same way as an extension.
""" @property def types(self): return types._all_asdftypes @property def tag_mapping(self): return resolver.DEFAULT_TAG_TO_URL_MAPPING @property def url_mapping(self): return resolver.DEFAULT_URL_MAPPING class _DefaultExtensions: @property def extensions(self): from ..config import get_config return [e for e in get_config().extensions if e.legacy] @property def extension_list(self): return get_cached_asdf_extension_list(self.extensions) @property def package_metadata(self): return { e.class_name: (e.package_name, e.package_version) for e in self.extensions if e.package_name is not None } def reset(self): """This will be used primarily for testing purposes.""" from ..config import get_config get_config().reset_extensions() @property def resolver(self): return self.extension_list.resolver default_extensions = _DefaultExtensions() def get_default_resolver(): """ Get the resolver that includes mappings from all installed extensions. """ return default_extensions.resolver ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/extension/_manager.py0000644000537500020070000001403200000000000020652 0ustar00wjamiesonSTSCI\sciencefrom functools import lru_cache from ._extension import ExtensionProxy from ..util import get_class_name class ExtensionManager: """ Wraps a list of extensions and indexes their converters by tag and by Python type. Parameters ---------- extensions : iterable of asdf.extension.Extension List of enabled extensions to manage. Extensions placed earlier in the list take precedence. """ def __init__(self, extensions): self._extensions = [ExtensionProxy.maybe_wrap(e) for e in extensions] self._tag_defs_by_tag = {} self._converters_by_tag = {} # This dict has both str and type keys: self._converters_by_type = {} for extension in self._extensions: for tag_def in extension.tags: if tag_def.tag_uri not in self._tag_defs_by_tag: self._tag_defs_by_tag[tag_def.tag_uri] = tag_def for converter in extension.converters: # If a converter's tags do not actually overlap with # the extension tag list, then there's no reason to # use it. if len(converter.tags) > 0: for tag in converter.tags: if tag not in self._converters_by_tag: self._converters_by_tag[tag] = converter for typ in converter.types: if isinstance(typ, str): if typ not in self._converters_by_type: self._converters_by_type[typ] = converter else: type_class_name = get_class_name(typ, instance=False) if typ not in self._converters_by_type and type_class_name not in self._converters_by_type: self._converters_by_type[typ] = converter self._converters_by_type[type_class_name] = converter @property def extensions(self): """ Get the list of extensions. Returns ------- list of asdf.extension.ExtensionProxy """ return self._extensions def handles_tag(self, tag): """ Return `True` if the specified tag is handled by a converter. Parameters ---------- tag : str Tag URI. Returns ------- bool """ return tag in self._converters_by_tag def handles_type(self, typ): """ Returns `True` if the specified Python type is handled by a converter. Parameters ---------- typ : type Returns ------- bool """ return ( typ in self._converters_by_type or get_class_name(typ, instance=False) in self._converters_by_type ) def get_tag_definition(self, tag): """ Get the tag definition for the specified tag. Parameters ---------- tag : str Tag URI. Returns ------- asdf.extension.TagDefinition Raises ------ KeyError Unrecognized tag URI. 
""" try: return self._tag_defs_by_tag[tag] except KeyError: raise KeyError( "No support available for YAML tag '{}'. " "You may need to install a missing extension.".format( tag ) ) from None def get_converter_for_tag(self, tag): """ Get the converter for the specified tag. Parameters ---------- tag : str Tag URI. Returns ------- asdf.extension.Converter Raises ------ KeyError Unrecognized tag URI. """ try: return self._converters_by_tag[tag] except KeyError: raise KeyError( "No support available for YAML tag '{}'. " "You may need to install a missing extension.".format( tag ) ) from None def get_converter_for_type(self, typ): """ Get the converter for the specified Python type. Parameters ---------- typ : type Returns ------- asdf.extension.Converter Raises ------ KeyError Unrecognized type. """ try: return self._converters_by_type[typ] except KeyError: class_name = get_class_name(typ, instance=False) try: return self._converters_by_type[class_name] except KeyError: raise KeyError( "No support available for Python type '{}'. " "You may need to install or enable an extension.".format( get_class_name(typ, instance=False) ) ) from None def get_cached_extension_manager(extensions): """ Get a previously created ExtensionManager for the specified extensions, or create and cache one if necessary. Building the manager is expensive, so it helps performance to reuse it when possible. Parameters ---------- extensions : list of asdf.extension.AsdfExtension or asdf.extension.Extension Returns ------- asdf.extension.ExtensionManager """ from ._extension import ExtensionProxy # The tuple makes the extensions hashable so that we # can pass them to the lru_cache method. The ExtensionProxy # overrides __hash__ to return the hashed object id of the wrapped # extension, so this will method will only return the same # ExtensionManager if the list contains identical extension # instances in identical order. extensions = tuple(ExtensionProxy.maybe_wrap(e) for e in extensions) return _get_cached_extension_manager(extensions) @lru_cache() def _get_cached_extension_manager(extensions): return ExtensionManager(extensions) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/extension/_manifest.py0000644000537500020070000000666600000000000021064 0ustar00wjamiesonSTSCI\scienceimport yaml from ._extension import Extension from ._tag import TagDefinition class ManifestExtension(Extension): """ Extension implementation that reads the extension URI, ASDF Standard requirement, and tag list from a manifest document. Parameters ---------- manifest : dict Parsed manifest. converters : iterable of asdf.extension.Converter, optional Converter instances for the tags and Python types supported by this extension. compressors : iterable of asdf.extension.Compressor, optional Compressor instances to support additional binary block compression options. legacy_class_names : iterable of str, optional Fully-qualified class names used by older versions of this extension. """ @classmethod def from_uri(cls, manifest_uri, **kwargs): """ Construct the extension using the manifest with the specified URI. The manifest document must be registered with ASDF's resource manager. Parameters ---------- manifest_uri : str Manifest URI. See the class docstring for details on keyword parameters. 
""" from ..config import get_config manifest = yaml.safe_load(get_config().resource_manager[manifest_uri]) return cls(manifest, **kwargs) def __init__(self, manifest, *, legacy_class_names=None, converters=None, compressors=None): self._manifest = manifest if legacy_class_names is None: self._legacy_class_names = [] else: self._legacy_class_names = legacy_class_names if converters is None: self._converters = [] else: self._converters = converters if compressors is None: self._compressors = [] else: self._compressors = compressors @property def extension_uri(self): return self._manifest["extension_uri"] @property def legacy_class_names(self): return self._legacy_class_names @property def asdf_standard_requirement(self): version = self._manifest.get("asdf_standard_requirement", None) if version is None: return None elif isinstance(version, str): return "=={}".format(version) else: specifiers = [] for prop, operator in [("gt", ">"), ("gte", ">="), ("lt", "<"), ("lte", "<=")]: value = version.get(prop) if value: specifiers.append("{}{}".format(operator, value)) return ",".join(specifiers) @property def converters(self): return self._converters @property def compressors(self): return self._compressors @property def tags(self): result = [] for tag in self._manifest.get("tags", []): if isinstance(tag, str): # ExtensionProxy knows how to handle str tags. result.append(tag) elif isinstance(tag, dict): result.append( TagDefinition( tag["tag_uri"], schema_uris=tag.get("schema_uri"), title=tag.get("title"), description=tag.get("description"), ) ) else: raise TypeError("Malformed manifest document") return result ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/extension/_tag.py0000644000537500020070000000501300000000000020012 0ustar00wjamiesonSTSCI\scienceimport warnings from asdf.exceptions import AsdfDeprecationWarning class TagDefinition: """ Container for properties of a custom YAML tag. Parameters ---------- tag_uri : str Tag URI. schema_uri : str, optional URI of the schema that should be used to validate objects with this tag. title : str, optional Short description of the tag. description : str, optional Long description of the tag. """ def __init__(self, tag_uri, *, schema_uris=None, title=None, description=None): if "*" in tag_uri: raise ValueError("URI patterns are not permitted in TagDefinition") self._tag_uri = tag_uri if schema_uris is None: self._schema_uris = [] else: if isinstance(schema_uris, list): self._schema_uris = schema_uris else: self._schema_uris = [schema_uris] self._title = title self._description = description @property def tag_uri(self): """ Get the tag URI. Returns ------- str """ return self._tag_uri @property def schema_uri(self): """ DEPRECATED Get the URI of the schema that should be used to validate objects with this tag. Returns ------- str or None """ warnings.warn( "The TagDefinition.schema_uri property is deprecated. Use TagDefinition.schema_uris instead.", AsdfDeprecationWarning ) if len(self._schema_uris) == 0: return None elif len(self._schema_uris) == 1: return self._schema_uris[0] else: raise RuntimeError("Cannot use TagDefinition.schema_uri when multiple schema URIs are present") @property def schema_uris(self): """ Get the URIs of the schemas that should be used to validate objects with this tag. Returns ------- list """ return self._schema_uris @property def title(self): """ Get the short description of the tag. 
Returns ------- str or None """ return self._title @property def description(self): """ Get the long description of the tag. Returns ------- str or None """ return self._description def __repr__(self): return ("<TagDefinition URI: {}>".format(self.tag_uri)) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9393158 asdf-2.9.2/asdf/extern/0000755000537500020070000000000000000000000016020 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/extern/RangeHTTPServer.py0000755000537500020070000001566700000000000021327 0ustar00wjamiesonSTSCI\science#!/usr/bin/env python #Portions Copyright (C) 2009,2010 Xyne #Portions Copyright (C) 2011 Sean Goller # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # (version 2) as published by the Free Software Foundation. # # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Range HTTP Server. This module builds on BaseHTTPServer by implementing the standard GET and HEAD requests in a fairly straightforward manner, and includes support for the Range header. """ __version__ = "0.1" __all__ = ["RangeHTTPRequestHandler"] import os import shutil import posixpath import http.server import urllib.parse class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler): # pragma: no cover """Simple HTTP request handler with GET and HEAD commands. This serves files from the current directory and any of its subdirectories. The MIME type for files is determined by calling the .guess_type() method. The GET and HEAD requests are identical except that the HEAD request omits the actual contents of the file. """ server_version = "RangeHTTP/" + __version__ def do_GET(self): """Serve a GET request.""" f, start_range, end_range = self.send_head() if f: f.seek(start_range, 0) chunk = 0x1000 total = 0 while chunk > 0: if start_range + chunk > end_range: chunk = end_range - start_range try: self.wfile.write(f.read(chunk)) except: break total += chunk start_range += chunk f.close() def do_HEAD(self): """Serve a HEAD request.""" f, start_range, end_range = self.send_head() if f: f.close() def send_head(self): """Common code for GET and HEAD commands. This sends the response code and MIME headers. Return value is either a file object (which has to be copied to the outputfile by the caller unless the command was HEAD, and must be closed by the caller under all circumstances), or None, in which case the caller has nothing further to do. """ path = self.translate_path(self.path) f = None if os.path.isdir(path): if not self.path.endswith('/'): # redirect browser - doing basically what apache does self.send_response(301) self.send_header("Location", self.path + "/") self.end_headers() return (None, 0, 0) for index in "index.html", "index.htm": index = os.path.join(path, index) if os.path.exists(index): path = index break else: return self.list_directory(path) ctype = self.guess_type(path) try: # Always read in binary mode.
Opening files in text mode may cause # newline translations, making the actual size of the content # transmitted *less* than the content-length! f = open(path, 'rb') except IOError: self.send_error(404, "File not found") return (None, 0, 0) if "Range" in self.headers: self.send_response(206) else: self.send_response(200) self.send_header("Content-type", ctype) fs = os.fstat(f.fileno()) size = int(fs[6]) start_range = 0 end_range = size self.send_header("Accept-Ranges", "bytes") if "Range" in self.headers: s, e = self.headers['range'][6:].split('-', 1) sl = len(s) el = len(e) if sl > 0: start_range = int(s) if el > 0: end_range = int(e) + 1 elif el > 0: ei = int(e) if ei < size: start_range = size - ei self.send_header( "Content-Range", 'bytes ' + str(start_range) + '-' + str(end_range - 1) + '/' + str(size)) self.send_header("Content-Length", end_range - start_range) self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) self.end_headers() return (f, start_range, end_range) def translate_path(self, path): """Translate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) """ # abandon query parameters path = path.split('?', 1)[0] path = path.split('#', 1)[0] path = posixpath.normpath(urllib.parse.unquote(path)) words = path.split('/') words = filter(None, words) path = os.getcwd() for word in words: drive, word = os.path.splitdrive(word) head, word = os.path.split(word) if word in (os.curdir, os.pardir): continue path = os.path.join(path, word) return path def copyfile(self, source, outputfile): """Copy all data between two file objects. The SOURCE argument is a file object open for reading (or anything with a read() method) and the DESTINATION argument is a file object open for writing (or anything with a write() method). The only reason for overriding this would be to change the block size or perhaps to replace newlines by CRLF -- note however that the default server uses this to copy binary data as well. """ shutil.copyfileobj(source, outputfile) def guess_type(self, path): """Guess the type of a file. Argument is a PATH (a filename). Return value is a string of the form type/subtype, usable for a MIME Content-type header. The default implementation looks the file's extension up in the table self.extensions_map, using application/octet-stream as a default; however it would be permissible (if slow) to look inside the data to make a better guess.
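For example (illustrative; by default this handler's table only
contains the fallback entry)::

    handler.extensions_map['.yaml'] = 'text/yaml'
    ctype = handler.guess_type('/srv/files/demo.yaml')  # 'text/yaml'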
""" base, ext = posixpath.splitext(path) if ext in self.extensions_map: return self.extensions_map[ext] ext = ext.lower() if ext in self.extensions_map: return self.extensions_map[ext] else: return self.extensions_map[''] extensions_map = {'': 'unknown'} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/extern/__init__.py0000644000537500020070000000001000000000000020120 0ustar00wjamiesonSTSCI\science""" """ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/extern/atomicfile.py0000644000537500020070000001011200000000000020501 0ustar00wjamiesonSTSCI\scienceimport os import tempfile import sys import errno if os.name == 'nt': # pragma: no cover import random import time _rename = lambda src, dst: False _rename_atomic = lambda src, dst: False import ctypes _MOVEFILE_REPLACE_EXISTING = 0x1 _MOVEFILE_WRITE_THROUGH = 0x8 _MoveFileEx = ctypes.windll.kernel32.MoveFileExW def _rename(src, dst): if not isinstance(src, str): src = str(src, sys.getfilesystemencoding()) if not isinstance(dst, str): dst = str(dst, sys.getfilesystemencoding()) if _rename_atomic(src, dst): return True retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH) if not rv: time.sleep(0.001) retry += 1 return rv # new in Vista and Windows Server 2008 _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW _CloseHandle = ctypes.windll.kernel32.CloseHandle def _rename_atomic(src, dst): ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Atomic rename') if ta == -1: return False try: retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileTransacted(src, dst, None, None, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH, ta) if rv: rv = _CommitTransaction(ta) break else: time.sleep(0.001) retry += 1 return rv finally: _CloseHandle(ta) def atomic_rename(src, dst): # Try atomic or pseudo-atomic rename if _rename(src, dst): return # Fall back to "move away and replace" try: os.rename(src, dst) except OSError as e: if e.errno != errno.EEXIST: raise old = "%s-%08x" % (dst, random.randint(0, sys.maxsize)) os.rename(dst, old) os.rename(src, dst) try: os.unlink(old) except Exception: pass else: atomic_rename = os.rename class _AtomicWFile(object): """Helper class for :func:`atomic_open`.""" def __init__(self, f, tmp_filename, filename): self._f = f self._tmp_filename = tmp_filename self._filename = filename def __getattr__(self, attr): return getattr(self._f, attr) def __enter__(self): return self @property def name(self): return self._filename def close(self): if self._f.closed: return self._f.close() atomic_rename(self._tmp_filename, self._filename) def __exit__(self, exc_type, exc_value, tb): if exc_type is None: self.close() else: self._f.close() try: os.remove(self._tmp_filename) except OSError: pass def __repr__(self): return '<%s %s%r, mode %r>' % ( self.__class__.__name__, self._f.closed and 'closed ' or '', self._filename, self._f.mode ) def atomic_open(filename, mode='w'): """Works like a regular `open()` but writes updates into a temporary file instead of the given file and moves it over when the file is closed. 
The file returned behaves as if it was a regular Python """ if mode in ('r', 'rb', 'r+', 'rb+', 'a', 'ab'): raise TypeError('Read or append modes don\'t work with atomic_open') f = tempfile.NamedTemporaryFile(mode, prefix='.___atomic_write', dir=os.path.dirname(filename), delete=False) return _AtomicWFile(f, f.name, filename) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/fits_embed.py0000644000537500020070000002744500000000000017202 0ustar00wjamiesonSTSCI\science""" Utilities for embedded ADSF files in FITS. """ import io import re import numpy as np from . import asdf from . import block from . import util from . import generic_io try: from astropy.io import fits except ImportError: raise ImportError("AsdfInFits requires astropy") ASDF_EXTENSION_NAME = 'ASDF' FITS_SOURCE_PREFIX = 'fits:' __all__ = ['AsdfInFits'] class _FitsBlock: def __init__(self, hdu): self._hdu = hdu def __repr__(self): return ''.format(self._hdu.name, self._hdu.ver) def __len__(self): return self._hdu.data.nbytes @property def data(self): return self._hdu.data @property def readonly(self): return False @property def array_storage(self): return 'fits' @property def trust_data_dtype(self): # astropy.io.fits returns arrays in native byte order # when it has to apply scaling. In that case, we don't # want to interpret the bytes as big-endian, since astropy # has already converted them properly. return True class _EmbeddedBlockManager(block.BlockManager): def __init__(self, hdulist, asdffile): self._hdulist = hdulist super(_EmbeddedBlockManager, self).__init__(asdffile) def get_block(self, source): if (isinstance(source, str) and source.startswith(FITS_SOURCE_PREFIX)): parts = re.match( # All printable ASCII characters are allowed in EXTNAME '((?P[ -~]+),)?(?P[0-9]+)', source[len(FITS_SOURCE_PREFIX):]) if parts is not None: ver = int(parts.group('ver')) if parts.group('name'): pair = (parts.group('name'), ver) else: pair = ver return _FitsBlock(self._hdulist[pair]) else: raise ValueError("Can not parse source '{0}'".format(source)) return super(_EmbeddedBlockManager, self).get_block(source) def get_source(self, block): if isinstance(block, _FitsBlock): for i, hdu in enumerate(self._hdulist): if hdu is block._hdu: if hdu.name == '': return '{0}{1}'.format( FITS_SOURCE_PREFIX, i) else: return '{0}{1},{2}'.format( FITS_SOURCE_PREFIX, hdu.name, hdu.ver) raise ValueError("FITS block seems to have been removed") return super(_EmbeddedBlockManager, self).get_source(block) def find_or_create_block_for_array(self, arr, ctx): from .tags.core import ndarray if not isinstance(arr, ndarray.NDArrayType): base = util.get_array_base(arr) for hdu in self._hdulist: if hdu.data is None: continue if base is util.get_array_base(hdu.data): return _FitsBlock(hdu) return super( _EmbeddedBlockManager, self).find_or_create_block_for_array(arr, ctx) class AsdfInFits(asdf.AsdfFile): """ Embed ASDF tree content in a FITS file. The YAML rendering of the tree is stored in a special FITS extension with the EXTNAME of ``ASDF``. Arrays in the ASDF tree may refer to binary data in other FITS extensions by setting source to a string with the prefix ``fits:`` followed by an ``EXTNAME``, ``EXTVER`` pair, e.g. ``fits:SCI,0``. 
Examples -------- Create a FITS file with ASDF structure, based on an existing FITS file:: import numpy as np from astropy.io import fits from asdf import fits_embed hdulist = fits.HDUList([ fits.PrimaryHDU(), fits.ImageHDU(np.arange(512, dtype=np.float32), name='SCI'), fits.ImageHDU(np.zeros(512, dtype=np.int16), name='DQ')]) tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist['DQ'].data, 'wcs': 'WCS info' } } } ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to('test.fits') # doctest: +SKIP """ def __init__(self, hdulist=None, tree=None, **kwargs): if hdulist is None: hdulist = fits.HDUList() super(AsdfInFits, self).__init__(tree=tree, **kwargs) self._blocks = _EmbeddedBlockManager(hdulist, self) self._hdulist = hdulist self._close_hdulist = False def __exit__(self, type, value, traceback): super(AsdfInFits, self).__exit__(type, value, traceback) if self._close_hdulist: self._hdulist.close() self._tree = {} def close(self): super(AsdfInFits, self).close() if self._close_hdulist: self._hdulist.close() self._tree = {} @classmethod def open(cls, fd, uri=None, validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, strict_extension_check=False, ignore_missing_extensions=False, **kwargs): """Creates a new AsdfInFits object based on given input data Parameters ---------- fd : FITS HDUList instance, URI string, or file-like object May be an already opened instance of a FITS HDUList instance, string ``file`` or ``http`` URI, or a Python file-like object. uri : str, optional The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, if possible and if created from `asdf.open`. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. Requires reading the entire file, so disabled by default. extensions : object, optional Additional extensions to use when reading and writing the file. May be any of the following: `asdf.extension.AsdfExtension`, `asdf.extension.Extension`, `asdf.extension.AsdfExtensionList` or a `list` extensions. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. strict_extension_check : bool, optional When `True`, if the given ASDF file contains metadata about the extensions used to create it, and if those extensions are not installed, opening the file will fail. When `False`, opening a file under such conditions will cause only a warning. Defaults to `False`. ignore_missing_extensions : bool, optional When `True`, do not raise warnings when a file is read that contains metadata about extensions that are not available. Defaults to `False`. validate_on_read : bool, optional DEPRECATED. When `True`, validate the newly opened file against tag and custom schemas. Recommended unless the file is already known to be valid. 
""" return cls._open_impl(fd, uri=uri, validate_checksums=validate_checksums, extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, **kwargs) @classmethod def _open_impl(cls, fd, uri=None, validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, strict_extension_check=False, ignore_missing_extensions=False, **kwargs): close_hdulist = False if isinstance(fd, fits.hdu.hdulist.HDUList): hdulist = fd else: uri = generic_io.get_uri(fd) try: hdulist = fits.open(fd) # Since we created this HDUList object, we need to be # responsible for cleaning up upon close() or __exit__ close_hdulist = True except IOError: msg = "Failed to parse given file '{}'. Is it FITS?" raise ValueError(msg.format(uri)) self = cls(hdulist, uri=uri, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag) self._close_hdulist = close_hdulist try: asdf_extension = hdulist[ASDF_EXTENSION_NAME] except (KeyError, IndexError, AttributeError): # This means there is no ASDF extension return self generic_file = generic_io.get_file(io.BytesIO(asdf_extension.data), mode='r', uri=uri) try: return cls._open_asdf(self, generic_file, validate_checksums=validate_checksums, extensions=extensions, strict_extension_check=strict_extension_check, ignore_missing_extensions=ignore_missing_extensions, **kwargs) except RuntimeError: self.close() raise def _create_hdu(self, buff, use_image_hdu): # Allow writing to old-style ImageHDU for backwards compatibility if use_image_hdu: array = np.frombuffer(buff.getvalue(), np.uint8) return fits.ImageHDU(array, name=ASDF_EXTENSION_NAME) else: data = np.array(buff.getbuffer(), dtype=np.uint8)[None, :] fmt = '{}B'.format(len(data[0])) column = fits.Column(array=data, format=fmt, name='ASDF_METADATA') return fits.BinTableHDU.from_columns([column], name=ASDF_EXTENSION_NAME) def _update_asdf_extension(self, all_array_storage=None, all_array_compression=None, pad_blocks=False, use_image_hdu=False, **kwargs): if self.blocks.streamed_block is not None: raise ValueError( "Can not save streamed data to ASDF-in-FITS file.") buff = io.BytesIO() super(AsdfInFits, self).write_to( buff, all_array_storage=all_array_storage, all_array_compression=all_array_compression, pad_blocks=pad_blocks, include_block_index=False, **kwargs) if ASDF_EXTENSION_NAME in self._hdulist: del self._hdulist[ASDF_EXTENSION_NAME] self._hdulist.append(self._create_hdu(buff, use_image_hdu)) def write_to(self, filename, all_array_storage=None, all_array_compression=None, pad_blocks=False, use_image_hdu=False, *args, **kwargs): if "auto_inline" in kwargs: asdf_kwargs = {"auto_inline": kwargs.pop("auto_inline")} else: asdf_kwargs = {} self._update_asdf_extension( all_array_storage=all_array_storage, all_array_compression=all_array_compression, pad_blocks=pad_blocks, use_image_hdu=use_image_hdu, **asdf_kwargs) self._hdulist.writeto(filename, *args, **kwargs) def update(self, all_array_storage=None, all_array_compression=None, pad_blocks=False, **kwargs): raise NotImplementedError( "In-place update is not currently implemented for ASDF-in-FITS") self._update_asdf_extension( all_array_storage=all_array_storage, all_array_compression=all_array_compression, pad_blocks=pad_blocks, **kwargs) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 
asdf-2.9.2/asdf/generic_io.py0000644000537500020070000010105000000000000017165 0ustar00wjamiesonSTSCI\science""" This provides abstractions around a number of different file and stream types available to Python so that they are always used in the most efficient way. The classes in this module should not be instantiated directly, but instead, one should use the factory function `get_file`. """ import io import os import re import sys import pathlib import tempfile from os import SEEK_SET, SEEK_CUR, SEEK_END from urllib.request import url2pathname, urlopen import numpy as np from . import util from .exceptions import DelimiterNotFoundError from .extern import atomicfile from .util import patched_urllib_parse __all__ = ['get_file', 'get_uri', 'resolve_uri', 'relative_uri'] _local_file_schemes = ['', 'file'] if sys.platform.startswith('win'): # pragma: no cover import string _local_file_schemes.extend(string.ascii_letters) def _check_bytes(fd, mode): """ Checks whether a given file-like object is opened in binary mode. """ # On Python 3, doing fd.read(0) on an HTTPResponse object causes # it to not be able to read any further, so we do this different # kind of check, which, unfortunately, is not as robust. if isinstance(fd, io.IOBase): if isinstance(fd, io.TextIOBase): return False return True if 'r' in mode: x = fd.read(0) if not isinstance(x, bytes): return False elif 'w' in mode: try: fd.write(b'') except TypeError: return False return True def resolve_uri(base, uri): """ Resolve a URI against a base URI. """ if base is None: base = '' resolved = patched_urllib_parse.urljoin(base, uri) parsed = patched_urllib_parse.urlparse(resolved) if parsed.path != '' and not parsed.path.startswith('/'): raise ValueError( "Resolved to relative URL") return resolved def relative_uri(source, target): """ Make a relative URI from source to target. """ su = patched_urllib_parse.urlparse(source) tu = patched_urllib_parse.urlparse(target) extra = list(tu[3:]) relative = None if tu[0] == '' and tu[1] == '': if tu[2] == su[2]: relative = '' elif not tu[2].startswith('/'): relative = tu[2] elif su[0:2] != tu[0:2]: return target if relative is None: if tu[2] == su[2]: relative = '' else: relative = os.path.relpath(tu[2], os.path.dirname(su[2])) if relative == '.': relative = '' relative = patched_urllib_parse.urlunparse(["", "", relative] + extra) return relative class _TruncatedReader: """ Reads until a given delimiter is found. Only works with RandomAccessFile and InputStream, though as this is a private class, this is not explicitly enforced. 
""" def __init__(self, fd, delimiter, readahead_bytes, delimiter_name=None, include=False, initial_content=b'', exception=True): self._fd = fd self._delimiter = delimiter self._readahead_bytes = readahead_bytes if delimiter_name is None: delimiter_name = delimiter self._delimiter_name = delimiter_name self._include = include self._initial_content = initial_content self._trailing_content = b'' self._exception = exception self._past_end = False def read(self, nbytes=None): if self._past_end: content = self._trailing_content[:nbytes] if nbytes is None: self._trailing_content = b'' else: self._trailing_content = self._trailing_content[nbytes:] return content if nbytes is None: content = self._fd.peek() elif nbytes <= len(self._initial_content): content = self._initial_content[:nbytes] self._initial_content = self._initial_content[nbytes:] return content else: content = self._fd.peek(nbytes - len(self._initial_content) + self._readahead_bytes) if content == b'': if self._exception: raise DelimiterNotFoundError("{0} not found".format(self._delimiter_name)) self._past_end = True return content index = re.search(self._delimiter, content) if index is not None: if self._include: index = index.end() else: index = index.start() content = content[:index] self._past_end = True elif nbytes is None and self._exception: # Read the whole file and didn't find the delimiter raise DelimiterNotFoundError("{0} not found".format(self._delimiter_name)) else: if nbytes: content = content[:nbytes - len(self._initial_content)] self._fd.fast_forward(len(content)) if self._initial_content: content = self._initial_content + content self._initial_content = b'' if self._past_end and nbytes: self._trailing_content = content[nbytes:] content = content[:nbytes] return content class GenericFile(metaclass=util.InheritDocstrings): """ Base class for an abstraction layer around a number of different file-like types. Each of its subclasses handles a particular kind of file in the most efficient way possible. This class should not be instantiated directly, but instead the factory function `get_file` should be used to get the correct subclass for the given file-like object. """ def __init__(self, fd, mode, close=False, uri=None): """ Parameters ---------- fd : file-like object The particular kind of file-like object must match the subclass of `GenericFile` being instantiated. mode : str Must be ``"r"`` (read), ``"w"`` (write), or ``"rw"`` (read/write). close : bool, optional When ``True``, close the given `fd` in the ``__exit__`` method, i.e. at the end of the with block. Should be set to ``True`` when this object "owns" the file object. Default: ``False``. uri : str, optional The file path or URI used to open the file. This is used to resolve relative URIs when the file refers to external sources. 
""" if not _check_bytes(fd, mode): raise ValueError( "File-like object must be opened in binary mode.") # can't import at the top level due to circular import from .config import get_config self._asdf_get_config = get_config self._fd = fd self._mode = mode self._close = close self._size = None self._uri = uri self.block_size = get_config().io_block_size def __enter__(self): return self def __exit__(self, type, value, traceback): if self._close: if hasattr(self._fd, '__exit__'): self._fd.__exit__(type, value, traceback) else: self._fd.close() @property def block_size(self): return self._blksize @block_size.setter def block_size(self, block_size): if block_size == -1: try: block_size = os.fstat(self._fd.fileno()).st_blksize except Exception: block_size = io.DEFAULT_BUFFER_SIZE if block_size <= 0: raise ValueError(f'block_size ({block_size}) must be > 0') self._blksize = block_size @property def mode(self): """ The mode of the file. Will be ``'r'``, ``'w'`` or ``'rw'``. """ return self._mode @property def uri(self): """ The base uri of the file. """ return self._uri def read(self, size=-1): """ Read at most size bytes from the file (less if the read hits EOF before obtaining size bytes). If the size argument is negative or omitted, read all data until EOF is reached. The bytes are returned as a `bytes` object. An empty `bytes` object is returned when EOF is encountered immediately. Only available if `readable` returns `True`. """ # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' return self._fd.read(size) def read_block(self): """ Read a "block" from the file. For real filesystem files, the block is the size of a native filesystem block. """ return self.read(self._blksize) def read_blocks(self, size): """ Read ``size`` bytes of data from the file, one block at a time. The result is a generator where each value is a bytes object. """ for i in range(0, size, self._blksize): thissize = min(self._blksize, size - i) yield self.read(thissize) def write(self, content): self._fd.write(content) write.__doc__ = """ Write a string to the file. There is no return value. Due to buffering, the string may not actually show up in the file until the flush() or close() method is called. Only available if `writable` returns `True`. """ def write_array(self, array): """ Write array content to the file. Array must be 1D contiguous so that this method can avoid making assumptions about the intended memory layout. Endianness is preserved. Parameters ---------- array : np.ndarray Must be 1D contiguous. """ if len(array.shape) != 1 or not array.flags.contiguous: raise ValueError("Requires 1D contiguous array.") self.write(array.data) def peek(self, size=-1): """ Read bytes of the file without consuming them. This method must be implemented by all GenericFile implementations that provide ASDF input (those that aren't seekable should use a buffer to store peeked bytes). Parameters ---------- size : int Number of bytes to peek, or -1 to peek all remaining bytes. """ if self.seekable(): cursor = self.tell() content = self.read(size) self.seek(cursor, SEEK_SET) return content else: raise RuntimeError("Non-seekable file") def seek(self, offset, whence=0): """ Set the file's current position. Only available if `seekable` returns `True`. Parameters ---------- offset : integer Offset, in bytes. 
whence : integer, optional The `whence` argument is optional and defaults to SEEK_SET or 0 (absolute file positioning); other values are SEEK_CUR or 1 (seek relative to the current position) and SEEK_END or 2 (seek relative to the file’s end). """ result = self._fd.seek(offset, whence) self.tell() return result def tell(self): """ Return the file's current position, in bytes. Only available in `seekable` returns `True`. """ return self._fd.tell() def flush(self): """ Flush the internal buffer. """ self._fd.flush() def close(self): """ Close the file. The underlying file-object will only be closed if ``close=True`` was passed to the constructor. """ if self._close: self._fd.close() def truncate(self, size=None): """ Truncate the file to the given size. """ raise NotImplementedError() def writable(self): """ Returns `True` if the file can be written to. """ return 'w' in self.mode def readable(self): """ Returns `True` if the file can be read from. """ return 'r' in self.mode def seekable(self): """ Returns `True` if the file supports random access (`seek` and `tell`). """ return False def can_memmap(self): """ Returns `True` if the file supports memmapping. """ return False def is_closed(self): """ Returns `True` if the underlying file object is closed. """ return self._fd.closed def read_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Reads until a match for a given regular expression is found. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Returns ------- content : bytes The content from the current position in the file, up to the delimiter. Includes the delimiter if `include` is ``True``. Raises ------ DelimiterNotFoundError : If the delimiter is not found before the end of the file. """ buff = io.BytesIO() reader = self.reader_until( delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) while True: content = reader.read(self.block_size) buff.write(content) if len(content) < self.block_size: break return buff.getvalue() def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Returns a readable file-like object that treats the given delimiter as the end-of-file. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. 
Raises ------ DelimiterNotFoundError : If the delimiter is not found before the end of the file. """ raise NotImplementedError() def seek_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Seeks in the file until a match for a given regular expression is found. This is similar to ``read_until``, except the intervening content is not retained. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Returns ------- bool ``True`` if the delimiter was found. Raises ------ DelimiterNotFoundError : If ``exception`` is enabled and the delimiter is not found before the end of the file. """ reader = self.reader_until( delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=True) try: while reader.read(self.block_size) != b'': pass return True except DelimiterNotFoundError as e: if exception: raise e else: return False def fast_forward(self, size): """ Move the file position forward by `size`. """ raise NotImplementedError() def clear(self, nbytes): """ Write nbytes of zeros. """ blank_data = b'\0' * self.block_size for i in range(0, nbytes, self.block_size): length = min(nbytes - i, self.block_size) self.write(blank_data[:length]) def memmap_array(self, offset, size): """ Memmap a chunk of the file into a `np.core.memmap` object. Parameters ---------- offset : integer The offset, in bytes, in the file. size : integer The size of the data to memmap. Returns ------- array : np.core.memmap """ raise NotImplementedError() def read_into_array(self, size): """ Read a chunk of the file into a uint8 array. Parameters ---------- size : integer The size of the data. Returns ------- array : np.core.memmap """ buff = self.read(size) return np.frombuffer(buff, np.uint8, size, 0) class GenericWrapper: """ A wrapper around a `GenericFile` object so that closing only happens in the very outer layer. """ def __init__(self, fd): self._fd = fd def __enter__(self): return self def __exit__(self, type, value, traceback): pass def __getattr__(self, attr): return getattr(self._fd, attr) class RandomAccessFile(GenericFile): """ The base class of file types that support random access. """ def seekable(self): return True def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): return _TruncatedReader( self, delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) def fast_forward(self, size): if size < 0: self.seek(0, SEEK_END) self.seek(size, SEEK_CUR) if sys.platform.startswith('win'): # pragma: no cover def truncate(self, size=None): # ftruncate doesn't work on an open file in Windows. The # best we can do is clear the extra bytes or add extra # bytes to the end. 
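# Concretely: the code below measures the current size by seeking to the
# end, then zero-fills the span between the requested size and the old
# end -- a "shrink" leaves the file length unchanged but clears the tail,
# while a "grow" appends zero blocks -- and finally parks the file
# position at `size`, as a POSIX ftruncate followed by a seek would.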
if size is None: size = self.tell() self.seek(0, SEEK_END) file_size = self.tell() if size < file_size: self.seek(size, SEEK_SET) nbytes = file_size - size elif size > file_size: nbytes = size - file_size else: nbytes = 0 block = b'\0' * self.block_size while nbytes > 0: self.write(block[:min(nbytes, self.block_size)]) nbytes -= self.block_size self.seek(size, SEEK_SET) else: def truncate(self, size=None): if size is None: self._fd.truncate() else: self._fd.truncate(size) self.seek(size, SEEK_SET) class RealFile(RandomAccessFile): """ Handles "real" files on a filesystem. """ def __init__(self, fd, mode, close=False, uri=None): super(RealFile, self).__init__(fd, mode, close=close, uri=uri) stat = os.fstat(fd.fileno()) self._size = stat.st_size if (uri is None and isinstance(fd.name, str)): self._uri = util.filepath_to_url(os.path.abspath(fd.name)) def write_array(self, arr): if isinstance(arr, np.memmap) and getattr(arr, 'fd', None) is self: arr.flush() self.fast_forward(len(arr.data)) else: if len(arr.shape) != 1 or not arr.flags.contiguous: raise ValueError("Requires 1D contiguous array.") self._fd.write(arr.data) def can_memmap(self): return True def memmap_array(self, offset, size): if 'w' in self._mode: mode = 'r+' else: mode = 'r' mmap = np.memmap( self._fd, mode=mode, offset=offset, shape=size) mmap.fd = self return mmap def read_into_array(self, size): return np.fromfile(self._fd, dtype=np.uint8, count=size) class MemoryIO(RandomAccessFile): """ Handles random-access memory buffers, mainly `io.BytesIO` and `StringIO.StringIO`. """ def __init__(self, fd, mode, uri=None): super(MemoryIO, self).__init__(fd, mode, uri=uri) tell = fd.tell() fd.seek(0, 2) self._size = fd.tell() fd.seek(tell, 0) def read_into_array(self, size): buf = self._fd.getvalue() offset = self._fd.tell() result = np.frombuffer(buf, np.uint8, size, offset) # Copy the buffer so the original memory can be released. result = result.copy() self.seek(size, SEEK_CUR) return result class InputStream(GenericFile): """ Handles an input stream, such as stdin. """ def __init__(self, fd, mode='r', close=False, uri=None): super(InputStream, self).__init__(fd, mode, close=close, uri=uri) self._fd = fd self._buffer = b'' def peek(self, size=-1): if size < 0: self._buffer += self._fd.read() else: len_buffer = len(self._buffer) if len_buffer < size: self._buffer += self._fd.read(size - len_buffer) return self._buffer def read(self, size=-1): # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' len_buffer = len(self._buffer) if len_buffer == 0: return self._fd.read(size) elif size < 0: self._buffer += self._fd.read() buffer = self._buffer self._buffer = b'' return buffer elif len_buffer < size: if len_buffer < size: self._buffer += self._fd.read(size - len(self._buffer)) buffer = self._buffer self._buffer = b'' return buffer else: buffer = self._buffer[:size] self._buffer = self._buffer[size:] return buffer def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): return _TruncatedReader( self, delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) def fast_forward(self, size): if size >= 0 and len(self.read(size)) != size: raise IOError("Read past end of file") def read_into_array(self, size): try: # See if Numpy can handle this as a real file first... 
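# np.fromfile requires a real file with a usable file descriptor; for
# sockets, pipes, and other stream-like objects it raises IOError or
# AttributeError, which the except clause below catches in order to
# fall back to read() plus np.frombuffer().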
return np.fromfile(self._fd, np.uint8, size) except (IOError, AttributeError): # Else, fall back to reading into memory and then # returning the Numpy array. data = self.read(size) # We need to copy the array, so it is writable result = np.frombuffer(data, np.uint8, size) # When creating an array from a buffer, it is read-only. # If we need a read/write array, we have to copy it. if 'w' in self._mode: result = result.copy() return result class OutputStream(GenericFile): """ Handles an output stream, such as stdout. """ def __init__(self, fd, close=False, uri=None): super(OutputStream, self).__init__(fd, 'w', close=close, uri=uri) self._fd = fd def fast_forward(self, size): if size < 0: return self.clear(size) def _http_to_temp(init, mode, uri=None): """ Stream the content of an http or https URL to a temporary file. Parameters ---------- init : str HTTP or HTTPS URL. mode : str ASDF file mode. The temporary file will always be opened in w+b mode, but the resulting GenericFile will report itself writable based on this value. uri : str, optional URI against which relative paths within the file are resolved. If None, the init value will be used. Returns ------- RealFile Temporary file. """ from asdf import get_config fd = tempfile.NamedTemporaryFile("w+b") block_size = get_config().io_block_size if block_size == -1: try: block_size = os.fstat(fd.fileno()).st_blksize except Exception: block_size = io.DEFAULT_BUFFER_SIZE try: # This method is only called with http and https schemes: with urlopen(init) as response: # nosec chunk = response.read(block_size) while len(chunk) > 0: fd.write(chunk) chunk = response.read(block_size) fd.seek(0) return RealFile(fd, mode, close=True, uri=uri or init) except Exception: fd.close() raise def get_uri(file_obj): """ Returns the uri of the given file object Parameters ---------- uri : object """ if isinstance(file_obj, str): return file_obj if isinstance(file_obj, GenericFile): return file_obj.uri # A catch-all for types from Python's io module that have names return getattr(file_obj, 'name', '') def get_file(init, mode='r', uri=None, close=False): """ Returns a `GenericFile` instance suitable for wrapping the given object `init`. If passed an already open file-like object, it must be opened for reading/writing in binary mode. It is the caller's responsibility to close it. Parameters ---------- init : object `init` may be: - A `bytes` or `unicode` file path or ``file:`` or ``http:`` url. - A Python 2 `file` object. - An `io.IOBase` object (the default file object on Python 3). - A ducktyped object that looks like a file object. If `mode` is ``"r"``, it must have a ``read`` method. If `mode` is ``"w"``, it must have a ``write`` method. If `mode` is ``"rw"`` it must have the ``read``, ``write``, ``tell`` and ``seek`` methods. - A `GenericFile` instance, in which case it is wrapped in a `GenericWrapper` instance, so that the file is closed when only when the final layer is unwrapped. mode : str Must be one of ``"r"``, ``"w"`` or ``"rw"``. uri : str Sets the base URI of the file object. This will be used to resolve any relative URIs contained in the file. This is redundant if `init` is a `bytes` or `unicode` object (since it will be the uri), and it may be determined automatically if `init` refers to a regular filesystem file. It is not required if URI resolution is not used in the file. close : bool If ``True``, closes the underlying file handle when this object is closed. Defaults to ``False``. 
Returns ------- fd : GenericFile Raises ------ ValueError, TypeError, IOError """ if mode not in ('r', 'w', 'rw'): raise ValueError("mode must be 'r', 'w' or 'rw'") if init in (sys.__stdout__, sys.__stdin__, sys.__stderr__): init = os.fdopen(init.fileno(), init.mode + 'b') if isinstance(init, (GenericFile, GenericWrapper)): if mode not in init.mode: raise ValueError( "File is opened as '{0}', but '{1}' was requested".format( init.mode, mode)) return GenericWrapper(init) elif isinstance(init, (str, pathlib.Path)): parsed = patched_urllib_parse.urlparse(str(init)) if parsed.scheme in ['http', 'https']: if 'w' in mode: raise ValueError( "HTTP connections can not be opened for writing") return _http_to_temp(init, mode, uri=uri) elif parsed.scheme in _local_file_schemes: if mode == 'rw': realmode = 'r+b' else: realmode = mode + 'b' # Windows paths are not URIs, and so they should not be parsed as # such. Otherwise, the drive component of the path can get lost. # This is not an ideal solution, but we can't use pathlib here # because it doesn't handle URIs properly. if sys.platform.startswith('win') and parsed.scheme in string.ascii_letters: realpath = str(init) else: realpath = url2pathname(parsed.path) if mode == 'w': fd = atomicfile.atomic_open(realpath, realmode) else: fd = open(realpath, realmode) fd = fd.__enter__() return RealFile(fd, mode, close=True, uri=uri) elif isinstance(init, io.BytesIO): return MemoryIO(init, mode, uri=uri) elif isinstance(init, io.StringIO): raise TypeError( "io.StringIO objects are not supported. Use io.BytesIO instead.") elif isinstance(init, io.IOBase): if (('r' in mode and not init.readable()) or ('w' in mode and not init.writable())): raise ValueError( "File is opened as '{0}', but '{1}' was requested".format( init.mode, mode)) if init.seekable(): if isinstance(init, (io.BufferedReader, io.BufferedWriter, io.BufferedRandom)): init2 = init.raw else: init2 = init if isinstance(init2, io.RawIOBase): result = RealFile(init2, mode, uri=uri, close=close) else: result = MemoryIO(init2, mode, uri=uri) result._secondary_fd = init return result else: if mode == 'w': return OutputStream(init, uri=uri, close=close) elif mode == 'r': return InputStream(init, mode, uri=uri, close=close) else: raise ValueError( "File '{0}' could not be opened in 'rw' mode".format(init)) elif mode == 'w' and ( hasattr(init, 'write') and hasattr(init, 'seek') and hasattr(init, 'tell')): return MemoryIO(init, mode, uri=uri) elif mode == 'r' and ( hasattr(init, 'read') and hasattr(init, 'seek') and hasattr(init, 'tell')): return MemoryIO(init, mode, uri=uri) elif mode == 'rw' and ( hasattr(init, 'read') and hasattr(init, 'write') and hasattr(init, 'seek') and hasattr(init, 'tell')): return MemoryIO(init, mode, uri=uri) elif mode == 'w' and hasattr(init, 'write'): return OutputStream(init, uri=uri, close=close) elif mode == 'r' and hasattr(init, 'read'): return InputStream(init, mode, uri=uri, close=close) raise ValueError("Can't handle '{0}' as a file for mode '{1}'".format( init, mode)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/reference.py0000644000537500020070000001230500000000000017024 0ustar00wjamiesonSTSCI\science""" Manages external references in the YAML tree using the `JSON Reference standard `__ and `JSON Pointer standard `__. """ from collections.abc import Sequence import weakref import numpy as np from .types import AsdfType from . import generic_io from . import treeutil from . 
import util from .util import patched_urllib_parse __all__ = [ 'resolve_fragment', 'Reference', 'find_references', 'resolve_references', 'make_reference'] def resolve_fragment(tree, pointer): """ Resolve a JSON Pointer within the tree. """ pointer = pointer.lstrip(u"/") parts = patched_urllib_parse.unquote(pointer).split(u"/") if pointer else [] for part in parts: part = part.replace(u"~1", u"/").replace(u"~0", u"~") if isinstance(tree, Sequence): # Array indexes should be turned into integers try: part = int(part) except ValueError: pass try: tree = tree[part] except (TypeError, LookupError): raise ValueError( "Unresolvable reference: '{0}'".format(pointer)) return tree class Reference(AsdfType): yaml_tag = 'tag:yaml.org,2002:map' def __init__(self, uri, base_uri=None, asdffile=None, target=None): self._uri = uri if asdffile is not None: self._asdffile = weakref.ref(asdffile) self._base_uri = base_uri self._target = target def _get_target(self, **kwargs): if self._target is None: base_uri = self._base_uri if base_uri is None: base_uri = self._asdffile().uri uri = generic_io.resolve_uri(base_uri, self._uri) asdffile = self._asdffile().open_external(uri, **kwargs) parts = patched_urllib_parse.urlparse(self._uri) fragment = parts.fragment self._target = resolve_fragment(asdffile.tree, fragment) return self._target def __repr__(self): # repr alone should not force loading of the reference if self._target is None: return "".format( self._uri) else: return "".format(repr(self._target)) def __str__(self): # str alone should not force loading of the reference if self._target is None: return "".format( self._uri) else: return str(self._target) def __len__(self): return len(self._get_target()) def __getattr__(self, attr): if attr == '_tag': return None try: return getattr(self._get_target(), attr) except Exception: raise AttributeError("No attribute '{0}'".format(attr)) def __getitem__(self, item): return self._get_target()[item] def __setitem__(self, item, val): self._get_target()[item] = val def __array__(self): return np.asarray(self._get_target()) def __call__(self, **kwargs): return self._get_target(**kwargs) def __contains__(self, item): return item in self._get_target() @classmethod def to_tree(self, data, ctx): if ctx.uri is not None: uri = generic_io.relative_uri(ctx.uri, data._uri) else: uri = data._uri return {'$ref': uri} @classmethod def validate(self, data): pass def find_references(tree, ctx): """ Find all of the JSON references in the tree, and convert them into `Reference` objects. """ def do_find(tree, json_id): if isinstance(tree, dict) and '$ref' in tree: return Reference(tree['$ref'], json_id, asdffile=ctx) return tree return treeutil.walk_and_modify( tree, do_find, ignore_implicit_conversion=ctx._ignore_implicit_conversion) def resolve_references(tree, ctx, **kwargs): """ Resolve all of the references in the tree, by loading the external data and inserting it directly into the tree. """ def do_resolve(tree): if isinstance(tree, Reference): return tree(**kwargs) return tree tree = find_references(tree, ctx) return treeutil.walk_and_modify( tree, do_resolve, ignore_implicit_conversion=ctx._ignore_implicit_conversion) def make_reference(asdffile, path): """ Make a reference to a subtree of the given ASDF file. Parameters ---------- asdffile : AsdfFile path : list of str and int, optional The parts of the path pointing to an item in this tree. If omitted, points to the root of the tree. Returns ------- reference : reference.Reference A reference object. 
""" path_str = '/'.join( x.replace(u"~", u"~0").replace(u"/", u"~1") for x in path) target = resolve_fragment(asdffile.tree, path_str) if asdffile.uri is None: raise ValueError( "Can not make a reference to a AsdfFile without an associated URI.") base_uri = util.get_base_uri(asdffile.uri) uri = base_uri + '#' + path_str return Reference(uri, target=target) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/resolver.py0000644000537500020070000001333600000000000016734 0ustar00wjamiesonSTSCI\scienceimport sys import os.path import warnings from . import constants from . import util from .exceptions import AsdfDeprecationWarning def find_schema_path(): dirname = os.path.dirname(__file__) # This means we are working within a development build if os.path.exists(os.path.join(dirname, '..', 'asdf-standard')): return os.path.join(dirname, '..', 'asdf-standard', 'schemas') # Otherwise, we return the installed location return os.path.join(dirname, 'schemas') class Resolver: """ A class that can be used to map strings with a particular prefix to another. """ def __init__(self, mappings, prefix): """ Parameters ---------- mappings : list of tuple or callable A list of mappings to try, in order. For each entry: - If a callable, must take a string and return a remapped string. Should return `None` if the mapping does not apply to the input. - If a tuple, the first item is a string prefix to match. The second item specifies how to create the new result in Python string formatting syntax. The following formatting tokens are available, where ``X`` relates to the ``prefix`` argument: - ``{X}``: The entire string passed in. - ``{X_prefix}``: The prefix of the string that was matched. - ``{X_suffix}``: The part of the string following the prefix. prefix : str The prefix to use for the Python formatting token names. """ self._mappings = self._validate_mappings(mappings) self._prefix = prefix def add_mapping(self, mappings, prefix=''): # Deprecating this because Resolver is used as part of a dictionary key # and so shouldn't be mutable. 
warnings.warn("The 'add_mapping' method is deprecated.", AsdfDeprecationWarning) if prefix != self._prefix: raise ValueError(f"Prefix '{prefix}' does not match the Resolver prefix '{self._prefix}'") self._mappings = self._mappings + self._validate_mappings(mappings) def _perform_mapping(self, mapping, input): if callable(mapping): output = mapping(input) if output is not None: return (sys.maxsize, mapping(input)) else: return None else: if input.startswith(mapping[0]): format_tokens = { self._prefix: input, self._prefix + "_prefix": mapping[0], self._prefix + "_suffix": input[len(mapping[0]):] } return len(mapping[0]), mapping[1].format(**format_tokens) else: return None def _validate_mappings(self, mappings): normalized = [] for mapping in mappings: if callable(mapping): normalized.append(mapping) elif (isinstance(mapping, (list, tuple)) and len(mapping) == 2 and isinstance(mapping[0], str) and isinstance(mapping[1], str)): normalized.append(tuple(mapping)) else: raise ValueError("Invalid mapping '{0}'".format(mapping)) return tuple(normalized) def __call__(self, input): candidates = [(0, input)] for mapping in self._mappings: output = self._perform_mapping(mapping, input) if output is not None: candidates.append(output) candidates.sort() return candidates[-1][1] def __hash__(self): return hash(self._mappings) def __eq__(self, other): if not isinstance(other, Resolver): return NotImplemented return self._mappings == other._mappings class ResolverChain: """ A chain of Resolvers, each of which is called with the previous Resolver's output to produce the final transformed string. """ def __init__(self, *resolvers): """ Parameters ---------- *resolvers : list of Resolver Resolvers to include in the chain. """ self._resolvers = tuple(resolvers) def __call__(self, input): for resolver in self._resolvers: input = resolver(input) return input def __hash__(self): return hash(self._resolvers) def __eq__(self, other): if not isinstance(other, ResolverChain): return NotImplemented return self._resolvers == other._resolvers DEFAULT_URL_MAPPING = [ (constants.STSCI_SCHEMA_URI_BASE, util.filepath_to_url( os.path.join(find_schema_path(), 'stsci.edu')) + '/{url_suffix}.yaml')] DEFAULT_TAG_TO_URL_MAPPING = [ (constants.STSCI_SCHEMA_TAG_BASE, 'http://stsci.edu/schemas/asdf{tag_suffix}') ] def default_url_mapping(uri): warnings.warn("'default_url_mapping' is deprecated.", AsdfDeprecationWarning) return default_url_mapping._resolver(uri) default_url_mapping._resolver = Resolver(DEFAULT_URL_MAPPING, 'url') def default_tag_to_url_mapping(uri): warnings.warn("'default_tag_to_url_mapping' is deprecated.", AsdfDeprecationWarning) return default_tag_to_url_mapping._resolver(uri) default_tag_to_url_mapping._resolver = Resolver(DEFAULT_TAG_TO_URL_MAPPING, 'tag') def default_resolver(uri): warnings.warn( "The 'default_resolver(...)' function is deprecated. Use " "'asdf.extension.get_default_resolver()(...)' instead.", AsdfDeprecationWarning) return default_resolver._resolver(uri) default_resolver._resolver = ResolverChain(default_tag_to_url_mapping._resolver, default_url_mapping._resolver) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/resource.py0000644000537500020070000002145300000000000016721 0ustar00wjamiesonSTSCI\science""" Support for plugins that provide access to resources such as schemas. 
""" from collections.abc import Mapping from pathlib import Path import fnmatch import os import pkgutil import sys if sys.version_info < (3, 9): import importlib_resources else: import importlib.resources as importlib_resources import asdf from .util import get_class_name __all__ = [ "ResourceMappingProxy", "DirectoryResourceMapping", "ResourceManager", "JsonschemaResourceMapping", "get_core_resource_mappings", ] class ResourceMappingProxy(Mapping): """ Wrapper around a resource mapping that carries additional information on the package that provided the mapping. """ @classmethod def maybe_wrap(self, delegate): if isinstance(delegate, ResourceMappingProxy): return delegate else: return ResourceMappingProxy(delegate) def __init__(self, delegate, package_name=None, package_version=None): if not isinstance(delegate, Mapping): raise TypeError("Resource mapping must implement the Mapping interface") self._delegate = delegate self._package_name = package_name self._package_version = package_version self._class_name = get_class_name(delegate) def __getitem__(self, uri): return self._delegate.__getitem__(uri) def __len__(self): return self._delegate.__len__() def __iter__(self): return self._delegate.__iter__() @property def delegate(self): """ Get the wrapped mapping instance. Returns ------- collections.abc.Mapping """ return self._delegate @property def package_name(self): """ Get the name of the Python package that provided this mapping. Returns ------- str or None `None` if the mapping was added at runtime. """ return self._package_name @property def package_version(self): """ Get the version of the Python package that provided the mapping. Returns ------- str or None `None` if the mapping was added at runtime. """ return self._package_version @property def class_name(self): """" Get the fully qualified class name of the mapping. Returns ------- str """ return self._class_name def __eq__(self, other): if isinstance(other, ResourceMappingProxy): return other.delegate is self.delegate else: return False def __hash__(self): return hash(id(self.delegate)) def __repr__(self): if self.package_name is not None: package_description = "{}=={}".format(self.package_name, self.package_version) else: package_description = "(none)" return "".format( self.class_name, package_description, len(self), ) class DirectoryResourceMapping(Mapping): """ Resource mapping that reads resource content from a directory or directory tree. Parameters ---------- root : str or importlib.abc.Traversable Root directory (or directory-like Traversable) of the resource files. `str` will be interpreted as a filesystem path. uri_prefix : str Prefix used to construct URIs from file paths. The prefix will be prepended to paths relative to the root directory. recursive : bool, optional If `True`, recurse into subdirectories. Defaults to `False`. filename_pattern : str, optional Glob pattern that identifies relevant filenames. Defaults to `"*.yaml"`. stem_filename : bool, optional If `True`, remove the filename's extension when constructing its URI. 
""" def __init__(self, root, uri_prefix, recursive=False, filename_pattern="*.yaml", stem_filename=True): self._uri_to_file = {} self._recursive = recursive self._filename_pattern = filename_pattern self._stem_filename = stem_filename if isinstance(root, str): self._root = Path(root) else: self._root = root if uri_prefix.endswith("/"): self._uri_prefix = uri_prefix[:-1] else: self._uri_prefix = uri_prefix for file, path_components in self._iterate_files(self._root, []): self._uri_to_file[self._make_uri(file, path_components)] = file def _iterate_files(self, directory, path_components): for obj in directory.iterdir(): if obj.is_file() and fnmatch.fnmatch(obj.name, self._filename_pattern): yield obj, path_components elif obj.is_dir() and self._recursive: yield from self._iterate_files(obj, path_components + [obj.name]) def _make_uri(self, file, path_components): if self._stem_filename: filename = os.path.splitext(file.name)[0] else: filename = file.name return "/".join([self._uri_prefix] + path_components + [filename]) def __getitem__(self, uri): return self._uri_to_file[uri].read_bytes() def __len__(self): return len(self._uri_to_file) def __iter__(self): yield from self._uri_to_file def __repr__(self): return "{}({!r}, {!r}, recursive={!r}, filename_pattern={!r}, stem_filename={!r})".format( self.__class__.__name__, self._root, self._uri_prefix, self._recursive, self._filename_pattern, self._stem_filename, ) class ResourceManager(Mapping): """ Wraps multiple resource mappings into a single interface with some friendlier error handling. Parameters ---------- resource_mappings : iterable of collections.abc.Mapping Underlying resource mappings. In the case of a duplicate URI, the first mapping takes precedence. """ def __init__(self, resource_mappings): self._resource_mappings = resource_mappings self._mappings_by_uri = {} for mapping in resource_mappings: for uri in mapping: if uri not in self._mappings_by_uri: self._mappings_by_uri[uri] = mapping def __getitem__(self, uri): if uri not in self._mappings_by_uri: raise KeyError("Resource unavailable for URI: {}".format(uri)) content = self._mappings_by_uri[uri][uri] if isinstance(content, str): content = content.encode("utf-8") return content def __len__(self): return len(self._mappings_by_uri) def __iter__(self): yield from self._mappings_by_uri def __contains__(self, uri): # Implement __contains__ only for efficiency. return uri in self._mappings_by_uri def __repr__(self): return "".format(self.__len__()) _JSONSCHEMA_URI_TO_FILENAME = { "http://json-schema.org/draft-04/schema": "draft4.json", } class JsonschemaResourceMapping(Mapping): """ Resource mapping that fetches metaschemas from the jsonschema package. """ def __getitem__(self, uri): filename = _JSONSCHEMA_URI_TO_FILENAME[uri] return pkgutil.get_data("jsonschema", "schemas/{}".format(filename)) def __len__(self): return len(_JSONSCHEMA_URI_TO_FILENAME) def __iter__(self): yield from _JSONSCHEMA_URI_TO_FILENAME def __repr__(self): return "JsonschemaResourceMapping()" def get_core_resource_mappings(): """ Get the resource mapping instances for the core schemas. This method is registered with the asdf.resource_mappings entry point. """ core_schemas_root = importlib_resources.files(asdf)/"schemas"/"stsci.edu" if not core_schemas_root.is_dir(): # In an editable install, the schemas can be found in the # asdf-standard submodule. 
core_schemas_root = Path(__file__).parent.parent/"asdf-standard"/"schemas"/"stsci.edu" if not core_schemas_root.is_dir(): raise RuntimeError("Unable to locate core schemas") resources_root = importlib_resources.files(asdf)/"resources" if not resources_root.is_dir(): # In an editable install, the resources can be found in the # asdf-standard submodule. resources_root = Path(__file__).parent.parent/"asdf-standard"/"resources" if not resources_root.is_dir(): raise RuntimeError("Unable to locate core resources") return [ DirectoryResourceMapping(core_schemas_root, "http://stsci.edu/schemas", recursive=True), DirectoryResourceMapping(resources_root / "asdf-format.org", "asdf://asdf-format.org", recursive=True), JsonschemaResourceMapping(), ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/schema.py0000644000537500020070000006407500000000000016341 0ustar00wjamiesonSTSCI\scienceimport json import datetime import warnings import copy from numbers import Integral from functools import lru_cache from collections import OrderedDict from collections.abc import Mapping from jsonschema import validators as mvalidators from jsonschema.exceptions import ValidationError import yaml import numpy as np from .config import get_config from . import constants from . import generic_io from . import reference from . import treeutil from . import util from . import extension from . import yamlutil from . import versioning from . import tagged from .exceptions import AsdfDeprecationWarning, AsdfWarning from .util import patched_urllib_parse YAML_SCHEMA_METASCHEMA_ID = 'http://stsci.edu/schemas/yaml-schema/draft-01' if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_loader = yaml.SafeLoader __all__ = ['validate', 'fill_defaults', 'remove_defaults', 'check_schema'] def default_ext_resolver(uri): """ Resolver that uses tag/url mappings from all installed extensions """ # Deprecating this because it doesn't play nicely with the caching on # load_schema(...). warnings.warn( "The 'default_ext_resolver(...)' function is deprecated. Use " "'asdf.extension.get_default_resolver()(...)' instead.", AsdfDeprecationWarning) return extension.get_default_resolver()(uri) PYTHON_TYPE_TO_YAML_TAG = { None: 'null', str: 'str', bytes: 'str', bool: 'bool', int: 'int', float: 'float', list: 'seq', dict: 'map', set: 'set', OrderedDict: 'omap' } # Prepend full YAML tag prefix for k, v in PYTHON_TYPE_TO_YAML_TAG.items(): PYTHON_TYPE_TO_YAML_TAG[k] = constants.YAML_TAG_PREFIX + v def _type_to_tag(type_): for base in type_.mro(): if base in PYTHON_TYPE_TO_YAML_TAG: return PYTHON_TYPE_TO_YAML_TAG[base] return None def validate_tag(validator, tag_pattern, instance, schema): """ Implements the tag validation directive, which checks the tag against a pattern that may include wildcards. See `asdf.util.uri_match` for details on the matching behavior. 
""" if hasattr(instance, '_tag'): instance_tag = instance._tag else: # Try tags for known Python builtins instance_tag = _type_to_tag(type(instance)) if instance_tag is None: yield ValidationError( "mismatched tags, wanted '{}', got unhandled object type '{}'".format( tag_pattern, util.get_class_name(instance) ) ) if not util.uri_match(tag_pattern, instance_tag): yield ValidationError( "mismatched tags, wanted '{0}', got '{1}'".format( tag_pattern, instance_tag)) def validate_propertyOrder(validator, order, instance, schema): """ Stores a value on the `tagged.TaggedDict` instance so that properties can be written out in the preferred order. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not validator.is_type(instance, 'object'): return if not order: # propertyOrder may be an empty list return instance.property_order = order def validate_flowStyle(validator, flow_style, instance, schema): """ Sets a flag on the `tagged.TaggedList` or `tagged.TaggedDict` object so that the YAML generator knows which style to use to write the element. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not (validator.is_type(instance, 'object') or validator.is_type(instance, 'array')): return instance.flow_style = flow_style def validate_style(validator, style, instance, schema): """ Sets a flag on the `tagged.TaggedString` object so that the YAML generator knows which style to use to write the string. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not validator.is_type(instance, 'string'): return instance.style = style def validate_type(validator, types, instance, schema): """ PyYAML returns strings that look like dates as datetime objects. However, as far as JSON is concerned, this is type==string and format==date-time. That detects for that case and doesn't raise an error, otherwise falling back to the default type checker. 
""" if (isinstance(instance, datetime.datetime) and schema.get('format') == 'date-time' and 'string' in types): return return mvalidators.Draft4Validator.VALIDATORS['type']( validator, types, instance, schema) YAML_VALIDATORS = util.HashableDict( mvalidators.Draft4Validator.VALIDATORS.copy()) YAML_VALIDATORS.update({ 'tag': validate_tag, 'propertyOrder': validate_propertyOrder, 'flowStyle': validate_flowStyle, 'style': validate_style, 'type': validate_type }) def validate_fill_default(validator, properties, instance, schema): if not validator.is_type(instance, 'object'): return for property, subschema in properties.items(): if "default" in subschema: instance.setdefault(property, subschema["default"]) for err in mvalidators.Draft4Validator.VALIDATORS['properties']( validator, properties, instance, schema): yield err FILL_DEFAULTS = util.HashableDict() for key in ('allOf', 'items'): FILL_DEFAULTS[key] = mvalidators.Draft4Validator.VALIDATORS[key] FILL_DEFAULTS['properties'] = validate_fill_default def validate_remove_default(validator, properties, instance, schema): if not validator.is_type(instance, 'object'): return for property, subschema in properties.items(): if subschema.get("default", None) is not None: if instance.get(property, None) == subschema["default"]: del instance[property] for err in mvalidators.Draft4Validator.VALIDATORS['properties']( validator, properties, instance, schema): yield err REMOVE_DEFAULTS = util.HashableDict() for key in ('allOf', 'items'): REMOVE_DEFAULTS[key] = mvalidators.Draft4Validator.VALIDATORS[key] REMOVE_DEFAULTS['properties'] = validate_remove_default class _ValidationContext: """ Context that tracks (tree node, schema fragment) pairs that have already been validated. Instances of this class are context managers that track how many times they have been entered, and only reset themselves when exiting the outermost context. """ def __init__(self): self._depth = 0 self._seen = set() def add(self, instance, schema): """ Inform the context that an instance has been validated against a schema fragment. """ self._seen.add(self._make_seen_key(instance, schema)) def seen(self, instance, schema): """ Return True if an instance has already been validated against a schema fragment. 
""" return self._make_seen_key(instance, schema) in self._seen def __enter__(self): self._depth += 1 return self def __exit__(self, exc_type, exc_value, traceback): self._depth -= 1 if self._depth == 0: self._seen = set() def _make_seen_key(self, instance, schema): return (id(instance), id(schema)) @lru_cache() def _create_validator(validators=YAML_VALIDATORS, visit_repeat_nodes=False): meta_schema = _load_schema_cached(YAML_SCHEMA_METASCHEMA_ID, extension.get_default_resolver(), False, False) type_checker = mvalidators.Draft4Validator.TYPE_CHECKER.redefine_many({ 'array': lambda checker, instance: isinstance(instance, list) or isinstance(instance, tuple), 'integer': lambda checker, instance: not isinstance(instance, bool) and isinstance(instance, Integral), 'string': lambda checker, instance: isinstance(instance, (str, np.str_)), }) id_of = mvalidators.Draft4Validator.ID_OF base_cls = mvalidators.create( meta_schema=meta_schema, validators=validators, type_checker=type_checker, id_of=id_of ) class ASDFValidator(base_cls): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._context = _ValidationContext() def iter_errors(self, instance, _schema=None): # We can't validate anything that looks like an external reference, # since we don't have the actual content, so we just have to defer # it for now. If the user cares about complete validation, they # can call `AsdfFile.resolve_references`. with self._context: if _schema is None: schema = self.schema else: schema = _schema if self._context.seen(instance, schema): # We've already validated this instance against this schema, # no need to do it again. return if not visit_repeat_nodes: self._context.add(instance, schema) if ((isinstance(instance, dict) and '$ref' in instance) or isinstance(instance, reference.Reference)): return if _schema is None: tag = getattr(instance, '_tag', None) if tag is not None: if self.serialization_context.extension_manager.handles_tag(tag): tag_def = self.serialization_context.extension_manager.get_tag_definition(tag) schema_uris = tag_def.schema_uris else: schema_uris = [self.ctx.tag_mapping(tag)] if schema_uris[0] == tag: schema_uris = [] # Must validate against all schema_uris for schema_uri in schema_uris: try: s = _load_schema_cached(schema_uri, self.ctx.resolver, False, False) except FileNotFoundError: msg = "Unable to locate schema file for '{}': '{}'" warnings.warn(msg.format(tag, schema_uri), AsdfWarning) s = {} if s: with self.resolver.in_scope(schema_uri): for x in super(ASDFValidator, self).iter_errors(instance, s): yield x if isinstance(instance, dict): for val in instance.values(): for x in self.iter_errors(val): yield x elif isinstance(instance, list): for val in instance: for x in self.iter_errors(val): yield x else: for x in super(ASDFValidator, self).iter_errors(instance, _schema=schema): yield x return ASDFValidator @lru_cache() def _load_schema(url): with generic_io.get_file(url) as fd: if isinstance(url, str) and url.endswith('json'): json_data = fd.read().decode('utf-8') result = json.loads(json_data, object_pairs_hook=OrderedDict) else: # The following call to yaml.load is safe because we're # using a loader that inherits from pyyaml's SafeLoader. 
result = yaml.load(fd, Loader=yamlutil.AsdfLoader) # nosec return result, fd.uri def _make_schema_loader(resolver): def load_schema(url): # Check if this is a URI provided by the new # Mapping API: resource_manager = get_config().resource_manager if url in resource_manager: content = resource_manager[url] # The jsonschema metaschemas are JSON, but pyyaml # doesn't mind. # The following call to yaml.load is safe because we're # using a loader that inherits from pyyaml's SafeLoader. result = yaml.load(content, Loader=yamlutil.AsdfLoader) # nosec return result, url # If not, fall back to fetching the schema the old way: url = resolver(str(url)) return _load_schema(url) return load_schema def _make_resolver(url_mapping): handlers = {} schema_loader = _make_schema_loader(url_mapping) def get_schema(url): return schema_loader(url)[0] for x in ['http', 'https', 'file', 'tag', 'asdf']: handlers[x] = get_schema # Supplying our own implementation of urljoin_cache # allows asdf:// URIs to be resolved correctly. urljoin_cache = lru_cache(1024)(patched_urllib_parse.urljoin) # We set cache_remote=False here because we do the caching of # remote schemas here in `load_schema`, so we don't need # jsonschema to do it on our behalf. Setting it to `True` # counterintuitively makes things slower. return mvalidators.RefResolver( '', {}, cache_remote=False, handlers=handlers, urljoin_cache=urljoin_cache, ) @lru_cache() def load_custom_schema(url): warnings.warn( "The 'load_custom_schema(...)' function is deprecated. Use" "'load_schema' instead.", AsdfDeprecationWarning ) return load_schema(url, resolve_references=True) def load_schema(url, resolver=None, resolve_references=False, resolve_local_refs=False): """ Load a schema from the given URL. Parameters ---------- url : str The path to the schema resolver : callable, optional A callback function used to map URIs to other URIs. The callable must take a string and return a string or `None`. This is useful, for example, when a remote resource has a mirror on the local filesystem that you wish to use. resolve_references : bool, optional If `True`, resolve all `$ref` references. resolve_local_refs : bool, optional If `True`, resolve all `$ref` references that refer to other objects within the same schema. This will automatically be handled when passing `resolve_references=True`, but it may be desirable in some cases to control local reference resolution separately. This parameter is deprecated. """ if resolve_local_refs is True: warnings.warn( "The 'resolve_local_refs' parameter is deprecated.", AsdfDeprecationWarning ) if resolver is None: # We can't just set this as the default in load_schema's definition # because invoking get_default_resolver at import time leads to a circular import. resolver = extension.get_default_resolver() # We want to cache the work that went into constructing the schema, but returning # the same object is treacherous, because users who mutate the result will not # expect that they're changing the schema everywhere. return copy.deepcopy( _load_schema_cached(url, resolver, resolve_references, resolve_local_refs) ) def _safe_resolve(resolver, json_id, uri): """ This function handles the tricky task of resolving a schema URI in the presence of both new and legacy extensions. There are two senses of "resolve" here: one is to resolve the URI to a file:// URL using the legacy extension resolver object. The other is to resolve relative URIs against the id of the current schema document, which is what generic_io.resolve_uri does. 
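As an illustration (with made-up URIs): resolving the reference ``#/definitions/foo`` against a schema whose id is ``asdf://example.com/schemas/bar-1.0.0`` splits off the fragment ``/definitions/foo``, and the empty remaining base resolves to the schema id itself, yielding the pair ``('asdf://example.com/schemas/bar-1.0.0', '/definitions/foo')``.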
For URIs associated with new-style extensions, we want to resolve with generic_io.resolve_uri, but not with the resolver object, otherwise we risk mangling URIs that share a prefix with a resolver mapping. """ # We can't use urllib.parse here because tag: URIs don't # parse correctly. parts = uri.split("#") base = parts[0] if len(parts) > 1: fragment = parts[1] else: fragment = "" # The generic_io.resolve_uri method cannot operate on tag: URIs. # New-style extensions don't support $ref with a tag URI target anyway, # so it's safe to feed this through the resolver right away. if base.startswith("tag:"): base = resolver(base) # Resolve relative URIs (e.g., #foo/bar, ../foo/bar) against # the current schema id. base = generic_io.resolve_uri(json_id, base) # Use the resolver object only if the URI does not belong to one # of the new-style extensions. if base not in get_config().resource_manager: base = resolver(base) return base, fragment @lru_cache() def _load_schema_cached(url, resolver, resolve_references, resolve_local_refs): loader = _make_schema_loader(resolver) schema, url = loader(url) if resolve_references or resolve_local_refs: def resolve_refs(node, json_id): if json_id is None: json_id = url if isinstance(node, dict) and '$ref' in node: suburl_base, suburl_fragment = _safe_resolve(resolver, json_id, node['$ref']) if suburl_base == url or suburl_base == schema.get("id"): # This is a local ref, which we'll resolve in both cases. subschema = schema elif resolve_references: # Only resolve non-local refs when the flag is set. subschema = load_schema(suburl_base, resolver, True) else: # Otherwise return the $ref unmodified. return node return reference.resolve_fragment(subschema, suburl_fragment) else: return node schema = treeutil.walk_and_modify(schema, resolve_refs) return schema def get_validator(schema={}, ctx=None, validators=None, url_mapping=None, *args, _visit_repeat_nodes=False, _serialization_context=None, **kwargs): """ Get a JSON schema validator object for the given schema. The additional *args and **kwargs are passed along to `jsonschema.validate`. Parameters ---------- schema : schema, optional Explicit schema to use. If not provided, the schema to use is determined by the tag on instance (or subinstance). ctx : AsdfFile context Used to resolve tags and urls validators : dict, optional A dictionary mapping properties to validators to use (instead of the built-in ones and ones provided by extension types). url_mapping : resolver.Resolver, optional A resolver to convert remote URLs into local ones. _visit_repeat_nodes : bool, optional Force the validator to visit nodes that it has already seen. This flag is a temporary hack to support a specific project that uses a custom validator to update a .fits file. Setting `True` is discouraged and will lead to RecursionError in trees containing reference cycles. Returns ------- validator : jsonschema.Validator """ if ctx is None: from .asdf import AsdfFile ctx = AsdfFile() if _serialization_context is None: _serialization_context = ctx._create_serialization_context() if validators is None: validators = util.HashableDict(YAML_VALIDATORS.copy()) validators.update(ctx.extension_list.validators) kwargs['resolver'] = _make_resolver(url_mapping) # We don't just call validators.validate() directly here, because # that validates the schema itself, wasting a lot of time (at the # time of this writing, it was half of the runtime of the unit # test suite!!!). 
Instead, we assume that the schemas are valid # through the running of the unit tests, not at run time. cls = _create_validator(validators=validators, visit_repeat_nodes=_visit_repeat_nodes) validator = cls(schema, *args, **kwargs) validator.ctx = ctx validator.serialization_context = _serialization_context return validator def _validate_large_literals(instance, reading): """ Validate that the tree has no large numeric literals. """ def _validate(value): if value <= constants.MAX_NUMBER and value >= constants.MIN_NUMBER: return if reading: warnings.warn( f"Invalid integer literal value {value} detected while reading file. " "The value has been read safely, but the file should be " "fixed.", AsdfWarning ) else: raise ValidationError( f"Integer value {value} is too large to safely represent as a " "literal in ASDF" ) if isinstance(instance, Integral): _validate(instance) elif isinstance(instance, Mapping): for key in instance: if isinstance(key, Integral): _validate(key) def _validate_mapping_keys(instance, reading): """ Validate that mappings do not contain illegal key types (as of ASDF Standard 1.6.0, only str, int, and bool are permitted). """ if not isinstance(instance, Mapping): return for key in instance: if isinstance(key, tagged.Tagged) or not isinstance(key, (str, int, bool)): if reading: warnings.warn( f"Invalid mapping key {key} detected while reading file. " "The value has been read safely, but the file should be " "fixed.", AsdfWarning ) else: raise ValidationError( f"Mapping key {key} is not permitted. Valid types: " "str, int, bool." ) def validate(instance, ctx=None, schema={}, validators=None, reading=False, *args, **kwargs): """ Validate the given instance (which must be a tagged tree) against the appropriate schema. The schema itself is located using the tag on the instance. The additional *args and **kwargs are passed along to `jsonschema.validate`. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls schema : schema, optional Explicit schema to use. If not provided, the schema to use is determined by the tag on instance (or subinstance). validators : dict, optional A dictionary mapping properties to validators to use (instead of the built-in ones and ones provided by extension types). reading: bool, optional Indicates whether validation is being performed when the file is being read. This is useful to allow for different validation behavior when reading vs writing files. """ if ctx is None: from .asdf import AsdfFile ctx = AsdfFile() validator = get_validator(schema, ctx, validators, ctx.resolver, *args, **kwargs) validator.validate(instance, _schema=(schema or None)) additional_validators = [_validate_large_literals] if ctx.version >= versioning.RESTRICTED_KEYS_MIN_VERSION: additional_validators.append(_validate_mapping_keys) def _callback(instance): for validator in additional_validators: validator(instance, reading) treeutil.walk(instance, _callback) def fill_defaults(instance, ctx, reading=False): """ For any default values in the schema, add them to the tree if they don't exist. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls reading: bool, optional Indicates whether the ASDF file is being read (in contrast to being written). """ validate(instance, ctx, validators=FILL_DEFAULTS, reading=reading) def remove_defaults(instance, ctx): """ For any values in the tree that are the same as the default values specified in the schema, remove them from the tree. 
Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls """ validate(instance, ctx, validators=REMOVE_DEFAULTS) def check_schema(schema, validate_default=True): """ Check a given schema to make sure it is a valid YAML schema. Parameters ---------- schema : dict The schema object, as returned by `load_schema`. validate_default : bool, optional Set to `True` to validate the content of the default field against the schema. """ validators = util.HashableDict( mvalidators.Draft4Validator.VALIDATORS.copy()) if validate_default: # The jsonschema library doesn't validate defaults # on its own. instance_validator = get_validator(schema) instance_scope = schema.get('id', '') def _validate_default(validator, default, instance, schema): if not validator.is_type(instance, 'object'): return if 'default' in instance: with instance_validator.resolver.in_scope(instance_scope): for err in instance_validator.iter_errors( instance['default'], instance): yield err validators.update({ 'default': _validate_default }) meta_schema_id = schema.get('$schema', YAML_SCHEMA_METASCHEMA_ID) meta_schema = _load_schema_cached(meta_schema_id, extension.get_default_resolver(), False, False) resolver = _make_resolver(extension.get_default_resolver()) cls = mvalidators.create( meta_schema=meta_schema, validators=validators, type_checker=mvalidators.Draft4Validator.TYPE_CHECKER, id_of=mvalidators.Draft4Validator.ID_OF, ) validator = cls(meta_schema, resolver=resolver) validator.validate(schema, _schema=meta_schema) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/search.py0000644000537500020070000003034300000000000016335 0ustar00wjamiesonSTSCI\science""" Utilities for searching ASDF trees. """ import inspect import re import typing import builtins from .util import NotSet from ._display import (render_tree, DEFAULT_MAX_ROWS, DEFAULT_MAX_COLS, DEFAULT_SHOW_VALUES, format_italic, format_faint, _NodeInfo) from .treeutil import get_children, is_container __all__ = ["AsdfSearchResult"] class AsdfSearchResult: """ Result of a call to AsdfFile.search. """ def __init__(self, identifiers, node, filters=[], parent_node=None, max_rows=DEFAULT_MAX_ROWS, max_cols=DEFAULT_MAX_COLS, show_values=DEFAULT_SHOW_VALUES): self._identifiers = identifiers self._node = node self._filters = filters self._parent_node = parent_node self._max_rows = max_rows self._max_cols = max_cols self._show_values = show_values def format(self, max_rows=NotSet, max_cols=NotSet, show_values=NotSet): """ Change formatting parameters of the rendered tree. Parameters ---------- max_rows : int, tuple, None, or NotSet, optional Maximum number of lines to print. Nodes that cannot be displayed will be elided with a message. If int, constrain total number of displayed lines. If tuple, constrain lines per node at the depth corresponding to the tuple index. If None, display all lines. If NotSet, retain existing value. max_cols : int, None or NotSet, optional Maximum length of line to print. Nodes that cannot be fully displayed will be truncated with a message. If int, constrain length of displayed lines. If None, line length is unconstrained. If NotSet, retain existing value. show_values : bool or NotSet, optional Set to False to disable display of primitive values in the rendered tree. Set to NotSet to retain existing value.
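Note that ``format`` does not modify this result in place; a new `AsdfSearchResult` is returned, as described below.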
Returns ------- AsdfSearchResult the reformatted search result """ if max_rows is NotSet: max_rows = self._max_rows if max_cols is NotSet: max_cols = self._max_cols if show_values is NotSet: show_values = self._show_values return AsdfSearchResult( self._identifiers, self._node, filters=self._filters, parent_node=self._parent_node, max_rows=max_rows, max_cols=max_cols, show_values=show_values ) def _maybe_compile_pattern(self, query): if isinstance(query, str): return re.compile(query) else: return query def _safe_equals(self, a, b): try: result = (a == b) if isinstance(result, bool): return result else: return False except Exception: return False def _get_fully_qualified_type(self, value): value_type = type(value) if value_type.__module__ == "builtins": return value_type.__name__ else: return ".".join([value_type.__module__, value_type.__name__]) def search(self, key=NotSet, type=NotSet, value=NotSet, filter=None): """ Further narrow the search. Parameters ---------- key : NotSet, str, or any other object Search query that selects nodes by dict key or list index. If NotSet, the node key is unconstrained. If str, the input is searched among keys/indexes as a regular expression pattern. If any other object, node's key or index must equal the queried key. type : NotSet, str, or builtins.type Search query that selects nodes by type. If NotSet, the node type is unconstrained. If str, the input is searched among (fully qualified) node type names as a regular expression pattern. If builtins.type, the node must be an instance of the input. value : NotSet, str, or any other object Search query that selects nodes by value. If NotSet, the node value is unconstrained. If str, the input is searched among values as a regular expression pattern. If any other object, node's value must equal the queried value. filter : callable Callable that filters nodes by arbitrary criteria. The callable accepts one or two arguments: - the node - the node's list index or dict key (optional) and returns True to retain the node, or False to remove it from the search results. Returns ------- AsdfSearchResult the subsequent search result """ if not (type is NotSet or isinstance(type, str) or isinstance(type, typing.Pattern) or isinstance(type, builtins.type)): raise TypeError("type must be NotSet, str, regular expression, or instance of builtins.type") # value and key arguments can be anything, but pattern and str have special behavior key = self._maybe_compile_pattern(key) type = self._maybe_compile_pattern(type) value = self._maybe_compile_pattern(value) filter = _wrap_filter(filter) def _filter(node, identifier): if isinstance(key, typing.Pattern): if key.search(str(identifier)) is None: return False elif key is not NotSet: if not self._safe_equals(identifier, key): return False if isinstance(type, typing.Pattern): fully_qualified_node_type = self._get_fully_qualified_type(node) if type.search(fully_qualified_node_type) is None: return False elif isinstance(type, builtins.type): if not isinstance(node, type): return False if isinstance(value, typing.Pattern): if is_container(node): # The string representation of a container object tends to # include the child object values, but that's probably not # what searchers want. 
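# A container therefore never matches a value pattern; its children are tested individually as the walk descends.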
return False elif value.search(str(node)) is None: return False elif value is not NotSet: if not self._safe_equals(node, value): return False if filter is not None: if not filter(node, identifier): return False return True return AsdfSearchResult( self._identifiers, self._node, filters=self._filters + [_filter], parent_node=self._parent_node, max_rows=self._max_rows, max_cols=self._max_cols, show_values=self._show_values ) def replace(self, value): """ Assign a new value in place of all leaf nodes in the search results. Parameters ---------- value : object """ results = [] def _callback(identifiers, parent, node, children): if all(f(node, identifiers[-1]) for f in self._filters): results.append((identifiers[-1], parent)) _walk_tree_breadth_first(self._identifiers, self._node, _callback) for identifier, parent in results: parent[identifier] = value @property def node(self): """ Retrieve the leaf node of a tree with one search result. Returns ------- object the single node of the search result """ results = self.nodes if len(results) == 0: return None elif len(results) == 1: return results[0] else: raise RuntimeError("More than one result") @property def path(self): """ Retrieve the path to the leaf node of a tree with one search result. Returns ------- str the path to the searched node """ results = self.paths if len(results) == 0: return None elif len(results) == 1: return results[0] else: raise RuntimeError("More than one result") @property def nodes(self): """ Retrieve all leaf nodes in the search results. Returns ------- list of object every node in the search results (breadth-first order) """ results = [] def _callback(identifiers, parent, node, children): if all(f(node, identifiers[-1]) for f in self._filters): results.append(node) _walk_tree_breadth_first(self._identifiers, self._node, _callback) return results @property def paths(self): """ Retrieve the paths to all leaf nodes in the search results. Returns ------- list of str the path to every node in the search results """ results = [] def _callback(identifiers, parent, node, children): if all(f(node, identifiers[-1]) for f in self._filters): results.append(_build_path(identifiers)) _walk_tree_breadth_first(self._identifiers, self._node, _callback) return results def __repr__(self): lines = render_tree( self._node, max_rows=self._max_rows, max_cols=self._max_cols, show_values=self._show_values, filters=self._filters, identifier=self._identifiers[-1] ) if len(lines) == 0: return format_faint(format_italic("No results found.")) else: return "\n".join(lines) def __getitem__(self, key): if (isinstance(self._node, dict) or isinstance(self._node, list) or isinstance(self._node, tuple) or _NodeInfo.supports_info(self._node)): if _NodeInfo.supports_info(self._node): child = self._node.__asdf_traverse__()[key] else: child = self._node[key] else: raise TypeError("This node cannot be indexed") return AsdfSearchResult( self._identifiers + [key], child, filters=self._filters, parent_node=self._node, max_rows=self._max_rows, max_cols=self._max_cols, show_values=self._show_values, ) def _walk_tree_breadth_first(root_identifiers, root_node, callback): """ Walk the tree in breadth-first order (useful for prioritizing lower-depth nodes). 
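Each visited container's ``id()`` is recorded in a ``seen`` set, so trees containing reference cycles are traversed without looping forever.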
""" current_nodes = [(root_identifiers, None, root_node)] seen = set() while True: next_nodes = [] for identifiers, parent, node in current_nodes: if (isinstance(node, dict) or isinstance(node, list) or isinstance(node, tuple) or _NodeInfo.supports_info(node)) and id(node) in seen: continue if _NodeInfo.supports_info(node): tnode = node.__asdf_traverse__() else: tnode = node children = get_children(tnode) callback(identifiers, parent, node, [c for _, c in children]) next_nodes.extend([(identifiers + [i], node, c) for i, c in children]) seen.add(id(node)) if len(next_nodes) == 0: break current_nodes = next_nodes def _build_path(identifiers): """ Generate the Python code needed to extract the identified node. """ if len(identifiers) == 0: return "" else: return identifiers[0] + "".join("[{}]".format(repr(i)) for i in identifiers[1:]) def _wrap_filter(filter): """ Ensure that filter callable accepts the expected number of arguments. """ if filter is None: return None else: arity = len(inspect.signature(filter).parameters) if arity == 1: return lambda n, i: filter(n) elif arity == 2: return filter else: raise ValueError("filter must accept 1 or 2 arguments") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/stream.py0000644000537500020070000000337400000000000016367 0ustar00wjamiesonSTSCI\sciencefrom .tags.core import ndarray class Stream(ndarray.NDArrayType): """ Used to put a streamed array into the tree. Examples -------- Save a double-precision array with 1024 columns, one row at a time:: >>> from asdf import AsdfFile, Stream >>> import numpy as np >>> ff = AsdfFile() >>> ff.tree['streamed'] = Stream([1024], np.float64) >>> with open('test.asdf', 'wb') as fd: ... ff.write_to(fd) ... for i in range(200): ... nbytes = fd.write( ... np.array([i] * 1024, np.float64).tobytes()) """ name = None types = [] def __init__(self, shape, dtype, strides=None): self._shape = shape self._datatype, self._byteorder = ndarray.numpy_dtype_to_asdf_datatype(dtype) self._strides = strides self._array = None def _make_array(self): self._array = None @classmethod def reserve_blocks(cls, data, ctx): if isinstance(data, Stream): yield ctx.blocks.get_streamed_block() @classmethod def from_tree(cls, data, ctx): return ndarray.NDArrayType.from_tree(data, ctx) @classmethod def to_tree(cls, data, ctx): ctx.blocks.get_streamed_block() result = {} result['source'] = -1 result['shape'] = ['*'] + data._shape result['datatype'] = data._datatype result['byteorder'] = data._byteorder if data._strides is not None: result['strides'] = data._strides return result def __repr__(self): return "Stream({}, {}, strides={})".format( self._shape, self._datatype, self._strides) def __str__(self): return str(self.__repr__()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tagged.py0000644000537500020070000001025500000000000016323 0ustar00wjamiesonSTSCI\science""" This file manages a transient representation of the tree made up of simple Python data types (lists, dicts, scalars) wrapped inside of `Tagged` subclasses, which add a ``tag`` attribute to hold the associated YAML tag. Below "basic data types" refers to the basic built-in data types defined in the core YAML specification. "Custom data types" are specialized tags that are added by ASDF or third-parties that are not in the YAML specification. 
When YAML is loaded from disk, we want to first validate it using JSON schema, which only understands basic Python data types, not the ``Nodes`` that ``pyyaml`` uses as its intermediate representation. However, basic Python data types do not preserve the tag information from the YAML file that we need later to convert elements to custom data types. Therefore, the approach here is to wrap those basic types inside of `Tagged` objects long enough to run through the jsonschema validator, and then convert them to custom data types, throwing away the tag annotations in the process. Upon writing, the custom data types are first converted to basic Python data types wrapped in `Tagged` objects. The tags assigned to the ``Tagged`` objects are then used to write tags to the YAML file. All of this is an implementation detail of our custom YAML loader and dumper (``yamlutil.AsdfLoader`` and ``yamlutil.AsdfDumper``) and is not intended to be exposed to the end user. """ from collections import UserDict, UserList, UserString from copy import deepcopy, copy __all__ = ['tag_object', 'get_tag'] class Tagged: """ Base class of classes that wrap a given object and store a tag with it. """ pass class TaggedDict(Tagged, UserDict, dict): """ A Python dict with a tag attached. """ flow_style = None property_order = None def __init__(self, data=None, tag=None): if data is None: data = {} self.data = data self._tag = tag def __eq__(self, other): return (isinstance(other, TaggedDict) and self.data == other.data and self._tag == other._tag) def __deepcopy__(self, memo): data_copy = deepcopy(self.data, memo) return TaggedDict(data_copy, self._tag) def __copy__(self): data_copy = copy(self.data) return TaggedDict(data_copy, self._tag) class TaggedList(Tagged, UserList, list): """ A Python list with a tag attached. """ flow_style = None def __init__(self, data=None, tag=None): if data is None: data = [] self.data = data self._tag = tag def __eq__(self, other): return (isinstance(other, TaggedList) and self.data == other.data and self._tag == other._tag) def __deepcopy__(self, memo): data_copy = deepcopy(self.data, memo) return TaggedList(data_copy, self._tag) def __copy__(self): data_copy = copy(self.data) return TaggedList(data_copy, self._tag) class TaggedString(Tagged, UserString, str): """ A Python string with a tag attached. """ style = None def __eq__(self, other): return (isinstance(other, TaggedString) and str.__eq__(self, other) and self._tag == other._tag) def tag_object(tag, instance, ctx=None): """ Tag an object by wrapping it in a ``Tagged`` instance. """ if isinstance(instance, Tagged): instance._tag = tag elif isinstance(instance, dict): instance = TaggedDict(instance, tag) elif isinstance(instance, list): instance = TaggedList(instance, tag) elif isinstance(instance, str): instance = TaggedString(instance) instance._tag = tag else: from . import AsdfFile, yamlutil if ctx is None: ctx = AsdfFile() try: instance = yamlutil.custom_tree_to_tagged_tree(instance, ctx) except TypeError: raise TypeError("Don't know how to tag a {0}".format(type(instance))) instance._tag = tag return instance def get_tag(instance): """ Get the tag associated with the instance, if there is one.
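Examples -------- Illustrative only; the tag URI here is arbitrary:: >>> from asdf.tagged import tag_object, get_tag >>> node = tag_object('tag:nowhere.org:custom/foo-1.0.0', {'a': 1}) >>> get_tag(node) 'tag:nowhere.org:custom/foo-1.0.0' >>> get_tag({'a': 1}) is None True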
""" return getattr(instance, '_tag', None) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9396756 asdf-2.9.2/asdf/tags/0000755000537500020070000000000000000000000015451 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tags/__init__.py0000644000537500020070000000000000000000000017550 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9424744 asdf-2.9.2/asdf/tags/core/0000755000537500020070000000000000000000000016401 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/__init__.py0000644000537500020070000000315100000000000020512 0ustar00wjamiesonSTSCI\sciencefrom ...types import AsdfType class AsdfObject(dict): pass from .constant import ConstantType from .ndarray import NDArrayType from .complex import ComplexType from .integer import IntegerType from .external_reference import ExternalArrayReference __all__ = ['AsdfObject', 'Software', 'HistoryEntry', 'ExtensionMetadata', 'SubclassMetadata', 'ConstantType', 'NDArrayType', 'ComplexType', 'IntegerType', 'ExternalArrayReference'] class AsdfObjectType(AsdfType): name = 'core/asdf' version = '1.1.0' supported_versions = {'1.0.0', '1.1.0'} types = [AsdfObject] @classmethod def from_tree(cls, node, ctx): return AsdfObject(node) @classmethod def to_tree(cls, data, ctx): return dict(data) class Software(dict, AsdfType): name = 'core/software' version = '1.0.0' class HistoryEntry(dict, AsdfType): name = 'core/history_entry' version = '1.0.0' class ExtensionMetadata(dict, AsdfType): name = 'core/extension_metadata' version = '1.0.0' @property def extension_uri(self): return self.get('extension_uri') @property def extension_class(self): return self['extension_class'] @property def software(self): return self.get('software') class SubclassMetadata(dict, AsdfType): """ The tagged object supported by this class is part of an experimental feature that has since been dropped from this library. This class remains so that ASDF files that used that feature will still deserialize without warnings. """ name = 'core/subclass_metadata' version = '1.0.0' ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/complex.py0000644000537500020070000000102300000000000020416 0ustar00wjamiesonSTSCI\scienceimport numpy as np from ...types import AsdfType from ... 
import util class ComplexType(AsdfType): name = 'core/complex' version = '1.0.0' types = list(util.iter_subclasses(np.complexfloating)) + [complex] @classmethod def to_tree(cls, node, ctx): return str(node) @classmethod def from_tree(cls, tree, ctx): tree = tree.replace( 'inf', 'INF').replace( 'i', 'j').replace( 'INF', 'inf').replace( 'I', 'J') return complex(tree) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/constant.py0000644000537500020070000000066200000000000020610 0ustar00wjamiesonSTSCI\sciencefrom ...types import AsdfType class Constant: def __init__(self, value): self._value = value @property def value(self): return self._value class ConstantType(AsdfType): name = 'core/constant' version = '1.0.0' types = [Constant] @classmethod def from_tree(cls, node, ctx): return Constant(node) @classmethod def to_tree(cls, data, ctx): return data.value ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/external_reference.py0000644000537500020070000000421100000000000022611 0ustar00wjamiesonSTSCI\sciencefrom ...types import AsdfType class ExternalArrayReference(AsdfType): """ Store a reference to an array in an external file. This class is a simple way of referring to an array in another file. It provides no way to resolve these references; that is left to the user. It also performs no checking to see if any of the arguments are correct, e.g. whether the file exists. Parameters ---------- fileuri: `str` The path to the file to be referenced. Can be relative to the file containing the reference. target: `object` Some internal target to the data in the file. Examples may include an HDU index, an HDF path, or an asdf fragment. dtype: `str` The (numpy) dtype of the contained array. shape: `tuple` The shape of the array to be loaded. Examples -------- >>> import asdf >>> ref = asdf.ExternalArrayReference("myfitsfile.fits", 1, "float64", (100, 100)) >>> tree = {'reference': ref} >>> with asdf.AsdfFile(tree) as ff: ... ff.write_to("test.asdf") """ name = "core/externalarray" version = (1, 0, 0) def __init__(self, fileuri, target, dtype, shape): self.fileuri = str(fileuri) self.target = target self.dtype = dtype self.shape = tuple(shape) def __repr__(self): return "<ExternalArrayReference fileuri={0}, target={1}, shape={2}, dtype={3}>".format( self.fileuri, self.target, self.shape, self.dtype) def __str__(self): return repr(self) def __eq__(self, other): uri = self.fileuri == other.fileuri target = self.target == other.target dtype = self.dtype == other.dtype shape = self.shape == other.shape return all((uri, target, dtype, shape)) @classmethod def to_tree(cls, data, ctx): node = {} node['fileuri'] = data.fileuri node['target'] = data.target node['datatype'] = data.dtype node['shape'] = data.shape return node @classmethod def from_tree(cls, tree, ctx): return cls(tree['fileuri'], tree['target'], tree['datatype'], tree['shape']) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/integer.py0000644000537500020070000000655600000000000020414 0ustar00wjamiesonSTSCI\sciencefrom numbers import Integral import numpy as np from ...types import AsdfType class IntegerType(AsdfType): """ Enables the storage of arbitrarily large integer values The ASDF Standard mandates that integer literals in the tree can be no larger than 64 bits. Use of this class enables the storage of arbitrarily large integer values.
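(Internally, the value is decomposed into 32-bit words and stored as a ``uint32`` array; see ``to_tree`` below.)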
When reading files that contain arbitrarily large integers, the values that are restored in the tree will be raw Python `int` instances. Parameters ---------- value: `numbers.Integral` A Python integral value (e.g. `int` or `numpy.integer`) storage_type: `str`, optional Optionally overrides the storage type of the array used to represent the integer value. Valid values are "internal" (the default) and "inline" Examples -------- >>> import asdf >>> import random >>> # Create a large integer value >>> largeval = random.getrandbits(100) >>> # Store the large integer value to the tree using asdf.IntegerType >>> tree = dict(largeval=asdf.IntegerType(largeval)) >>> with asdf.AsdfFile(tree) as af: ... af.write_to('largeval.asdf') >>> with asdf.open('largeval.asdf') as aa: ... assert aa['largeval'] == largeval """ name = 'core/integer' version = '1.0.0' _value_cache = dict() def __init__(self, value, storage_type='internal'): if storage_type not in ['internal', 'inline']: raise ValueError(f"storage_type '{storage_type}' is not a recognized storage type") self._value = value self._sign = '-' if value < 0 else '+' self._storage = storage_type @classmethod def to_tree(cls, node, ctx): if ctx not in cls._value_cache: cls._value_cache[ctx] = dict() abs_value = int(np.abs(node._value)) # If the same value has already been stored, reuse the array if abs_value in cls._value_cache[ctx]: array = cls._value_cache[ctx][abs_value] else: # pack integer value into 32-bit words words = [] value = abs_value while value > 0: words.append(value & 0xffffffff) value >>= 32 array = np.array(words, dtype=np.uint32) if node._storage == 'internal': cls._value_cache[ctx][abs_value] = array tree = dict() ctx.set_array_storage(array, node._storage) tree['words'] = array tree['sign'] = node._sign tree['string'] = str(int(node._value)) return tree @classmethod def from_tree(cls, tree, ctx): value = 0 for x in tree['words'][::-1]: value <<= 32 value |= int(x) if tree['sign'] == '-': value = -value return IntegerType(value) def __int__(self): return int(self._value) def __float__(self): return float(self._value) def __eq__(self, other): if isinstance(other, Integral): return self._value == other elif isinstance(other, IntegerType): return self._value == other._value else: raise ValueError( "Can't compare IntegralType to unknown type: {}".format( type(other))) def __repr__(self): return "IntegerType({})".format(self._value) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/ndarray.py0000644000537500020070000006127000000000000020421 0ustar00wjamiesonSTSCI\scienceimport sys import numpy as np from numpy import ma from jsonschema import ValidationError from ...types import AsdfType from ... 
import util _datatype_names = { 'int8' : 'i1', 'int16' : 'i2', 'int32' : 'i4', 'int64' : 'i8', 'uint8' : 'u1', 'uint16' : 'u2', 'uint32' : 'u4', 'uint64' : 'u8', 'float32' : 'f4', 'float64' : 'f8', 'complex64' : 'c8', 'complex128' : 'c16', 'bool8' : 'b1' } _string_datatype_names = { 'ascii' : 'S', 'ucs4' : 'U' } def asdf_byteorder_to_numpy_byteorder(byteorder): if byteorder == 'big': return '>' elif byteorder == 'little': return '<' raise ValueError("Invalid ASDF byteorder '{0}'".format(byteorder)) def asdf_datatype_to_numpy_dtype(datatype, byteorder=None): if byteorder is None: byteorder = sys.byteorder if isinstance(datatype, str) and datatype in _datatype_names: datatype = _datatype_names[datatype] byteorder = asdf_byteorder_to_numpy_byteorder(byteorder) return np.dtype(str(byteorder + datatype)) elif (isinstance(datatype, list) and len(datatype) == 2 and isinstance(datatype[0], str) and isinstance(datatype[1], int) and datatype[0] in _string_datatype_names): length = datatype[1] byteorder = asdf_byteorder_to_numpy_byteorder(byteorder) datatype = str(byteorder) + str(_string_datatype_names[datatype[0]]) + str(length) return np.dtype(datatype) elif isinstance(datatype, dict): if 'datatype' not in datatype: raise ValueError("Field entry has no datatype: '{0}'".format(datatype)) name = datatype.get('name', '') byteorder = datatype.get('byteorder', byteorder) shape = datatype.get('shape') datatype = asdf_datatype_to_numpy_dtype(datatype['datatype'], byteorder) if shape is None: return (str(name), datatype) else: return (str(name), datatype, tuple(shape)) elif isinstance(datatype, list): datatype_list = [] for i, subdatatype in enumerate(datatype): np_dtype = asdf_datatype_to_numpy_dtype(subdatatype, byteorder) if isinstance(np_dtype, tuple): datatype_list.append(np_dtype) elif isinstance(np_dtype, np.dtype): datatype_list.append((str(''), np_dtype)) else: raise RuntimeError("Error parsing asdf datatype") return np.dtype(datatype_list) raise ValueError("Unknown datatype {0}".format(datatype)) def numpy_byteorder_to_asdf_byteorder(byteorder, override=None): if override is not None: return override if byteorder == '=': return sys.byteorder elif byteorder == '<': return 'little' else: return 'big' def numpy_dtype_to_asdf_datatype(dtype, include_byteorder=True, override_byteorder=None): dtype = np.dtype(dtype) if dtype.names is not None: fields = [] for name in dtype.names: field = dtype.fields[name][0] d = {} d['name'] = name field_dtype, byteorder = numpy_dtype_to_asdf_datatype(field, override_byteorder=override_byteorder) d['datatype'] = field_dtype if include_byteorder: d['byteorder'] = byteorder if field.shape: d['shape'] = list(field.shape) fields.append(d) return fields, numpy_byteorder_to_asdf_byteorder(dtype.byteorder, override=override_byteorder) elif dtype.subdtype is not None: return numpy_dtype_to_asdf_datatype(dtype.subdtype[0], override_byteorder=override_byteorder) elif dtype.name in _datatype_names: return dtype.name, numpy_byteorder_to_asdf_byteorder(dtype.byteorder, override=override_byteorder) elif dtype.name == 'bool': return 'bool8', numpy_byteorder_to_asdf_byteorder(dtype.byteorder, override=override_byteorder) elif dtype.name.startswith('string') or dtype.name.startswith('bytes'): return ['ascii', dtype.itemsize], 'big' elif dtype.name.startswith('unicode') or dtype.name.startswith('str'): return (['ucs4', int(dtype.itemsize / 4)], numpy_byteorder_to_asdf_byteorder(dtype.byteorder, override=override_byteorder)) raise ValueError("Unknown dtype {0}".format(dtype)) def 
inline_data_asarray(inline, dtype=None): # np.asarray doesn't handle structured arrays unless the innermost # elements are tuples. To do that, we drill down the first # element of each level until we find a single item that # successfully converts to a scalar of the expected structured # dtype. Then we go through and convert everything at that level # to a tuple. This probably breaks for nested structured dtypes, # but it's probably good enough for now. It also won't work with # object dtypes, but ASDF explicitly excludes those, so we're ok # there. if dtype is not None and dtype.fields is not None: def find_innermost_match(l, depth=0): if not isinstance(l, list) or not len(l): raise ValueError( "data can not be converted to structured array") try: np.asarray(tuple(l), dtype=dtype) except ValueError: return find_innermost_match(l[0], depth + 1) else: return depth depth = find_innermost_match(inline) def convert_to_tuples(l, data_depth, depth=0): if data_depth == depth: return tuple(l) else: return [convert_to_tuples(x, data_depth, depth+1) for x in l] inline = convert_to_tuples(inline, depth) return np.asarray(inline, dtype=dtype) else: def handle_mask(inline): if isinstance(inline, list): if None in inline: inline_array = np.asarray(inline) nones = np.equal(inline_array, None) return np.ma.array(np.where(nones, 0, inline), mask=nones) else: return [handle_mask(x) for x in inline] return inline inline = handle_mask(inline) inline = np.ma.asarray(inline, dtype=dtype) if not ma.is_masked(inline): return inline.data else: return inline def numpy_array_to_list(array): def tolist(x): if isinstance(x, (np.ndarray, NDArrayType)): if x.dtype.char == 'S': x = x.astype('U').tolist() else: x = x.tolist() if isinstance(x, (list, tuple)): return [tolist(y) for y in x] else: return x def ascii_to_unicode(x): # Convert byte string arrays to unicode string arrays, since YAML # doesn't handle the former. if isinstance(x, list): return [ascii_to_unicode(y) for y in x] elif isinstance(x, bytes): return x.decode('ascii') else: return x result = ascii_to_unicode(tolist(array)) return result class NDArrayType(AsdfType): name = 'core/ndarray' version = '1.0.0' types = [np.ndarray, ma.MaskedArray] def __init__(self, source, shape, dtype, offset, strides, order, mask, asdffile): self._asdffile = asdffile self._source = source self._block = None self._array = None self._mask = mask if isinstance(source, list): self._array = inline_data_asarray(source, dtype) self._array = self._apply_mask(self._array, self._mask) self._block = asdffile.blocks.add_inline(self._array) if shape is not None: if ((shape[0] == '*' and self._array.shape[1:] != tuple(shape[1:])) or (self._array.shape != tuple(shape))): raise ValueError( "inline data doesn't match the given shape") self._shape = shape self._dtype = dtype self._offset = offset self._strides = strides self._order = order if not asdffile.blocks.lazy_load: self._make_array() def _make_array(self): # If the ASDF file has been updated in-place, then there's # a chance that the block's original data object has been # closed and replaced. We need to check here and re-generate # the array if necessary, otherwise we risk segfaults when # memory mapping. 
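# (Concretely: if the cached array's base is an np.memmap whose underlying mmap has been closed, dereferencing it would be unsafe, so the cached array is dropped and rebuilt from the block below.)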
if self._array is not None: base = util.get_array_base(self._array) if isinstance(base, np.memmap) and base._mmap is not None and base._mmap.closed: self._array = None if self._array is None: block = self.block shape = self.get_actual_shape( self._shape, self._strides, self._dtype, len(block)) if block.trust_data_dtype: dtype = block.data.dtype else: dtype = self._dtype self._array = np.ndarray( shape, dtype, block.data, self._offset, self._strides, self._order) self._array = self._apply_mask(self._array, self._mask) if block.readonly: self._array.setflags(write=False) return self._array def _apply_mask(self, array, mask): if isinstance(mask, (np.ndarray, NDArrayType)): # Use "mask.view()" here so the underlying possibly # memmapped mask array is freed properly when the masked # array goes away. array = ma.array(array, mask=mask.view()) # assert util.get_array_base(array.mask) is util.get_array_base(mask) return array elif np.isscalar(mask): if np.isnan(mask): return ma.array(array, mask=np.isnan(array)) else: return ma.masked_values(array, mask) return array def __array__(self): return self._make_array() def __repr__(self): # repr alone should not force loading of the data if self._array is None: return "<{0} (unloaded) shape: {1} dtype: {2}>".format( 'array' if self._mask is None else 'masked array', self._shape, self._dtype) return repr(self._make_array()) def __str__(self): # str alone should not force loading of the data if self._array is None: return "<{0} (unloaded) shape: {1} dtype: {2}>".format( 'array' if self._mask is None else 'masked array', self._shape, self._dtype) return str(self._make_array()) def get_actual_shape(self, shape, strides, dtype, block_size): """ Get the actual shape of an array, by computing it against the block_size if it contains a ``*``. """ num_stars = shape.count('*') if num_stars == 0: return shape elif num_stars == 1: if shape[0] != '*': raise ValueError("'*' may only be in first entry of shape") if strides is not None: stride = strides[0] else: stride = np.product(shape[1:]) * dtype.itemsize missing = int(block_size / stride) return [missing] + shape[1:] raise ValueError("Invalid shape '{0}'".format(shape)) @property def block(self): if self._block is None: self._block = self._asdffile.blocks.get_block(self._source) return self._block @property def shape(self): if self._shape is None: return self.__array__().shape if '*' in self._shape: return tuple(self.get_actual_shape( self._shape, self._strides, self._dtype, len(self.block))) return tuple(self._shape) @property def dtype(self): if self._array is None: return self._dtype else: return self._make_array().dtype def __len__(self): if self._array is None: return self._shape[0] else: return len(self._make_array()) def __getattr__(self, attr): # We need to ignore __array_struct__, or unicode arrays end up # getting "double casted" and upsized. This also reduces the # number of array creations in the general case. if attr == '__array_struct__': raise AttributeError() return getattr(self._make_array(), attr) def __setitem__(self, *args): # This workaround appears to be necessary in order to avoid a segfault # in the case that array assignment causes an exception. The segfault # originates from the call to __repr__ inside the traceback report. try: self._make_array().__setitem__(*args) except Exception as e: self._array = None raise e from None def __getattribute__(self, name): # The presence of these attributes on an NDArrayType instance # can cause problems when the array is passed to other # libraries. 
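# (The attributes in question are the AsdfType class attributes "name" and "version", which the guard below hides from instance attribute access.)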
# See https://github.com/asdf-format/asdf/issues/1015 if name in ("name", "version"): raise AttributeError( f"'{self.__class__.name}' object has no attribute '{name}'" ) else: return super().__getattribute__(name) @classmethod def from_tree(cls, node, ctx): if isinstance(node, list): return cls(node, None, None, None, None, None, None, ctx) elif isinstance(node, dict): source = node.get('source') data = node.get('data') if source and data: raise ValueError( "Both source and data may not be provided " "at the same time") if data: source = data shape = node.get('shape', None) if data is not None: byteorder = sys.byteorder else: byteorder = node['byteorder'] if 'datatype' in node: dtype = asdf_datatype_to_numpy_dtype( node['datatype'], byteorder) else: dtype = None offset = node.get('offset', 0) strides = node.get('strides', None) mask = node.get('mask', None) return cls(source, shape, dtype, offset, strides, 'A', mask, ctx) raise TypeError("Invalid ndarray description.") @classmethod def reserve_blocks(cls, data, ctx): # Find all of the used data buffers so we can add or rearrange # them if necessary if isinstance(data, np.ndarray): yield ctx.blocks.find_or_create_block_for_array(data, ctx) elif isinstance(data, NDArrayType): yield data.block @classmethod def to_tree(cls, data, ctx): # The ndarray-1.0.0 schema does not permit 0 valued strides. # Perhaps we'll want to allow this someday, to efficiently # represent an array of all the same value. if any(stride == 0 for stride in data.strides): data = np.ascontiguousarray(data) # The view computations that follow assume that the base array # is contiguous. If not, we need to make a copy to avoid # writing a nonsense view. base = util.get_array_base(data) if not base.flags.contiguous: data = np.ascontiguousarray(data) base = util.get_array_base(data) shape = data.shape block = ctx.blocks.find_or_create_block_for_array(data, ctx) if block.array_storage == "fits": # Views over arrays stored in FITS files have some idiosyncracies. # astropy.io.fits always writes arrays C-contiguous with big-endian # byte order, whereas asdf preserves the "contiguousity" and byte order # of the base array. if (block.data.shape != data.shape or block.data.dtype != data.dtype or block.data.ctypes.data != data.ctypes.data or block.data.strides != data.strides): raise ValueError( "ASDF has only limited support for serializing views over arrays stored " "in FITS HDUs. This error likely means that a slice of such an array " "was found in the ASDF tree. The slice can be decoupled from the FITS " "array by calling copy() before assigning it to the tree." ) offset = 0 strides = None dtype, byteorder = numpy_dtype_to_asdf_datatype( data.dtype, include_byteorder=(block.array_storage != "inline"), override_byteorder="big", ) else: # Compute the offset relative to the base array and not the # block data, in case the block is compressed. 
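# (For example, for a view data = base[2:] of a float64 base array, data.ctypes.data sits 16 bytes past base.ctypes.data, so offset is 16.)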
offset = data.ctypes.data - base.ctypes.data if data.flags.c_contiguous: strides = None else: strides = data.strides dtype, byteorder = numpy_dtype_to_asdf_datatype( data.dtype, include_byteorder=(block.array_storage != "inline"), ) result = {} result['shape'] = list(shape) if block.array_storage == 'streamed': result['shape'][0] = '*' if block.array_storage == 'inline': listdata = numpy_array_to_list(data) result['data'] = listdata result['datatype'] = dtype else: result['shape'] = list(shape) if block.array_storage == 'streamed': result['shape'][0] = '*' result['source'] = ctx.blocks.get_source(block) result['datatype'] = dtype result['byteorder'] = byteorder if offset > 0: result['offset'] = offset if strides is not None: result['strides'] = list(strides) if isinstance(data, ma.MaskedArray): if np.any(data.mask): if block.array_storage == 'inline': ctx.blocks.set_array_storage(ctx.blocks[data.mask], 'inline') result['mask'] = data.mask return result @classmethod def _assert_equality(cls, old, new, func): if old.dtype.fields: if not new.dtype.fields: # This line is safe because this is actually a piece of test # code, even though it lives in this file: assert False, "arrays not equal" # nosec for a, b in zip(old, new): cls._assert_equality(a, b, func) else: old = old.__array__() new = new.__array__() if old.dtype.char in 'SU': if old.dtype.char == 'S': old = old.astype('U') if new.dtype.char == 'S': new = new.astype('U') old = old.tolist() new = new.tolist() # This line is safe because this is actually a piece of test # code, even though it lives in this file: assert old == new # nosec else: func(old, new) @classmethod def assert_equal(cls, old, new): from numpy.testing import assert_array_equal cls._assert_equality(old, new, assert_array_equal) @classmethod def assert_allclose(cls, old, new): from numpy.testing import assert_allclose, assert_array_equal if (old.dtype.kind in 'iu' and new.dtype.kind in 'iu'): cls._assert_equality(old, new, assert_array_equal) else: cls._assert_equality(old, new, assert_allclose) @classmethod def copy_to_new_asdf(cls, node, asdffile): if isinstance(node, NDArrayType): array = node._make_array() asdffile.blocks.set_array_storage(asdffile.blocks[array], node.block.array_storage) return node._make_array() return node def _make_operation(name): def __operation__(self, *args): return getattr(self._make_array(), name)(*args) return __operation__ for op in [ '__neg__', '__pos__', '__abs__', '__invert__', '__complex__', '__int__', '__long__', '__float__', '__oct__', '__hex__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', '__cmp__', '__rcmp__', '__add__', '__sub__', '__mul__', '__floordiv__', '__mod__', '__divmod__', '__pow__', '__lshift__', '__rshift__', '__and__', '__xor__', '__or__', '__div__', '__truediv__', '__radd__', '__rsub__', '__rmul__', '__rdiv__', '__rtruediv__', '__rfloordiv__', '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__', '__iadd__', '__isub__', '__imul__', '__idiv__', '__itruediv__', '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__', '__irshift__', '__iand__', '__ixor__', '__ior__', '__getitem__', '__delitem__', '__contains__']: setattr(NDArrayType, op, _make_operation(op)) def _get_ndim(instance): if isinstance(instance, list): array = inline_data_asarray(instance) return array.ndim elif isinstance(instance, dict): if 'shape' in instance: return len(instance['shape']) elif 'data' in instance: array = inline_data_asarray(instance['data']) return array.ndim elif 
isinstance(instance, (np.ndarray, NDArrayType)): return len(instance.shape) def validate_ndim(validator, ndim, instance, schema): in_ndim = _get_ndim(instance) if in_ndim != ndim: yield ValidationError( "Wrong number of dimensions: Expected {0}, got {1}".format( ndim, in_ndim), instance=repr(instance)) def validate_max_ndim(validator, max_ndim, instance, schema): in_ndim = _get_ndim(instance) if in_ndim > max_ndim: yield ValidationError( "Wrong number of dimensions: Expected max of {0}, got {1}".format( max_ndim, in_ndim), instance=repr(instance)) def validate_datatype(validator, datatype, instance, schema): if isinstance(instance, list): array = inline_data_asarray(instance) in_datatype, _ = numpy_dtype_to_asdf_datatype(array.dtype) elif isinstance(instance, dict): if 'datatype' in instance: in_datatype = instance['datatype'] elif 'data' in instance: array = inline_data_asarray(instance['data']) in_datatype, _ = numpy_dtype_to_asdf_datatype(array.dtype) else: raise ValidationError("Not an array") elif isinstance(instance, (np.ndarray, NDArrayType)): in_datatype, _ = numpy_dtype_to_asdf_datatype(instance.dtype) else: raise ValidationError("Not an array") if datatype == in_datatype: return if schema.get('exact_datatype', False): yield ValidationError( "Expected datatype '{0}', got '{1}'".format( datatype, in_datatype)) np_datatype = asdf_datatype_to_numpy_dtype(datatype) np_in_datatype = asdf_datatype_to_numpy_dtype(in_datatype) if not np_datatype.fields: if np_in_datatype.fields: yield ValidationError( "Expected scalar datatype '{0}', got '{1}'".format( datatype, in_datatype)) if not np.can_cast(np_in_datatype, np_datatype, 'safe'): yield ValidationError( "Can not safely cast from '{0}' to '{1}' ".format( in_datatype, datatype)) else: if not np_in_datatype.fields: yield ValidationError( "Expected structured datatype '{0}', got '{1}'".format( datatype, in_datatype)) if len(np_in_datatype.fields) != len(np_datatype.fields): yield ValidationError( "Mismatch in number of columns: " "Expected {0}, got {1}".format( len(datatype), len(in_datatype))) for i in range(len(np_datatype.fields)): in_type = np_in_datatype[i] out_type = np_datatype[i] if not np.can_cast(in_type, out_type, 'safe'): yield ValidationError( "Can not safely cast to expected datatype: " "Expected {0}, got {1}".format( numpy_dtype_to_asdf_datatype(out_type)[0], numpy_dtype_to_asdf_datatype(in_type)[0])) NDArrayType.validators = { 'ndim': validate_ndim, 'max_ndim': validate_max_ndim, 'datatype': validate_datatype } ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9450831 asdf-2.9.2/asdf/tags/core/tests/0000755000537500020070000000000000000000000017543 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tags/core/tests/__init__.py0000644000537500020070000000000000000000000021642 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9464753 asdf-2.9.2/asdf/tags/core/tests/data/0000755000537500020070000000000000000000000020454 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tags/core/tests/data/__init__.py0000644000537500020070000000000000000000000022553 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 
asdf-2.9.2/asdf/tags/core/tests/data/datatype-1.0.0.yaml0000644000537500020070000000073100000000000023606 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://nowhere.org/schemas/custom/datatype-1.0.0" type: object properties: a: datatype: float32 b: datatype: float32 exact_datatype: true c: datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] d: datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] exact_datatype: true././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tags/core/tests/data/ndim-1.0.0.yaml0000644000537500020070000000027300000000000022723 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://nowhere.org/schemas/custom/ndim-1.0.0" type: object properties: a: ndim: 2 b: max_ndim: 2././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/tests/test_complex.py0000644000537500020070000000252500000000000022627 0ustar00wjamiesonSTSCI\scienceimport re import pytest import asdf from asdf.tests import helpers def make_complex_asdf(string): yaml = """ a: !core/complex-1.0.0 {} """.format(string) return helpers.yaml_to_asdf(yaml) @pytest.mark.parametrize('invalid', [ '3 + 4i', '3+-4i', '3-+4i', '3i+4i', 'X3+4iX', '3+X4i', '3+4', '3i+4', '3+4z', '3.+4i', '3+4.i', '3e-4.0+4i', '3+4e4.0i', '' ]) def test_invalid_complex(invalid): with pytest.raises(asdf.ValidationError): with asdf.open(make_complex_asdf(invalid)): pass @pytest.mark.parametrize('valid', [ '3+4j', '(3+4j)', '.3+4j', '3+.4j', '3e10+4j', '3e-10+4j', '3+4e10j', '3.0+4j', '3+4.0j', '3.0+4.0j', '3+4e-10j', '3+4J', '3+4i', '3+4I', 'inf', 'inf+infj', 'inf+infi', 'infj', 'infi', 'INFi', 'INFI', '3+infj', 'inf+4j', ]) def test_valid_complex(valid): with asdf.open(make_complex_asdf(valid)) as af: assert af.tree['a'] == complex(re.sub(r'[iI]$', r'j', valid)) @pytest.mark.parametrize('valid', [ 'nan', 'nan+nanj', 'nan+nani', 'nanj', 'nani', 'NANi', 'NANI', '3+nanj', 'nan+4j' ]) def test_valid_nan_complex(valid): with asdf.open(make_complex_asdf(valid)): pass def test_roundtrip(tmpdir): tree = { 'a': 0+0j, 'b': 1+1j, 'c': -1+1j, 'd': -1-1j } helpers.assert_roundtrip_tree(tree, tmpdir) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/tests/test_extension_metadata.py0000644000537500020070000000117400000000000025033 0ustar00wjamiesonSTSCI\scienceimport asdf from asdf.tests import helpers def test_extra_properties(): yaml = """ metadata: !core/extension_metadata-1.0.0 extension_class: foo.extension.FooExtension software: !core/software-1.0.0 name: FooSoft version: "1.5" extension_uri: http://foo.biz/extensions/foo-1.0.0 """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff) as af: assert af["metadata"].extension_class == "foo.extension.FooExtension" assert af["metadata"].software["name"] == "FooSoft" assert af["metadata"].software["version"] == "1.5" assert af["metadata"]["extension_uri"] == "http://foo.biz/extensions/foo-1.0.0" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/tests/test_external_reference.py0000644000537500020070000000052400000000000025015 0ustar00wjamiesonSTSCI\sciencefrom asdf.tags.core.external_reference import ExternalArrayReference from asdf.tests import helpers def test_roundtrip_external_array(tmpdir): ref =
ExternalArrayReference("./nonexistant.fits", 1, "np.float64", (100, 100)) tree = {'nothere': ref} helpers.assert_roundtrip_tree(tree, tmpdir) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/tests/test_history.py0000644000537500020070000002040400000000000022655 0ustar00wjamiesonSTSCI\scienceimport os import datetime import fractions import pytest from jsonschema import ValidationError import asdf from asdf import util from asdf import types from asdf.tests import helpers from asdf.tests.helpers import yaml_to_asdf, assert_no_warnings from asdf.tags.core import HistoryEntry from asdf.exceptions import AsdfWarning SCHEMA_PATH = os.path.join(os.path.dirname(helpers.__file__), 'data') class CustomExtension: """ This is the base class that is used for extensions for custom tag classes that exist only for the purposes of testing. """ @property def types(self): return [] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(SCHEMA_PATH) + '/{url_suffix}.yaml')] def test_history(): ff = asdf.AsdfFile() assert 'history' not in ff.tree ff.add_history_entry('This happened', {'name': 'my_tool', 'homepage': 'http://nowhere.org', 'author': 'John Doe', 'version': '2.0'}) assert len(ff.tree['history']['entries']) == 1 with pytest.raises(ValidationError): ff.add_history_entry('That happened', {'author': 'John Doe', 'version': '2.0'}) assert len(ff.tree['history']['entries']) == 1 ff.add_history_entry('This other thing happened') assert len(ff.tree['history']['entries']) == 2 assert isinstance(ff.tree['history']['entries'][0]['time'], datetime.datetime) def test_history_to_file(tmpdir): tmpfile = str(tmpdir.join('history.asdf')) with asdf.AsdfFile() as ff: ff.add_history_entry('This happened', {'name': 'my_tool', 'homepage': 'http://nowhere.org', 'author': 'John Doe', 'version': '2.0'}) ff.write_to(tmpfile) with asdf.open(tmpfile) as ff: assert 'entries' in ff.tree['history'] assert 'extensions' in ff.tree['history'] assert len(ff.tree['history']['entries']) == 1 entry = ff.tree['history']['entries'][0] assert entry['description'] == 'This happened' assert entry['software']['name'] == 'my_tool' assert entry['software']['version'] == '2.0' # Test the history entry retrieval API entries = ff.get_history_entries() assert len(entries) == 1 assert isinstance(entries, list) assert isinstance(entries[0], HistoryEntry) assert entries[0]['description'] == "This happened" assert entries[0]['software']['name'] == 'my_tool' def test_old_history(tmpdir): """Make sure that old versions of the history format are still accepted""" yaml = """ history: - !core/history_entry-1.0.0 description: "Here's a test of old history entries" software: !core/software-1.0.0 name: foo version: 1.2.3 """ buff = yaml_to_asdf(yaml) with asdf.open(buff) as af: assert len(af.tree['history']) == 1 # Test the history entry retrieval API entries = af.get_history_entries() assert len(entries) == 1 assert isinstance(entries, list) assert isinstance(entries[0], HistoryEntry) assert entries[0]['description'] == "Here's a test of old history entries" assert entries[0]['software']['name'] == 'foo' def test_get_history_entries(tmpdir): """ Test edge cases for the get_history_entries API. 
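In particular, opening a file to which no history entries were ever added should yield an empty list rather than an error.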
Other cases tested above """ tmpfile = str(tmpdir.join('empty.asdf')) with asdf.AsdfFile() as af: af.write_to(tmpfile) # Make sure this works when there is no history section at all with asdf.open(tmpfile) as af: assert len(af['history']['extensions']) > 0 assert len(af.get_history_entries()) == 0 def test_extension_metadata(tmpdir): ff = asdf.AsdfFile() tmpfile = str(tmpdir.join('extension.asdf')) ff.write_to(tmpfile) with asdf.open(tmpfile) as af: assert len(af.tree['history']['extensions']) == 1 metadata = af.tree['history']['extensions'][0] assert metadata.extension_class == 'asdf.extension.BuiltinExtension' # Don't bother with testing the version here since it will depend on # how recently the package was built (version is auto-generated) assert metadata.software['name'] == 'asdf' def test_missing_extension_warning(): yaml = """ history: extensions: - !core/extension_metadata-1.0.0 extension_class: foo.bar.FooBar software: !core/software-1.0.0 name: foo version: 1.2.3 """ buff = yaml_to_asdf(yaml) with pytest.warns(AsdfWarning, match="File was created with extension class 'foo.bar.FooBar'"): with asdf.open(buff): pass def test_extension_version_warning(): yaml = """ history: extensions: - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: !core/software-1.0.0 name: asdf version: 100.0.3 """ buff = yaml_to_asdf(yaml) with pytest.warns(AsdfWarning, match="File was created with extension class 'asdf.extension.BuiltinExtension'"): with asdf.open(buff): pass buff.seek(0) # Make sure suppressing the warning works too with assert_no_warnings(): with asdf.open(buff, ignore_missing_extensions=True): pass def test_strict_extension_check(): yaml = """ history: extensions: - !core/extension_metadata-1.0.0 extension_class: foo.bar.FooBar software: !core/software-1.0.0 name: foo version: 1.2.3 """ buff = yaml_to_asdf(yaml) with pytest.raises(RuntimeError): with asdf.open(buff, strict_extension_check=True): pass # Make sure to test for incompatibility with ignore_missing_extensions buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff, strict_extension_check=True, ignore_missing_extensions=True): pass def test_metadata_with_custom_extension(tmpdir): class FractionType(types.CustomType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [fractions.Fraction] @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return fractions.Fraction(tree[0], tree[1]) class FractionExtension(CustomExtension): @property def types(self): return [FractionType] tree = { 'fraction': fractions.Fraction(2, 3) } tmpfile = str(tmpdir.join('custom_extension.asdf')) with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff: ff.write_to(tmpfile) # We expect metadata about both the Builtin extension and the custom one with asdf.open(tmpfile, extensions=FractionExtension()) as af: assert len(af['history']['extensions']) == 2 with pytest.warns(AsdfWarning, match="was created with extension"): with asdf.open(tmpfile, ignore_unrecognized_tag=True): pass # If we use the extension but we don't serialize any types that require it, # no metadata about this extension should be added to the file tree2 = { 'x': [x for x in range(10)] } tmpfile2 = str(tmpdir.join('no_extension.asdf')) with asdf.AsdfFile(tree2, extensions=FractionExtension()) as ff: ff.write_to(tmpfile2) with asdf.open(tmpfile2) as af: assert len(af['history']['extensions']) == 1 with 
assert_no_warnings(): with asdf.open(tmpfile2): pass # Make sure that this works even when constructing the tree on-the-fly tmpfile3 = str(tmpdir.join('custom_extension2.asdf')) with asdf.AsdfFile(extensions=FractionExtension()) as ff: ff.tree['fraction'] = fractions.Fraction(4, 5) ff.write_to(tmpfile3) with asdf.open(tmpfile3, extensions=FractionExtension()) as af: assert len(af['history']['extensions']) == 2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/tests/test_integer.py0000644000537500020070000000441500000000000022615 0ustar00wjamiesonSTSCI\scienceimport random import pytest import asdf from asdf import IntegerType from asdf.tests import helpers # Make sure tests are deterministic random.seed(0) @pytest.mark.parametrize('sign', ['+', '-']) @pytest.mark.parametrize('value', [ random.getrandbits(64), random.getrandbits(65), random.getrandbits(100), random.getrandbits(128), random.getrandbits(129), random.getrandbits(200), ]) def test_integer_value(tmpdir, value, sign): if sign == '-': value = -value integer = IntegerType(value) tree = dict(integer=integer) helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.parametrize('inline', [False, True]) def test_integer_storage(tmpdir, inline): tmpfile = str(tmpdir.join('integer.asdf')) kwargs = dict() if inline: kwargs['storage_type'] = 'inline' random.seed(0) value = random.getrandbits(1000) tree = dict(integer=IntegerType(value, **kwargs)) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) with asdf.open(tmpfile, _force_raw_types=True) as rf: if inline: assert 'source' not in rf.tree['integer']['words'] assert 'data' in rf.tree['integer']['words'] else: assert 'source' in rf.tree['integer']['words'] assert 'data' not in rf.tree['integer']['words'] assert 'string' in rf.tree['integer'] assert rf.tree['integer']['string'] == str(value) def test_integer_storage_duplication(tmpdir): tmpfile = str(tmpdir.join('integer.asdf')) random.seed(0) value = random.getrandbits(1000) tree = dict(integer1=IntegerType(value), integer2=IntegerType(value)) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) assert len(af.blocks) == 1 with asdf.open(tmpfile, _force_raw_types=True) as rf: assert rf.tree['integer1']['words']['source'] == 0 assert rf.tree['integer2']['words']['source'] == 0 with asdf.open(tmpfile) as aa: assert aa.tree['integer1'] == value assert aa.tree['integer2'] == value def test_integer_conversion(): random.seed(0) value = random.getrandbits(1000) integer = asdf.IntegerType(value) assert integer == value assert int(integer) == int(value) assert float(integer) == float(value) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tags/core/tests/test_ndarray.py0000644000537500020070000006214200000000000022621 0ustar00wjamiesonSTSCI\scienceimport io import os import re import sys import pytest import numpy as np from numpy import ma from numpy.testing import assert_array_equal import jsonschema import yaml import asdf from asdf import util from asdf.tests import helpers, CustomTestType from asdf.tags.core import ndarray from . import data as test_data TEST_DATA_PATH = helpers.get_test_data_path('', module=test_data) # These custom types and the custom extension are here purely for the purpose # of testing NDArray objects and making sure that they can be validated as part # of a nested hierarchy, and not just top-level objects. 
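# For reference, the custom schemas exercised below live in the data/ directory
# above: ndim-1.0.0 constrains property 'a' to exactly two dimensions and 'b'
# to at most two, while datatype-1.0.0 constrains 'a' and 'b' to float32 (with
# 'b' requiring an exact datatype match) and 'c' and 'd' to a structured
# [int16, ascii-16] datatype ('d' again requiring an exact match).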
class CustomNdim(CustomTestType): name = 'ndim' organization = 'nowhere.org' standard = 'custom' version = '1.0.0' class CustomDatatype(CustomTestType): name = 'datatype' organization = 'nowhere.org' standard = 'custom' version = '1.0.0' class CustomExtension: @property def types(self): return [CustomNdim, CustomDatatype] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [( 'http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] def test_sharing(tmpdir): x = np.arange(0, 10, dtype=float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2] } def check_asdf(asdf): tree = asdf.tree assert_array_equal(tree['science_data'], x) assert_array_equal(tree['subset'], x[3:-3]) assert_array_equal(tree['skipping'], x[::2]) assert tree['science_data'].ctypes.data == tree['skipping'].ctypes.data assert len(list(asdf.blocks.internal_blocks)) == 1 assert next(asdf.blocks.internal_blocks)._size == 80 if 'w' in asdf._mode: tree['science_data'][0] = 42 assert tree['skipping'][0] == 42 def check_raw_yaml(content): assert b'!core/ndarray' in content helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, raw_yaml_check_func=check_raw_yaml) def test_byteorder(tmpdir): tree = { 'bigendian': np.arange(0, 10, dtype=str('>f8')), 'little': np.arange(0, 10, dtype=str('<f8')) } def check_asdf(asdf): my_tree = asdf.tree if sys.byteorder == 'little': assert my_tree['bigendian'].dtype.byteorder == '>' assert my_tree['little'].dtype.byteorder == '=' else: assert my_tree['bigendian'].dtype.byteorder == '=' assert my_tree['little'].dtype.byteorder == '<' def check_raw_yaml(content): assert b'byteorder: little' in content assert b'byteorder: big' in content helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf, raw_yaml_check_func=check_raw_yaml) def test_all_dtypes(tmpdir): tree = {} for byteorder in ('>', '<'): for dtype in ndarray._datatype_names.values(): # Python 3 can't expose these dtypes in non-native byte # order, because it's using the new Python buffer # interface.
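# ('f16' and 'c32' are numpy's 16-byte float and 32-byte complex codes,
# i.e. float128 and complex256.)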
if dtype in ('c32', 'f16'): continue if dtype == 'b1': arr = np.array([True, False]) else: arr = np.arange(0, 10, dtype=str(byteorder + dtype)) tree[byteorder + dtype] = arr helpers.assert_roundtrip_tree(tree, tmpdir) def test_dont_load_data(): x = np.arange(0, 10, dtype=float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2] } ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) with asdf.open(buff) as ff: ff.run_hook('reserve_blocks') # repr and str shouldn't load data str(ff.tree['science_data']) repr(ff.tree) for block in ff.blocks.internal_blocks: assert block._data is None def test_table_inline(tmpdir): table = np.array( [(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[(str('MINE'), np.int8), (str(''), np.float64), (str('arr'), '>i4', (2,))]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.safe_load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'datatype': 'int8', 'name': 'MINE'}, {'datatype': 'float64', 'name': 'f1'}, {'datatype': 'int32', 'name': 'arr', 'shape': [2]} ], 'data': [[0, 1.0, [2, 3]], [4, 5.0, [6, 7]]], 'shape': [2] } with asdf.config_context() as config: config.array_inline_threshold = 100 helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_raw_yaml) def test_array_inline_threshold_recursive(tmpdir): models = pytest.importorskip('astropy.modeling.models') aff = models.AffineTransformation2D(matrix=[[1, 2], [3, 4]]) tree = {'test': aff} def check_asdf(asdf): assert len(list(asdf.blocks.internal_blocks)) == 0 with asdf.config_context() as config: config.array_inline_threshold = 100 helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf) def test_copy_inline(): yaml = """ x0: !core/ndarray-1.0.0 data: [-1.0, 1.0] """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff) as infile: with asdf.AsdfFile() as f: f.tree['a'] = infile.tree['x0'] f.tree['b'] = f.tree['a'] f.write_to(io.BytesIO()) def test_table(tmpdir): table = np.array( [(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[(str('MINE'), np.int8), (str(''), 'i4', (2,))]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.safe_load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'byteorder': 'big', 'datatype': 'int8', 'name': 'MINE'}, {'byteorder': 'little', 'datatype': 'float64', 'name': 'f1'}, {'byteorder': 'big', 'datatype': 'int32', 'name': 'arr', 'shape': [2]} ], 'shape': [2], 'source': 0, 'byteorder': 'big' } helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_raw_yaml) def test_table_nested_fields(tmpdir): table = np.array( [(0, (1, 2)), (4, (5, 6)), (7, (8, 9))], dtype=[(str('A'), ' 5) tree = { 'masked_array': m, 'unmasked_array': x } def check_asdf(asdf): tree = asdf.tree m = tree['masked_array'] print(m) print(m.mask) assert np.all(m.mask[6:]) assert len(asdf.blocks) == 2 helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf) def test_len_roundtrip(tmpdir): sequence = np.arange(0, 10, dtype=int) tree = { 'sequence': sequence } def check_len(asdf): s = asdf.tree["sequence"] assert len(s) == 10 helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_len) def test_mask_arbitrary(): content = """ arr: !core/ndarray-1.0.0 data: [[1, 2, 3, 1234], [5, 6, 7, 8]] mask: 1234 """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) def test_mask_nan(): content = """ arr: !core/ndarray-1.0.0 
data: [[1, 2, 3, .NaN], [5, 6, 7, 8]] mask: .NaN """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) def test_string(tmpdir): tree = { 'ascii': np.array([b'foo', b'bar', b'baz']), 'unicode': np.array(['სამეცნიერო', 'данные', 'வடிவம்']) } helpers.assert_roundtrip_tree(tree, tmpdir) def test_string_table(tmpdir): tree = { 'table': np.array([(b'foo', 'სამეცნიერო', '42', '53.0')]) } helpers.assert_roundtrip_tree(tree, tmpdir) def test_inline_string(): content = "arr: !core/ndarray-1.0.0 ['a', 'b', 'c']" buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert_array_equal(ff.tree['arr']._make_array(), ['a', 'b', 'c']) def test_inline_structured(): content = """ arr: !core/ndarray-1.0.0 datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]] data: [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]]""" buff = helpers.yaml_to_asdf(content) with asdf.open(buff) as ff: assert ff.tree['arr']['f1'].dtype.char == 'H' def test_simple_table(): table = np.array( [(10.683262825012207, 41.2674560546875, 0.13, 0.12, 213.916), (10.682777404785156, 41.270111083984375, 0.1, 0.09, 306.825), (10.684737205505371, 41.26903533935547, 0.08, 0.07, 96.656), (10.682382583618164, 41.26792526245117, 0.1, 0.09, 237.145), (10.686025619506836, 41.26922607421875, 0.13, 0.12, 79.581), (10.685656547546387, 41.26955032348633, 0.13, 0.12, 55.219), (10.684028625488281, 41.27090072631836, 0.13, 0.12, 345.269), (10.687610626220703, 41.270301818847656, 0.18, 0.14, 60.192)], dtype=[ (str('ra'), str('<f8')), (str('dec'), str('<f8')), (str('err_maj'), str('<f8')), (str('err_min'), str('<f8')), (str('angle'), str('<f8'))]) tree = {'table': table} ff = asdf.AsdfFile(tree) ff.set_array_storage(table, 'inline') ff.write_to(io.BytesIO()) def test_ndim_validation(tmpdir): content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> a: !core/ndarray-1.0.0 data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> a: !core/ndarray-1.0.0 shape: [1, 3] data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> b: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> b: !core/ndarray-1.0.0 data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/ndim-1.0.0> b: !core/ndarray-1.0.0 data: [[[1, 2, 3]]] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass def test_datatype_validation(tmpdir): content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: float32 """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: float64 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: int16 """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0>
b: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: int16 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> a: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass def test_structured_datatype_validation(tmpdir): content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int64 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [[1, 'a', 0], [2, 'b', 1], [3, 'c', 2]] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] - name: c datatype: float64 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> c: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0> d: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.open(buff, extensions=CustomExtension()): pass content = """ obj: !<tag:nowhere.org:custom/datatype-1.0.0>
d: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] """ buff = helpers.yaml_to_asdf(content) with asdf.open(buff, extensions=CustomExtension()): pass def test_string_inline(): x = np.array([b'a', b'b', b'c']) l = ndarray.numpy_array_to_list(x) for entry in l: assert isinstance(entry, str) def test_inline_shape_mismatch(): content = """ arr: !core/ndarray-1.0.0 data: [1, 2, 3] shape: [2] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(ValueError): with asdf.open(buff): pass @pytest.mark.xfail( reason="NDArrays with dtype=object are not currently supported") def test_simple_object_array(tmpdir): # See https://github.com/asdf-format/asdf/issues/383 for feature # request dictdata = np.empty((3, 3), dtype=object) for i, _ in enumerate(dictdata.flat): dictdata.flat[i] = {'foo': i*42, 'bar': i**2} helpers.assert_roundtrip_tree({'bizbaz': dictdata}, tmpdir) @pytest.mark.xfail( reason="NDArrays with dtype=object are not currently supported") def test_tagged_object_array(tmpdir): # See https://github.com/asdf-format/asdf/issues/383 for feature # request quantity = pytest.importorskip('astropy.units.quantity') objdata = np.empty((3, 3), dtype=object) for i, _ in enumerate(objdata.flat): objdata.flat[i] = quantity.Quantity(i, 'angstrom') helpers.assert_roundtrip_tree({'bizbaz': objdata}, tmpdir) def test_broadcasted_array(tmpdir): attrs = np.broadcast_arrays(np.array([10,20]), np.array(10), np.array(10)) tree = {'one': attrs[1] }#, 'two': attrs[1], 'three': attrs[2]} helpers.assert_roundtrip_tree(tree, tmpdir) def test_broadcasted_offset_array(tmpdir): base = np.arange(10) offset = base[5:] broadcasted = np.broadcast_to(offset, (4, 5)) tree = {'broadcasted': broadcasted} helpers.assert_roundtrip_tree(tree, tmpdir) def test_non_contiguous_base_array(tmpdir): base = np.arange(60).reshape(5, 4, 3).transpose(2, 0, 1) * 1 contiguous = base.transpose(1, 2, 0) tree = {'contiguous': contiguous} helpers.assert_roundtrip_tree(tree, tmpdir) def test_fortran_order(tmpdir): array = np.array([[11,12,13], [21,22,23]], order='F') tree = dict(data=array) helpers.assert_roundtrip_tree(tree, tmpdir) def test_readonly(tmpdir): tmpfile = str(tmpdir.join('data.asdf')) tree = dict(data=np.ndarray((100))) with asdf.AsdfFile(tree) as af: # Make sure we're actually writing to an internal array for this test af.write_to(tmpfile, all_array_storage='internal') # Opening in read mode (the default) should mean array is readonly with asdf.open(tmpfile) as af: assert af['data'].flags.writeable == False with pytest.raises(ValueError) as err: af['data'][0] = 41 assert str(err) == 'assignment destination is read-only' # This should be perfectly fine with asdf.open(tmpfile, mode='rw') as af: assert af['data'].flags.writeable == True af['data'][0] = 40 # Copying the arrays makes it safe to write to the underlying array with asdf.open(tmpfile, mode='r', copy_arrays=True) as af: assert af['data'].flags.writeable == True af['data'][0] = 42 def test_readonly_inline(tmpdir): tmpfile = str(tmpdir.join('data.asdf')) tree = dict(data=np.ndarray((100))) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile, all_array_storage='inline') # This should be safe since it's an inline array with asdf.open(tmpfile, mode='r') as af: assert af['data'].flags.writeable == True af['data'][0] = 42 # Confirm that NDArrayType's internal array is regenerated # following an update. 
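# (af.update() rewrites the underlying blocks in place, so the lazily-cached
# array object must be re-realized from the new block data rather than reused.)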
def test_block_data_change(tmpdir): tmpfile = str(tmpdir.join("data.asdf")) tree = {"data": np.ndarray(10)} with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) with asdf.open(tmpfile, mode="rw") as af: array_before = af.tree["data"].__array__() af.update() array_after = af.tree["data"].__array__() assert array_before is not array_after def test_problematic_class_attributes(tmp_path): """ The presence of the "name" and "version" attributes in NDArrayType cause problems when our arrays are used with other libraries. See https://github.com/asdf-format/asdf/issues/1015 """ file_path = tmp_path / "test.asdf" with asdf.AsdfFile() as af: af["arr"] = np.arange(100) af.write_to(file_path) with asdf.open(file_path) as af: assert isinstance(af["arr"], ndarray.NDArrayType) with pytest.raises(AttributeError): af["arr"].name with pytest.raises(AttributeError): af["arr"].version ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9472253 asdf-2.9.2/asdf/testing/0000755000537500020070000000000000000000000016170 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643914801.0 asdf-2.9.2/asdf/testing/__init__.py0000644000537500020070000000000000000000000020267 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643914801.0 asdf-2.9.2/asdf/testing/helpers.py0000644000537500020070000000302300000000000020202 0ustar00wjamiesonSTSCI\science""" Helpers for writing unit tests of ASDF support. """ from io import BytesIO import asdf def roundtrip_object(obj, version=None): """ Add the specified object to an AsdfFile's tree, write the file to a buffer, then read it back in and return the deserialized object. Parameters ---------- obj : object Object to serialize. version : str or None. ASDF Standard version. If None, use the library's default version. Returns ------- object The deserialized object. """ buff = BytesIO() with asdf.AsdfFile(version=version) as af: af["obj"] = obj af.write_to(buff) buff.seek(0) with asdf.open(buff, lazy_load=False, copy_arrays=True) as af: return af["obj"] def yaml_to_asdf(yaml_content, version=None): """ Given a string of YAML content, adds the extra pre- and post-amble to make it an ASDF file. Parameters ---------- yaml_content : string or bytes YAML content. version : str or None. ASDF Standard version. If None, use the library's default version. Returns ------- io.BytesIO A file-like object containing the ASDF file. """ if isinstance(yaml_content, str): yaml_content = yaml_content.encode("utf-8") buff = BytesIO() with asdf.AsdfFile(version=version) as af: af["$REPLACE"] = "ME" af.write_to(buff) buff.seek(0) asdf_content = buff.read().replace(b"$REPLACE: ME", yaml_content) return BytesIO(asdf_content) ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1644282536.960342 asdf-2.9.2/asdf/tests/0000755000537500020070000000000000000000000015655 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/__init__.py0000644000537500020070000000334400000000000017772 0ustar00wjamiesonSTSCI\science""" This packages contains affiliated package tests. """ import numpy as np from .. import CustomType, util from .helpers import get_test_data_path class CustomTestType(CustomType): """This class is intended to be inherited by custom types that are used purely for the purposes of testing. 
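Subclasses generally only need to define the ``name``, ``organization``, ``standard``, and ``version`` class attributes.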
The methods ``from_tree_tagged`` and ``from_tree`` are implemented solely in order to avoid custom type conversion warnings. """ @classmethod def from_tree_tagged(cls, tree, ctx): return cls.from_tree(tree.data, ctx) @classmethod def from_tree(cls, tree, ctx): return tree def create_small_tree(): x = np.arange(0, 10, dtype=float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } return tree def create_large_tree(): # These are designed to be big enough so they don't fit in a # single block, but not so big that RAM/disk space for the tests # is enormous. x = np.random.rand(256, 256) y = np.random.rand(16, 16, 16) tree = { 'science_data': x, 'more': y } return tree class CustomExtension: """ This is the base class that is used for extensions for custom tag classes that exist only for the purposes of testing. """ @property def types(self): return [] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(get_test_data_path('')) + '/{url_suffix}.yaml')] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/conftest.py0000644000537500020070000000134000000000000020052 0ustar00wjamiesonSTSCI\scienceimport pytest from . import create_small_tree, create_large_tree from asdf import config from asdf import schema @pytest.fixture def small_tree(): return create_small_tree() @pytest.fixture def large_tree(): return create_large_tree() @pytest.fixture(autouse=True) def restore_default_config(): yield config._global_config = config.AsdfConfig() config._local = config._ConfigLocal() @pytest.fixture(autouse=True) def clear_schema_cache(): """ Fixture that clears schema caches to prevent issues when tests use same URI for different schema content. 
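The caches are cleared after each test (following the yield) so that entries loaded by one test cannot leak into the next.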
""" yield schema._load_schema.cache_clear() schema._load_schema_cached.cache_clear() schema.load_custom_schema.cache_clear() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/coveragerc0000644000537500020070000000114000000000000017714 0ustar00wjamiesonSTSCI\science[run] source = {packagename} omit = asdf/_astropy_init* asdf/conftest* asdf/cython_version* asdf/setup_package* asdf/*/setup_package* asdf/*/*/setup_package* asdf/tests/* asdf/*/tests/* asdf/*/*/tests/* asdf/version.* asdf/compat* asdf/extern* [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about packages we have installed except ImportError # Don't complain if tests don't hit assertions raise AssertionError raise NotImplementedError # Don't complain about script hooks def main\(.*\): ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1644282536.967614 asdf-2.9.2/asdf/tests/data/0000755000537500020070000000000000000000000016566 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/__init__.py0000644000537500020070000000000000000000000020665 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/data/asdf.fits.gz0000644000537500020070000000740000000000000021012 0ustar00wjamiesonSTSCI\science‹z¿2Zasdf.fitsíšyTSg·ÆçÙŠÊA˜¥È$((X& TT‚„A†`± ‹¨( ŠTÁ ˆZpBe12Ï3 „Të€Ráûu}íZÚ®uoïº|ùýu²ÏyŸõì÷}sÎNÎ&™˜YšÄfâ3X*Ī· •æE'Tb»‰5‰ 3ÈÞÎdšóçÄVkKÛÏê)ijôÈ4ùáLf ÆÊçUþ¹­ ‰ø‚?už·¯—…FP]~Wv÷¢xÓÝ©Þô/ë©}^O]õïÜ|AOýŸÓ3²µ627üâzüOôÌ ÌŒz ¤m&ŸB 8­BPü¿ÍáMöúÛå >™"Dˆ!B„ùéK–‡Oäƒ'ÅçWhŽZúF=ÓÂRÔwNS•]‰"š¡}¶ª«Î|<%6‚ër]ºÝHw7xa’æƒÇß¾H%[ùà¿"õEŸÿŒŽ”ss‹ä+Á5·0²hBw–Éé3'µðlhë8U¥µS£Ù1èo \ØG ßfß|·ãhK°‘ðOÇ}rhLߦ0töE'oÔ=^ö g¢†Î4ï“ÏŠÑãµPÚî¬=ÅÎü²}žm.ÛI‹³ÅÁ(TþVwICµE³Á¼>û»Y‹EÐUvsvy3wwî}®/‚æÑ¼¨Åå¹è|·ÇéšnÜ«ÕòAÅ(=q6ë¦3x;(Ls´ŒgH,ÍGšYŠzѯàWŽGÆûK¢xñ*ùÌ‹A`;ïK$IëƒwCCùùjºóL_Jwb$á«ÔICk¿ÕV¶Â@Ïß%~É*‰È%Å×£C|ß5¯ãçÐÏÑŠÜ5m»SîÆR ‘;ß‚ªŽéh;-a}¶" ½1?y~(VBÝöïò+41TTë7ãä6°ž&æê~Ý‚ž½•i×äÀ^u4¹ç0x»“•¬øo{õÙi.Þ!ehiûñøªFÀ®|vËnÞä¬bÔ%û€c1àcäFFGƒGË:Áz;ªg5z"¾×X&uf>ú÷u?ã€ožÔ²ï·Ój«-ÀsþÝpÜ:ô_ªÔÜ¢žgÉÌÎú™(߯;›†6O/¯E£7,mý°˜:štØ´,1 ËI™ú½ïïxÝÙþ´ë‹%´;Ñæ+E› þãÀz«í;À](’é$åî˜F„¨õnûç=M)®FÿúêŽÈ4%*oJ‰ÍwWž‹ýeOpîRÉy'®ƒ綉ý¾/V(ô…º¡ÅdlŸÒºïÀm\~ Ù´úÚyŠj£hæ–§5ƒÕ¼Cô“l°$¸™·çlÛ¬…Ö ¤xêì p×î/ýêž;8­*çM¯ ùÞXMÔUô´šÊ5Äݧ­8üÁZ:bgÊš—סVMüõùS4ðSï%+RxÜ8÷FúŽ:j3ƒ†®K—DxbhÚ4nnP‹þ”ãÑÌï¯"5#Ct†f7Ø+‚#ÆÄrP9=lÝÚŸìWçaöhzÚ%œ ç¿ÕC›KÛ“_A€÷äû±Ð©Þì~½º ܳG¿Öì?ŠŽÅ'dœº]P·{Û‹ ¶XòÅ¥ ¤ÆÁk_8=™r§Íg·g(‹ }fÅsxcp"<7³ä,ÚC~ ·CáÚBRmqšc6J™e¾BZþ9¦ÕIÁþµÝq8¾àïbù8 |Æ–züPÓ¬…¯ ÑsÊWC>i‰«Îk¡9GªŸ1ÝúJÌÃQÈr|+¿Ä,´ë¿ ÷Gmçîà§ò¨kêÔQÓjF]¾†É›’«If~;f5ÌdOªÔchŠh´õ†wè®âŽqªÔÑÛ'f&Uö Ú\Ç5â¬ÀQ÷9“¸§4îÕƒ7P›>[W²}ó6麾û®cÕº¡©¸{çðƒyàòDS¶YBT¼mÃiù”ÉVªÞ:솶Ÿ“kÞ—¢nµØœTC $…~ë¬ ü»MU¹ï1âu(æVáWâ—i­® Gìè’6dN/ZïŽ&š» ÷z8XbC?|3‹§ÁŸ”r3ÀOýÁ¨4ùwN˜bhE~åïâà}l|Iù//V¦ÌVDýµ¼‹G:–Þ›‘È>…LëÒWožY"ùÆG¹°=[?þjÏù׸T¯tÄ·¹Ã6—½É©A-gê*ñu×A¿ù¡´#e#%›¤IÖVTŠ`œQ?ª¾´ê­&X4ÒÌX©hÔš³æ„¢¦³¿<÷ šNß>”Tˆl“ e6y£ÈÐ{é4ufºYTûí_áñxJб3óP«¡oß!Ú n ÉM‘H@ks¦ÞÖf74ÉZF>n¨EÊ Nˆ£Õäsj×F•9[ÑhñaÓŠ=WQíXüRѼ©wæ[† €ÕÏÎ\ÄA%-¡JsfØ~[ü*rŽ#èàár­ùè5^ðâò™8 …„¬¬:­ öÍW;?òW /é­Íû‰SH,9‡×…–hwX—[¸÷ßHHÍFÁÎ7còz(øaPJ#äg4æ†{×$!Û÷ÙÒv6êÒéë}neáy󔹇.£WG‚“zHðý2î»ï"ö¢¡)Ö\gjÒ–ò«Ñõ¶Z!•†âÁÀà ÅkÀc=(Xóƒww-¢j‚Ý:Ƭ ©EsüF‚Z€øgÿ¸,å ?îæ–ƒ[2÷fÝÃÜ9#ràù测¹-OaFú¼Ó+Ái ÚÜ¿ã*ØKìÒDÚåÝë¥ó1xåÃÂ_ß,BïŠXÍ÷\{ô†:t3ïº#ö¼vTv7JE-Þ'>*؇9{T$ø2‹’ú,1ÄЖ°/–A›Þ΃šc#à‡\¯¯êz„fù'×ø Ã{‰©…×zt¹z.ïëbtùˆžU¸Ž‘%-—Ë»ö£ÐðÇ#™¸:s·èÓ*4¿K¾ð@{ Z§s}öI«ƒ=â;Äs¿Ž§‰Rï°FÑ}¿§OÕñøšæ ^½ß O·á¼}Ëe M„´Š††¢—uP/hšlò6ÜׂE³G]ß‚¸ŽFF¡ ác†•+–oÄc¹CœöÛÕàÝ4\^f" ¾ÃXðþøçèÙpÔã‰K 
z¥ïtïÜw²R$™&$ô_˜£›  þ±’Û5õÓÐX.ÙãÇ@1=4¨½ü)Šþ4­xt螈ìîñÄ_>‡ÁW·v}œŠŒè®µ·è‹5;ýP[pÿV0\Öô°m"¬GV-Zà:‘KU12–5åc”%ºú’Þ(ÏÅ€›bÔ²Œ¨xæ¼Ã Û œœ=š æZ "ý©Õt37°š(–~[Á­´âç¨ymê„m7ýrŒp $i». sÄ€Ý5q¾ÚäøQÅ“u¡Ï±gégó¼ä¾ÚÄ®Hdn=·ZÙ9¬LÏê¡à!°Ž•S¦¬AÕ™iK¦]kÆûÈ©zOÁŸkéÁ@GUŸ£ß( =!®vÍk QD}.5VÕñÃ*èñc5„̲E‡FøásËãT"¬ÍÐô8(úBç2äæ ø¸8âÁÏÅù“©YIòsÑoí¨Þð,k¼œò¨ÊSl_.—»xQQ–A‚:Ù¶¦¦VmCs¬Þ=<MoiÎãfð-´/G\kEßXˆKIíïzžÍEWò%Ænäìžj¿ó¦ ?ƒ7$éhtÊžŒLÉTEÿòó·[ÏaÄ}F“Ãb#tµ&šlrFÿ•>â»eÁ’¦—ñ¿BߎÓòÌ:ôK1»ÔšDФdìbñÑCÏTÂl^b¨KfMÜzUÁóõ¹Nç«2°ŽÆ²ôHÁÃtçD{Yc 鮹Ë߀jñs"Þ¿ÆÐ!ów~1"B„"Dˆ!Bþ£˜l ™X˜o& H†Û'ÿ’þóûýOÑ¿äÿ;þ¢ƒ ´þáþ µ²C]KKu2_w:áIñve¸ý·I,·Yؘ[ÁŸêŸüùid/ ƒBû¼³ßØñWzÎוFõõù+­I¶Y˜YZ‘HŸÕÛ.г¡Sè„k€»q€êåC£Ðÿz™ÿÐòùýb>Ù"p÷鬱¡Íßø34°6 ™ì5úŒ¿šš“z˜?ggwoWÂéƒòÅ´…ý%B„"Dˆÿkd?Õ9jʪʪ’ŸŽHÖæ†V†‚ º (·ÇÀÌTp¬&)gm°ƒ!dW:ƒ~À]™âì«C¦;»¨H*))2¨4ŠÊäg¥ßä&<ÝhdÚ¦S]‡É4Êo—Gɾ 7*M‡ ùPkŠ'…~€*¨ªIÜ)Þ‚ˆ‰7áÎðeP 7ªŇìJÑ!Ü Ww†›¯“² îS¡OŽgü>ü“ EIâSO¯1ùQ‘ðT®‚ÚPGÌFAjÎ?5UmÕc’t†¯‹Ëï'[»eø¿R :Õ—v@ áâΠ붙(ª ‚“Õÿdñ/{RÉ M Al²ª£Òœ)‚dœÜ]'Gº‘'¯Ø§®ªH¨«ÚK*++KþÿZ"Dˆ!B„ò¿å¿§­%q@8././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/complex-42.0.0.yaml0000644000537500020070000000021200000000000021633 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/custom-42.0.0" type: string././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom-1.0.0.yaml0000644000537500020070000000022500000000000021415 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom-1.0.0" type: integer default: 42././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom_flow-1.0.0.yaml0000644000537500020070000000032600000000000022446 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_flow-1.0.0" type: object properties: a: type: number b: type: number flowStyle: block././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom_flow-1.1.0.yaml0000644000537500020070000000032700000000000022450 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_flow-1.1.0" type: object properties: c: type: number d: type: number flowStyle: block ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom_schema.yaml0000644000537500020070000000104100000000000022300 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" title: | Top-level custom schema used for testing. description: | This schema is used to test the custom schema validation mechanism in ASDF. type: object properties: foo: description: | Your generic kind of foo. 
type: object properties: x: type: number y: type: number bar: type: object properties: a: type: string b: type: string required: [foo, bar] additionalProperties: true ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom_schema_definitions.yaml0000644000537500020070000000046200000000000024701 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" type: object properties: thing: $ref: "#/definitions/bizbaz" required: [thing] additionalProperties: true definitions: bizbaz: type: object properties: biz: type: string baz: type: string ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom_schema_external_ref.yaml0000644000537500020070000000061300000000000025042 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" title: | Custom schema with an external reference, used for testing. description: | This schema is used to test custom schema validation with an external reference. type: object properties: foo: anyOf: - $ref: "http://stsci.edu/schemas/asdf/core/software-1.0.0" required: [foo] additionalProperties: true ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/custom_style-1.0.0.yaml0000644000537500020070000000023600000000000022637 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_style-1.0.0" type: string style: literal ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/default-1.0.0.yaml0000644000537500020070000000206000000000000021526 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/default-1.0.0" type: object properties: a: type: integer default: 42 b: type: object properties: c: type: integer default: 82 d: allOf: - type: object properties: e: type: integer default: 122 - type: object properties: f: type: integer default: 162 g: anyOf: - type: object properties: h: type: integer default: 202 - type: object properties: i: type: integer default: 242 j: oneOf: - type: object properties: k: type: integer default: 282 required: [k] additionalProperties: false - type: object properties: l: type: integer default: 322 required: [l] additionalProperties: false ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/example_schema.json0000755000537500020070000000423300000000000022441 0ustar00wjamiesonSTSCI\science{ "date" : { "title" : "[yyyy-mm-ddThh:mm:ss.ssssss] UTC date file created", "type" : "string", "sql_dtype" : "datetime2", "fits_keyword" : "DATE", "description" : "The UTC date and time when the HDU was created, in the form YYYY-MM-DDThh:mm:ss.ssssss, where YYYY shall be the four-digit calendar year number, MM the two-digit month number with January given by 01 and December by 12, and DD the two-digit day of the month. The literal T shall separate the date and time, hh shall be the two-digit hour in the day, mm the two-digit number of minutes after the hour, and ss.ssssss the number of seconds (two digits followed by a fraction accurate to microseconds) after the minute. 
Default values must not be given to any portion of the date/time string, and leading zeros must not be omitted.", "calculation" : "Operating system time in the format of YYYY-MM-DDThh:mm:ss.ssssss", "default_value" : "", "example" : "2015-01-01T00:00:00.000001", "units" : "", "sw_source" : "calculation", "source" : "Science Data Processing (SDP)", "destination" : ["ScienceCommon.date","GuideStar.date"], "level" : "1a", "si" : "Multiple", "section" : "Basic", "mode" : "All", "fits_hdu" : "PRIMARY", "misc" : "" }, "origin" : { "title" : "institution responsible for creating FITS file", "type" : "string", "sql_dtype" : "nvarchar(20)", "fits_keyword" : "ORIGIN", "description" : "Identifies the organization or institution responsible for creating the FITS file.", "calculation" : "", "default_value" : "STSCI", "example" : "STSCI", "units" : "", "sw_source" : "", "source" : "Science Data Processing (SDP)", "destination" : ["ScienceCommon.origin","GuideStar.origin"], "level" : "1a", "si" : "Multiple", "section" : "Basic", "mode" : "All", "fits_hdu" : "PRIMARY", "misc" : "" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/data/extension_check.fits0000644000537500020070000002070100000000000022626 0ustar00wjamiesonSTSCI\scienceSIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 0 / number of array dimensions EXTEND = T END XTENSION= 'ASDF ' / ASDF extension BITPIX = 8 / array data type NAXIS = 1 / number of array dimensions NAXIS1 = 538 / Axis length PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups COMPRESS= F / Uses gzip compression EXTNAME = 'ASDF ' / Name of ASDF extension END #ASDF 1.0.0 #ASDF_STANDARD 1.2.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.1.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 2.0.0.dev1264} history: extensions: - !core/extension_metadata-1.0.0 extension_class: foo.bar.FooBar software: {name: foo, version: 1.2.3} - !core/extension_metadata-1.0.0 extension_class: asdf.extension.BuiltinExtension software: {name: asdf, version: 2.0.0} ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/foreign_tag_reference-1.0.0.yaml0000644000537500020070000000101500000000000024403 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/foreign_tag_reference-1.0.0" title: An example custom type for testing tag references tag: "tag:nowhere.org:custom/foreign_tag_reference-1.0.0" type: object properties: a: # Test foreign tag reference using tag URI $ref: "tag:nowhere.org:custom/tag_reference-1.0.0" b: # Test foreign tag reference using tag ID $ref: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" required: [a, b] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/fraction-1.0.0.yaml0000644000537500020070000000110500000000000021706 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fraction-1.0.0" title: An example custom type for handling fractions tag: "tag:nowhere.org:custom/fraction-1.0.0" # Using anyOf here is a cheap hack to allow us to support both formats in an # example that is used in the documentation (see docs/asdf/extensions.rst). 
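# In other words, a fraction may be written either as a two-element
# [numerator, denominator] array or as an object with explicit numerator
# and denominator properties.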
anyOf: - type: array items: type: integer minItems: 2 maxItems: 2 - type: object properties: numerator: type: integer denominator: type: integer ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/fraction_with_inverse-1.0.0.yaml0000644000537500020070000000067200000000000024504 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fraction_with_inverse-1.0.0" title: An example custom type for handling fractions with inverses tag: "tag:nowhere.org:custom/fraction_with_inverse-1.0.0" type: object properties: numerator: type: integer denominator: type: integer inverse: $ref: fraction_with_inverse-1.0.0 required: [numerator, denominator] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/fractional_2d_coord-1.0.0.yaml0000644000537500020070000000054300000000000024003 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fractional_2d_coord-1.0.0" title: An example custom type for handling components tag: "tag:nowhere.org:custom/fractional_2d_coord-1.0.0" type: object properties: x: $ref: fraction-1.0.0 y: $ref: fraction-1.0.0 required: [x, y] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/missing-1.1.0.yaml0000644000537500020070000000021100000000000021550 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/missing-1.1.0" type: object././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/one_of-1.0.0.yaml0000644000537500020070000000062700000000000021356 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/one_of-1.0.0" title: | oneOf test schema oneOf: - type: object properties: value: type: number required: [value] additionalProperties: false - type: object properties: value: type: string required: [value] additionalProperties: false ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/self_referencing-1.0.0.yaml0000644000537500020070000000026700000000000023411 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/self_referencing-1.0.0" anyOf: - type: object - $ref: "#/anyOf/0" ...././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/tag_reference-1.0.0.yaml0000644000537500020070000000057200000000000022701 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" title: An example custom type for testing tag references tag: "tag:nowhere.org:custom/tag_reference-1.0.0" type: object properties: name: type: string things: $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0" required: [name, things] ... 
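For illustration, a minimal sketch of how a schema like tag_reference-1.0.0 is exercised by this test suite; CustomExtension here stands in for any test extension mapping the tag:nowhere.org:custom prefix, as defined in asdf/tests/__init__.py:

import asdf
from asdf.tests import helpers, CustomExtension

# Build an ASDF file whose tree contains the custom-tagged object, then
# reopen it so that validation runs against tag_reference-1.0.0.
content = """
thing: !<tag:nowhere.org:custom/tag_reference-1.0.0>
  name: sample
  things: !core/ndarray-1.0.0
    data: [1, 2, 3]
"""
buff = helpers.yaml_to_asdf(content)
with asdf.open(buff, extensions=CustomExtension()):
    pass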
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/asdf/tests/data/version_mismatch.fits0000644000537500020070000001320000000000000023023 0ustar00wjamiesonSTSCI\scienceSIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 1 / number of array dimensions NAXIS1 = 288 EXTNAME = 'ASDF ' / extension name END #ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 a: !core/complex-7.0.0 0j asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev874} ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266458.0 asdf-2.9.2/asdf/tests/helpers.py0000644000537500020070000003566700000000000017712 0ustar00wjamiesonSTSCI\scienceimport io import os import warnings from contextlib import contextmanager from pathlib import Path try: from astropy.coordinates import ICRS except ImportError: ICRS = None try: from astropy.coordinates.representation import CartesianRepresentation except ImportError: CartesianRepresentation = None try: from astropy.coordinates.representation import CartesianDifferential except ImportError: CartesianDifferential = None import yaml import asdf from ..asdf import AsdfFile, get_asdf_library_info from ..block import Block from .httpserver import RangeHTTPServer from ..extension import default_extensions from ..exceptions import AsdfConversionWarning from .. import versioning from ..resolver import Resolver, ResolverChain from .. import generic_io from ..constants import YAML_TAG_PREFIX from ..versioning import AsdfVersion, get_version_map from ..tags.core import AsdfObject try: from pytest_remotedata.disable_internet import INTERNET_OFF except ImportError: INTERNET_OFF = False __all__ = ['get_test_data_path', 'assert_tree_match', 'assert_roundtrip_tree', 'yaml_to_asdf', 'get_file_sizes', 'display_warnings'] def get_test_data_path(name, module=None): if module is None: from . import data as test_data module = test_data module_root = Path(module.__file__).parent if name is None or name == "": return str(module_root) else: return str(module_root/name) def assert_tree_match(old_tree, new_tree, ctx=None, funcname='assert_equal', ignore_keys=None): """ Assert that two ASDF trees match. Parameters ---------- old_tree : ASDF tree new_tree : ASDF tree ctx : ASDF file context Used to look up the set of types in effect. funcname : `str` or `callable` The name of a method on members of old_tree and new_tree that will be used to compare custom objects. The default of `assert_equal` handles Numpy arrays. 
ignore_keys : list of str List of keys to ignore """ seen = set() if ignore_keys is None: ignore_keys = ['asdf_library', 'history'] ignore_keys = set(ignore_keys) if ctx is None: version_string = str(versioning.default_version) ctx = default_extensions.extension_list else: version_string = ctx.version_string def recurse(old, new): if id(old) in seen or id(new) in seen: return seen.add(id(old)) seen.add(id(new)) old_type = ctx.type_index.from_custom_type(type(old), version_string) new_type = ctx.type_index.from_custom_type(type(new), version_string) if (old_type is not None and new_type is not None and old_type is new_type and (callable(funcname) or hasattr(old_type, funcname))): if callable(funcname): funcname(old, new) else: getattr(old_type, funcname)(old, new) elif isinstance(old, dict) and isinstance(new, dict): assert (set(x for x in old.keys() if x not in ignore_keys) == set(x for x in new.keys() if x not in ignore_keys)) for key in old.keys(): if key not in ignore_keys: recurse(old[key], new[key]) elif isinstance(old, (list, tuple)) and isinstance(new, (list, tuple)): assert len(old) == len(new) for a, b in zip(old, new): recurse(a, b) # The astropy classes CartesianRepresentation, CartesianDifferential, # and ICRS do not define equality in a way that is meaningful for unit # tests. We explicitly compare the fields that we care about in order # to enable our unit testing. It is possible that in the future it will # be necessary or useful to account for fields that are not currently # compared. elif CartesianRepresentation is not None and \ isinstance(old, CartesianRepresentation): assert old.x == new.x and old.y == new.y and old.z == new.z elif CartesianDifferential is not None and \ isinstance(old, CartesianDifferential): assert old.d_x == new.d_x and old.d_y == new.d_y and \ old.d_z == new.d_z elif ICRS is not None and isinstance(old, ICRS): assert old.ra == new.ra and old.dec == new.dec else: assert old == new recurse(old_tree, new_tree) def assert_roundtrip_tree(*args, **kwargs): """ Assert that a given tree saves to ASDF and, when loaded back, the tree matches the original tree. tree : ASDF tree tmpdir : str Path to temporary directory to save file tree_match_func : `str` or `callable` Passed to `assert_tree_match` and used to compare two objects in the tree. raw_yaml_check_func : callable, optional Will be called with the raw YAML content as a string to perform any additional checks. asdf_check_func : callable, optional Will be called with the reloaded ASDF file to perform any additional checks. 
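For example (the pattern used throughout this test suite)::

    tree = {'science_data': np.arange(10)}
    assert_roundtrip_tree(tree, tmpdir)
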
""" with warnings.catch_warnings(): warnings.filterwarnings("error", category=AsdfConversionWarning) _assert_roundtrip_tree(*args, **kwargs) def _assert_roundtrip_tree(tree, tmpdir, *, asdf_check_func=None, raw_yaml_check_func=None, write_options={}, init_options={}, extensions=None, tree_match_func='assert_equal'): fname = str(tmpdir.join('test.asdf')) # First, test writing/reading a BytesIO buffer buff = io.BytesIO() AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) assert not buff.closed buff.seek(0) with asdf.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert 'asdf_library' in ff.tree assert ff.tree['asdf_library'] == get_asdf_library_info() assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) buff.seek(0) ff = AsdfFile(extensions=extensions, **init_options) content = AsdfFile._open_impl(ff, buff, mode='r', _get_yaml_content=True) buff.close() # We *never* want to get any raw python objects out assert b'!!python' not in content assert b'!core/asdf' in content assert content.startswith(b'%YAML 1.1') if raw_yaml_check_func: raw_yaml_check_func(content) # Then, test writing/reading to a real file ff = AsdfFile(tree, extensions=extensions, **init_options) ff.write_to(fname, **write_options) with asdf.open(fname, mode='rw', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) # Make sure everything works without a block index write_options['include_block_index'] = False buff = io.BytesIO() AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) assert not buff.closed buff.seek(0) with asdf.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) # Now try everything on an HTTP range server if not INTERNET_OFF: server = RangeHTTPServer() try: ff = AsdfFile(tree, extensions=extensions, **init_options) ff.write_to(os.path.join(server.tmpdir, 'test.asdf'), **write_options) with asdf.open(server.url + 'test.asdf', mode='r', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) finally: server.finalize() # Now don't be lazy and check that nothing breaks with io.BytesIO() as buff: AsdfFile(tree, extensions=extensions, **init_options).write_to(buff, **write_options) buff.seek(0) ff = asdf.open(buff, extensions=extensions, copy_arrays=True, lazy_load=False) # Ensure that all the blocks are loaded for block in ff.blocks._internal_blocks: assert isinstance(block, Block) assert block._data is not None # The underlying file is closed at this time and everything should still work assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) # Now repeat with copy_arrays=False and a real file to test mmap() AsdfFile(tree, extensions=extensions, **init_options).write_to(fname, **write_options) with asdf.open(fname, mode='rw', extensions=extensions, copy_arrays=False, lazy_load=False) as ff: for block in ff.blocks._internal_blocks: assert isinstance(block, Block) assert block._data is not None assert_tree_match(tree, ff.tree, ff, funcname=tree_match_func) if asdf_check_func: asdf_check_func(ff) def yaml_to_asdf(yaml_content, yaml_headers=True, standard_version=None): """ 
Given a string of YAML content, adds the extra pre- and post-amble to make it an ASDF file. Parameters ---------- yaml_content : str or bytes The YAML content to wrap. yaml_headers : bool, optional When True (default) add the standard ASDF YAML headers. standard_version : str or AsdfVersion, optional The ASDF Standard version used to select the header versions. Defaults to versioning.default_version. Returns ------- buff : io.BytesIO() A file-like object containing the ASDF-like content. """ if isinstance(yaml_content, str): yaml_content = yaml_content.encode('utf-8') buff = io.BytesIO() if standard_version is None: standard_version = versioning.default_version standard_version = AsdfVersion(standard_version) vm = get_version_map(standard_version) file_format_version = vm["FILE_FORMAT"] yaml_version = vm["YAML_VERSION"] tree_version = vm["tags"]["tag:stsci.edu:asdf/core/asdf"] if yaml_headers: buff.write("""#ASDF {0} #ASDF_STANDARD {1} %YAML {2} %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-{3} """.format(file_format_version, standard_version, yaml_version, tree_version).encode('ascii')) buff.write(yaml_content) if yaml_headers: buff.write(b"\n...\n") buff.seek(0) return buff def get_file_sizes(dirname): """ Get the file sizes in a directory. Parameters ---------- dirname : string Path to a directory Returns ------- sizes : dict Dictionary of (file, size) pairs. """ files = {} for filename in os.listdir(dirname): path = os.path.join(dirname, filename) if os.path.isfile(path): files[filename] = os.stat(path).st_size return files def display_warnings(_warnings): """ Return a string that displays a list of unexpected warnings Parameters ---------- _warnings : iterable List of warnings to be displayed Returns ------- msg : str String containing the warning messages to be displayed """ if len(_warnings) == 0: return "No warnings occurred (was one expected?)" msg = "Unexpected warning(s) occurred:\n" for warning in _warnings: msg += "{}:{}: {}: {}\n".format( warning.filename, warning.lineno, warning.category.__name__, warning.message) return msg @contextmanager def assert_no_warnings(warning_class=None): """ Assert that no warnings were emitted within the context. Requires that pytest be installed. Parameters ---------- warning_class : type, optional Assert only that no warnings of the specified class were emitted. """ import pytest if warning_class is None: with warnings.catch_warnings(): warnings.simplefilter("error") yield else: with pytest.warns(Warning) as recorded_warnings: yield assert not any(isinstance(w.message, warning_class) for w in recorded_warnings), \ display_warnings(recorded_warnings) def assert_extension_correctness(extension): """ Assert that an ASDF extension's types are all correctly formed and that the extension provides all of the required schemas. Parameters ---------- extension : asdf.AsdfExtension The extension to validate """ __tracebackhide__ = True resolver = ResolverChain( Resolver(extension.tag_mapping, "tag"), Resolver(extension.url_mapping, "url"), ) for extension_type in extension.types: _assert_extension_type_correctness(extension, extension_type, resolver) def _assert_extension_type_correctness(extension, extension_type, resolver): __tracebackhide__ = True if extension_type.yaml_tag is not None and extension_type.yaml_tag.startswith(YAML_TAG_PREFIX): return if extension_type == asdf.stream.Stream: # Stream is a special case. It was implemented as a subclass of NDArrayType, # but shares a tag with that class, so it isn't really a distinct type.
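# For orientation, a minimal legacy type that would satisfy the checks
# below might look like this (hypothetical sketch, mirroring the
# LegacyType defined in test_extension.py):
#
#     class FancyType(dict, CustomType):
#         organization = "somewhere.org"
#         name = "fancy"
#         version = "1.0.0"
#
# That is, the subclass must carry an explicit 'name' and 'version', and
# its yaml_tag must resolve (via the extension's tag_mapping and
# url_mapping) to a readable schema whose 'tag' field matches.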
return assert extension_type.name is not None, "{} must set the 'name' class attribute".format(extension_type.__name__) # Currently ExtensionType sets a default version of 1.0.0, # but we want to encourage an explicit version on the subclass. assert "version" in extension_type.__dict__, "{} must set the 'version' class attribute".format(extension_type.__name__) for check_type in extension_type.versioned_siblings + [extension_type]: schema_location = resolver(check_type.yaml_tag) assert schema_location is not None, ( "{} supports tag, {}, ".format(extension_type.__name__, check_type.yaml_tag) + "but tag does not resolve. Check the tag_mapping and url_mapping " + "properties on the related extension ({}).".format(extension_type.__name__) ) try: with generic_io.get_file(schema_location) as f: schema = yaml.safe_load(f.read()) except Exception: assert False, ( "{} supports tag, {}, ".format(extension_type.__name__, check_type.yaml_tag) + "which resolves to schema at {}, but ".format(schema_location) + "schema cannot be read." ) assert "tag" in schema, ( "{} supports tag, {}, ".format(extension_type.__name__, check_type.yaml_tag) + "but tag resolves to a schema at {} that is ".format(schema_location) + "missing its tag field." ) assert schema["tag"] == check_type.yaml_tag, ( "{} supports tag, {}, ".format(extension_type.__name__, check_type.yaml_tag) + "but tag resolves to a schema at {} that ".format(schema_location) + "describes a different tag: {}".format(schema["tag"]) ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/httpserver.py0000644000537500020070000000402600000000000020437 0ustar00wjamiesonSTSCI\scienceimport os import queue import shutil import tempfile import threading import http.server import socketserver from ..extern.RangeHTTPServer import RangeHTTPRequestHandler __all__ = ['HTTPServer', 'RangeHTTPServer'] def run_server(tmpdir, handler_class, stop_event, queue): # pragma: no cover """ Runs an HTTP server serving files from given tmpdir in a separate thread. When it's ready, it sends a URL to the server over a queue so the main thread (the HTTP client) can start making requests of it. """ class HTTPRequestHandler(handler_class): def translate_path(self, path): path = handler_class.translate_path(self, path) path = os.path.join( tmpdir, os.path.relpath(path, os.getcwd())) return path server = socketserver.TCPServer(("127.0.0.1", 0), HTTPRequestHandler) domain, port = server.server_address url = "http://{0}:{1}/".format(domain, port) # Set a reasonable timeout so that invalid requests (which may occur during # testing) do not cause the entire test suite to hang indefinitely server.timeout = 0.1 queue.put(url) # Using server.serve_forever does not work here since it ignores the # timeout value set above. Having an explicit loop also allows us to kill # the server from the parent thread.
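# (Each handle_request() call blocks for at most server.timeout seconds,
# i.e. 0.1 s as set above, so this loop re-checks stop_event roughly ten
# times per second and exits promptly once finalize() sets the event.)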
while not stop_event.is_set(): server.handle_request() server.server_close() class HTTPServer: handler_class = http.server.SimpleHTTPRequestHandler def __init__(self): self.tmpdir = tempfile.mkdtemp() q = queue.Queue() self.stop_event = threading.Event() args = (self.tmpdir, self.handler_class, self.stop_event, q) self.thread = threading.Thread(target=run_server, args=args) self.thread.start() self.url = q.get() def finalize(self): self.stop_event.set() self.thread.join() shutil.rmtree(self.tmpdir) class RangeHTTPServer(HTTPServer): handler_class = RangeHTTPRequestHandler ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/test_api.py0000644000537500020070000004363400000000000020051 0ustar00wjamiesonSTSCI\scienceimport os import io import getpass import pathlib import sys import numpy as np from numpy.testing import assert_array_equal from astropy.modeling import models import pytest from jsonschema.exceptions import ValidationError import asdf from asdf import get_config, config_context from asdf import treeutil from asdf import extension from asdf import resolver from asdf import schema from asdf import versioning from asdf.exceptions import AsdfDeprecationWarning, AsdfWarning from asdf.extension import ExtensionProxy from .helpers import ( assert_tree_match, assert_roundtrip_tree, yaml_to_asdf, assert_no_warnings, ) def test_get_data_from_closed_file(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.open(path) as ff: pass with pytest.raises(IOError): assert_array_equal(my_array, ff.tree['my_array']) def test_no_warning_nan_array(tmpdir): """ Tests for a regression that was introduced by https://github.com/asdf-format/asdf/pull/557 """ tree = dict(array=np.array([1, 2, np.nan])) with assert_no_warnings(): assert_roundtrip_tree(tree, tmpdir) def test_warning_deprecated_open(tmpdir): tmpfile = str(tmpdir.join('foo.asdf')) tree = dict(foo=42, bar='hello') with asdf.AsdfFile(tree) as af: af.write_to(tmpfile) with pytest.warns(AsdfDeprecationWarning): with asdf.AsdfFile.open(tmpfile) as af: assert_tree_match(tree, af.tree) @pytest.mark.skipif( not sys.platform.startswith('win') and getpass.getuser() == 'root', reason="Cannot make file read-only if user is root" ) def test_open_readonly(tmpdir): tmpfile = str(tmpdir.join('readonly.asdf')) tree = dict(foo=42, bar='hello', baz=np.arange(20)) with asdf.AsdfFile(tree) as af: af.write_to(tmpfile, all_array_storage='internal') os.chmod(tmpfile, 0o440) assert os.access(tmpfile, os.W_OK) == False with asdf.open(tmpfile) as af: assert af['baz'].flags.writeable == False with pytest.raises(PermissionError): with asdf.open(tmpfile, mode='rw'): pass def test_open_validate_on_read(tmpdir): content = """ invalid_software: !core/software-1.0.0 name: Minesweeper version: 3 """ buff = yaml_to_asdf(content) with pytest.raises(ValidationError): get_config().validate_on_read = True with asdf.open(buff): pass buff.seek(0) get_config().validate_on_read = False with asdf.open(buff) as af: assert af["invalid_software"]["name"] == "Minesweeper" assert af["invalid_software"]["version"] == 3 def test_open_stream(tmp_path): file_path = tmp_path / "test.asdf" with asdf.AsdfFile() as af: af["foo"] = "bar" af.write_to(file_path) class StreamWrapper: def __init__(self, fd): self._fd = fd def read(self, size=-1): return self._fd.read(size) with file_path.open("rb") as 
fd: with asdf.open(StreamWrapper(fd)) as af: assert af["foo"] == "bar" def test_atomic_write(tmpdir, small_tree): tmpfile = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(small_tree) ff.write_to(tmpfile) with asdf.open(tmpfile, mode='r') as ff: ff.write_to(tmpfile) def test_overwrite(tmpdir): # This is intended to reproduce the following issue: # https://github.com/asdf-format/asdf/issues/100 tmpfile = os.path.join(str(tmpdir), 'test.asdf') aff = models.AffineTransformation2D(matrix=[[1, 2], [3, 4]]) f = asdf.AsdfFile() f.tree['model'] = aff f.write_to(tmpfile) model = f.tree['model'] ff = asdf.AsdfFile() ff.tree['model'] = model ff.write_to(tmpfile) def test_default_version(): # See https://github.com/asdf-format/asdf/issues/364 version_map = versioning.get_version_map(versioning.default_version) ff = asdf.AsdfFile() assert ff.file_format_version == version_map['FILE_FORMAT'] def test_update_exceptions(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.open(path, mode='r', copy_arrays=True) as ff: with pytest.raises(IOError): ff.update() ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) with asdf.open(buff, mode='rw') as ff: ff.update() with pytest.raises(ValueError): asdf.AsdfFile().update() def test_top_level_tree(small_tree): tree = {'tree': small_tree} ff = asdf.AsdfFile(tree) assert_tree_match(ff.tree['tree'], ff['tree']) ff2 = asdf.AsdfFile() ff2['tree'] = small_tree assert_tree_match(ff2.tree['tree'], ff2['tree']) def test_top_level_keys(small_tree): tree = {'tree': small_tree} ff = asdf.AsdfFile(tree) assert ff.tree.keys() == ff.keys() def test_top_level_contains(): tree = { 'foo': 42, 'bar': 43, } with asdf.AsdfFile(tree) as af: assert 'foo' in af assert 'bar' in af def test_walk_and_modify_remove_keys(): tree = { 'foo': 42, 'bar': 43 } def func(x): if x == 42: return treeutil.RemoveNode return x tree2 = treeutil.walk_and_modify(tree, func) assert 'foo' not in tree2 assert 'bar' in tree2 def test_walk_and_modify_retain_none(): tree = { 'foo': 42, 'bar': None } def func(x): if x == 42: return None return x tree2 = treeutil.walk_and_modify(tree, func) assert tree2['foo'] is None assert tree2['bar'] is None def test_copy(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array, 'foo': {'bar': 'baz'}} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, 'test.asdf')) with asdf.open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = ff.copy() ff2.tree['my_array'] *= 2 ff2.tree['foo']['bar'] = 'boo' assert np.all(ff2.tree['my_array'] == ff.tree['my_array'] * 2) assert ff.tree['foo']['bar'] == 'baz' assert_array_equal(ff2.tree['my_array'], ff2.tree['my_array']) def test_tag_to_schema_resolver_deprecation(): ff = asdf.AsdfFile() with pytest.warns(AsdfDeprecationWarning): ff.tag_to_schema_resolver('foo') with pytest.warns(AsdfDeprecationWarning): extension_list = extension.default_extensions.extension_list extension_list.tag_to_schema_resolver('foo') def test_access_tree_outside_handler(tmpdir): tempname = str(tmpdir.join('test.asdf')) tree = {'random': np.random.random(10)} ff = asdf.AsdfFile(tree) ff.write_to(str(tempname)) with asdf.open(tempname) as newf: pass # Accessing array data outside of handler should fail with pytest.raises(OSError): repr(newf.tree['random']) # Using the top-level getattr should also fail with pytest.raises(OSError): repr(newf['random']) def 
test_context_handler_resolve_and_inline(tmpdir): # This reproduces the issue reported in # https://github.com/asdf-format/asdf/issues/406 tempname = str(tmpdir.join('test.asdf')) tree = {'random': np.random.random(10)} ff = asdf.AsdfFile(tree) ff.write_to(str(tempname)) with asdf.open(tempname) as newf: newf.resolve_and_inline() with pytest.raises(OSError): newf.tree['random'][0] def test_open_pathlib_path(tmpdir): filename = str(tmpdir.join('pathlib.asdf')) path = pathlib.Path(filename) tree = {'data': np.ones(10)} with asdf.AsdfFile(tree) as af: af.write_to(path) with asdf.open(path) as af: assert (af['data'] == tree['data']).all() class FooExtension: types = [] tag_mapping = [] url_mapping = [] @pytest.mark.parametrize('installed,extension,warns', [ ('1.2.3', '2.0.0', True), ('1.2.3', '2.0.dev10842', True), ('2.0.0', '2.0.0', False), ('2.0.1', '2.0.0', False), ('2.0.1', '2.0.dev12345', False), ]) def test_extension_version_check(installed, extension, warns): proxy = ExtensionProxy(FooExtension(), package_name="foo", package_version=installed) with config_context() as config: config.add_extension(proxy) af = asdf.AsdfFile() af._fname = 'test.asdf' tree = { 'history': { 'extensions': [ asdf.tags.core.ExtensionMetadata(extension_class='asdf.tests.test_api.FooExtension', software=asdf.tags.core.Software(name='foo', version=extension)), ] } } if warns: with pytest.warns(AsdfWarning, match="File 'test.asdf' was created with"): af._check_extensions(tree) with pytest.raises(RuntimeError) as err: af._check_extensions(tree, strict=True) err.match("^File 'test.asdf' was created with") else: af._check_extensions(tree) @pytest.mark.filterwarnings(AsdfDeprecationWarning) def test_auto_inline(tmpdir): outfile = str(tmpdir.join('test.asdf')) tree = {"small_array": np.arange(6), "large_array": np.arange(100)} # Use the same object for each write in order to make sure that there # aren't unanticipated side effects with asdf.AsdfFile(tree) as af: # By default blocks are written internal. af.write_to(outfile) assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 2 af.write_to(outfile, auto_inline=10) assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 1 # The previous write modified the small array block's storage # to inline, and a subsequent write should maintain that setting. 
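# (For reference: the tree holds a 6-element and a 100-element array, so
# auto_inline=10 inlines only the small one, auto_inline=7 leaves that
# choice in place, and auto_inline=5 pushes both back to internal blocks,
# as the asserts below verify.)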
af.write_to(outfile) assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 1 af.write_to(outfile, auto_inline=7) assert len(list(af.blocks.inline_blocks)) == 1 assert len(list(af.blocks.internal_blocks)) == 1 af.write_to(outfile, auto_inline=5) assert len(list(af.blocks.inline_blocks)) == 0 assert len(list(af.blocks.internal_blocks)) == 2 @pytest.mark.parametrize("array_inline_threshold, inline_blocks, internal_blocks", [ (None, 0, 2), (10, 1, 1), (7, 1, 1), (5, 0, 2), (0, 0, 2), (1, 0, 2), ]) def test_array_inline_threshold(array_inline_threshold, inline_blocks, internal_blocks, tmp_path): file_path = tmp_path / "test.asdf" tree = {"small_array": np.arange(6), "large_array": np.arange(100)} with asdf.config_context() as config: config.array_inline_threshold = array_inline_threshold with asdf.AsdfFile(tree) as af: af.write_to(file_path) assert len(list(af.blocks.inline_blocks)) == inline_blocks assert len(list(af.blocks.internal_blocks)) == internal_blocks @pytest.mark.parametrize("array_inline_threshold, inline_blocks, internal_blocks", [ (None, 0, 2), (10, 2, 0), (5, 0, 2), ]) def test_array_inline_threshold_masked_array(array_inline_threshold, inline_blocks, internal_blocks, tmp_path): file_path = tmp_path / "test.asdf" arr = np.arange(6) masked_arr = np.ma.masked_equal(arr, 3) tree = {"masked_arr": masked_arr} with asdf.config_context() as config: config.array_inline_threshold = array_inline_threshold with asdf.AsdfFile(tree) as af: af.write_to(file_path) assert len(list(af.blocks.inline_blocks)) == inline_blocks assert len(list(af.blocks.internal_blocks)) == internal_blocks @pytest.mark.parametrize("array_inline_threshold, inline_blocks, internal_blocks", [ (None, 0, 1), (10, 1, 0), (5, 0, 1), ]) def test_array_inline_threshold_string_array(array_inline_threshold, inline_blocks, internal_blocks, tmp_path): file_path = tmp_path / "test.asdf" arr = np.array(["peach", "plum", "apricot", "nectarine", "cherry", "pluot"]) tree = {"array": arr} with asdf.config_context() as config: config.array_inline_threshold = array_inline_threshold with asdf.AsdfFile(tree) as af: af.write_to(file_path) assert len(list(af.blocks.inline_blocks)) == inline_blocks assert len(list(af.blocks.internal_blocks)) == internal_blocks def test_resolver_deprecations(): for resolver_method in [ resolver.default_resolver, resolver.default_tag_to_url_mapping, resolver.default_url_mapping, schema.default_ext_resolver ]: with pytest.warns(AsdfDeprecationWarning): resolver_method("foo") def test_get_default_resolver(): resolver = extension.get_default_resolver() result = resolver('tag:stsci.edu:asdf/core/ndarray-1.0.0') assert result.endswith("/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml") def test_info_module(capsys, tmpdir): tree = dict( foo=42, bar="hello", baz=np.arange(20), nested={"woo": "hoo", "yee": "haw"}, long_line="a" * 100 ) af = asdf.AsdfFile(tree) def _assert_correct_info(node_or_path): asdf.info(node_or_path) captured = capsys.readouterr() assert "foo" in captured.out assert "bar" in captured.out assert "baz" in captured.out _assert_correct_info(af) _assert_correct_info(af.tree) tmpfile = str(tmpdir.join("written.asdf")) af.write_to(tmpfile) af.close() _assert_correct_info(tmpfile) _assert_correct_info(pathlib.Path(tmpfile)) for i in range(1, 10): asdf.info(af, max_rows=i) lines = capsys.readouterr().out.strip().split("\n") assert len(lines) <= i asdf.info(af, max_cols=80) assert "(truncated)" in capsys.readouterr().out asdf.info(af, max_cols=None) captured = 
capsys.readouterr().out assert "(truncated)" not in captured assert "a" * 100 in captured asdf.info(af, show_values=True) assert "hello" in capsys.readouterr().out asdf.info(af, show_values=False) assert "hello" not in capsys.readouterr().out tree = { "foo": ["alpha", "bravo", "charlie", "delta", "eagle"] } af = asdf.AsdfFile(tree) asdf.info(af, max_rows=(None,)) assert "alpha" not in capsys.readouterr().out for i in range(1, 5): asdf.info(af, max_rows=(None, i)) captured = capsys.readouterr() for val in tree["foo"][0:i-1]: assert val in captured.out for val in tree["foo"][i-1:]: assert val not in captured.out def test_info_asdf_file(capsys, tmpdir): tree = dict( foo=42, bar="hello", baz=np.arange(20), nested={"woo": "hoo", "yee": "haw"}, long_line="a" * 100 ) af = asdf.AsdfFile(tree) af.info() captured = capsys.readouterr() assert "foo" in captured.out assert "bar" in captured.out assert "baz" in captured.out class ObjectWithInfoSupport: def __init__(self): self._tag = "foo" def __asdf_traverse__(self): return {'the_meaning_of_life_the_universe_and_everything': 42, 'clown': 'Bozo'} def test_info_object_support(capsys): tree = dict(random=3.14159, object=ObjectWithInfoSupport()) af = asdf.AsdfFile(tree) af.info() captured = capsys.readouterr() assert "the_meaning_of_life_the_universe_and_everything" in captured.out assert "clown" in captured.out assert "42" in captured.out assert "Bozo" in captured.out class RecursiveObjectWithInfoSupport: def __init__(self): self._tag = "foo" self.the_meaning = 42 self.clown = "Bozo" self.recursive = None def __asdf_traverse__(self): return {'the_meaning': self.the_meaning, 'clown': self.clown, 'recursive': self.recursive} def test_recursive_info_object_support(capsys): recursive_obj = RecursiveObjectWithInfoSupport() recursive_obj.recursive = recursive_obj tree = dict(random=3.14159, rtest=recursive_obj) af = asdf.AsdfFile(tree) af.info() captured = capsys.readouterr() assert "recursive reference" in captured.out def test_search(): tree = dict(foo=42, bar="hello", baz=np.arange(20)) af = asdf.AsdfFile(tree) result = af.search("foo") assert result.node == 42 result = af.search(type="ndarray") assert (result.node == tree["baz"]).all() result = af.search(value="hello") assert result.node == "hello" def test_history_entries(tmpdir): path = str(tmpdir.join("test.asdf")) message = "Twas brillig, and the slithy toves" af = asdf.AsdfFile() af.add_history_entry(message) af.write_to(path) with asdf.open(path) as af: assert af["history"]["entries"][0]["description"] == message af = asdf.AsdfFile() af.write_to(path) with asdf.open(path) as af: af.add_history_entry(message) assert af["history"]["entries"][0]["description"] == message def test_array_access_after_file_close(tmpdir): path = str(tmpdir.join("test.asdf")) data = np.arange(10) asdf.AsdfFile({"data": data}).write_to(path) # Normally it's not possible to read the array after # the file has been closed: with asdf.open(path) as af: tree = af.tree with pytest.raises(OSError, match="ASDF file has already been closed"): tree["data"][0] # With memory mapping disabled and copying arrays enabled, # the array data should still persist in memory after close: with asdf.open(path, lazy_load=False, copy_arrays=True) as af: tree = af.tree assert_array_equal(tree["data"], data) def test_none_values(tmpdir): path = str(tmpdir.join("test.asdf")) af = asdf.AsdfFile({"foo": None}) af.write_to(path) with asdf.open(path) as af: assert "foo" in af assert af["foo"] is None 
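# Hypothetical usage sketch (not part of the original suite): the two
# open() flavors exercised above, using only real asdf.open parameters.
def _example_open_modes(path):  # pragma: no cover
    # Default open(): lazy-loaded, possibly memory-mapped blocks, so array
    # data is only valid while the file is open.
    with asdf.open(path) as af:
        keys = list(af.tree)
    # lazy_load=False + copy_arrays=True: blocks are read eagerly into
    # in-memory copies, so the tree stays usable after close (see
    # test_array_access_after_file_close above).
    with asdf.open(path, lazy_load=False, copy_arrays=True) as af:
        tree = af.tree
    return keys, tree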
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_array_blocks.py0000644000537500020070000006047700000000000021757 0ustar00wjamiesonSTSCI\scienceimport io import os import numpy as np from numpy.random import random from numpy.testing import assert_array_equal import pytest import asdf from asdf import block from asdf import constants from asdf import generic_io def test_external_block(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.set_array_storage(my_array, 'external') assert ff.get_array_storage(my_array) == 'external' ff.write_to(os.path.join(tmpdir, "test.asdf")) assert 'test0000.asdf' in os.listdir(tmpdir) def test_external_block_non_url(): my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.set_array_storage(my_array, 'external') assert ff.get_array_storage(my_array) == 'external' buff = io.BytesIO() with pytest.raises(ValueError): ff.write_to(buff) def test_invalid_array_storage(): my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.set_array_storage(my_array, 'foo') b = block.Block() b._array_storage = 'foo' with pytest.raises(ValueError): ff.blocks.add(b) with pytest.raises(ValueError): ff.blocks.remove(b) def test_transfer_array_sources(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.write_to(os.path.join(tmpdir, "test2.asdf")) # write_to should have no effect on getting the original data assert_array_equal(my_array, ff.tree['my_array']) assert ff._fd is None def test_write_to_same(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) with asdf.open( os.path.join(tmpdir, "test.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.tree['extra'] = [0] * 1000 ff.write_to(os.path.join(tmpdir, "test2.asdf")) with asdf.open( os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) def test_pad_blocks(tmpdir): tmpdir = str(tmpdir) # This is the case where the new tree can't fit in the available space my_array = np.ones((8, 8)) * 1 my_array2 = np.ones((42, 5)) * 2 tree = { 'my_array': my_array, 'my_array2': my_array2 } ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True) with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['my_array'], my_array) assert_array_equal(ff.tree['my_array2'], my_array2) def test_update_expand_tree(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.arange(64) * 1 my_array2 = np.arange(64) * 2 tree = { 'arrays': [ my_array, my_array2, np.arange(3) ] } ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') assert len(list(ff.blocks.inline_blocks)) == 1 ff.write_to(testpath, pad_blocks=True) with asdf.open(testpath, mode='rw') as ff: assert_array_equal(ff.tree['arrays'][0], my_array) orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 6000 ff.update() with asdf.open(testpath) as ff: assert 
orig_offset <= ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) assert_array_equal(ff.tree['arrays'][1], my_array2) # Now, we expand the header only by a little bit ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') ff.write_to(os.path.join(tmpdir, "test2.asdf"), pad_blocks=True) with asdf.open(os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 2 ff.update() with asdf.open(os.path.join(tmpdir, "test2.asdf")) as ff: assert orig_offset == ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) assert_array_equal(ff.tree['arrays'][1], my_array2) def _get_update_tree(): return { 'arrays': [ np.arange(64) * 1, np.arange(64) * 2, np.arange(64) * 3 ] } def test_update_delete_first_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][0] ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) def test_update_delete_last_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][-1] ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) def test_update_delete_middle_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][1] ff.update() assert len(ff.blocks._internal_blocks) == 2 assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert len(ff.tree['arrays']) == 2 assert ff.tree['arrays'][0]._source == 0 assert ff.tree['arrays'][1]._source == 1 assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) def test_update_replace_first_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][0] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: 
assert_array_equal(ff.tree['arrays'][0], np.arange(32)) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) def test_update_replace_last_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][2] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], np.arange(32)) def test_update_replace_middle_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][1] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], np.arange(32)) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) def test_update_add_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(32)) ff.update() with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) assert_array_equal(ff.tree['arrays'][3], np.arange(32)) def test_update_add_array_at_end(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(65536, dtype='<i8')) ff.update() assert os.stat(path).st_size >= original_size with asdf.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) assert_array_equal(ff.tree['arrays'][3], np.arange(65536, dtype='<i8')) assert len(default_mappings) >= len(core_mappings) new_mapping = {"http://somewhere.org/schemas/foo-1.0.0": b"foo"} config.add_resource_mapping(new_mapping) assert len(config.resource_mappings) == len(default_mappings) + 1 assert any(m for m in config.resource_mappings if m.delegate is new_mapping) # Adding a mapping should be idempotent: config.add_resource_mapping(new_mapping) # ...
even if wrapped: config.add_resource_mapping(ResourceMappingProxy(new_mapping)) assert len(config.resource_mappings) == len(default_mappings) + 1 # Adding a mapping should place it at the front of the line: front_mapping = {"http://somewhere.org/schemas/baz-1.0.0": b"baz"} config.add_resource_mapping(front_mapping) assert len(config.resource_mappings) == len(default_mappings) + 2 assert config.resource_mappings[0].delegate is front_mapping # ... even if the mapping is already in the list: config.add_resource_mapping(new_mapping) assert len(config.resource_mappings) == len(default_mappings) + 2 assert config.resource_mappings[0].delegate is new_mapping # Reset should get rid of any additions: config.reset_resources() assert len(config.resource_mappings) == len(default_mappings) # Should be able to remove a mapping: config.add_resource_mapping(new_mapping) config.remove_resource_mapping(new_mapping) assert len(config.resource_mappings) == len(default_mappings) # ... even if wrapped: config.add_resource_mapping(new_mapping) config.remove_resource_mapping(ResourceMappingProxy(new_mapping)) assert len(config.resource_mappings) == len(default_mappings) # ... and also by the name of the package the mappings came from: config.add_resource_mapping(ResourceMappingProxy(new_mapping, package_name="foo")) config.add_resource_mapping(ResourceMappingProxy({"http://somewhere.org/schemas/bar-1.0.0": b"bar"}, package_name="foo")) config.remove_resource_mapping(package="foo") assert len(config.resource_mappings) == len(default_mappings) # Can combine the package and mapping filters when removing: config.add_resource_mapping(ResourceMappingProxy(new_mapping, package_name="foo")) config.remove_resource_mapping(new_mapping, package="foo") assert len(config.resource_mappings) == len(default_mappings) # But not omit both: with pytest.raises(ValueError): config.remove_resource_mapping() # Removing a mapping should be idempotent: config.add_resource_mapping(new_mapping) config.remove_resource_mapping(new_mapping) config.remove_resource_mapping(new_mapping) assert len(config.resource_mappings) == len(default_mappings) def test_resource_manager(): with asdf.config_context() as config: # Initial resource manager should contain just the entry points resources: assert "http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager assert b"http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager["http://stsci.edu/schemas/asdf/core/asdf-1.1.0"] assert "http://somewhere.org/schemas/foo-1.0.0" not in config.resource_manager # Add a mapping and confirm that the manager now contains it: new_mapping = {"http://somewhere.org/schemas/foo-1.0.0": b"foo"} config.add_resource_mapping(new_mapping) assert "http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager assert b"http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager["http://stsci.edu/schemas/asdf/core/asdf-1.1.0"] assert "http://somewhere.org/schemas/foo-1.0.0" in config.resource_manager assert config.resource_manager["http://somewhere.org/schemas/foo-1.0.0"] == b"foo" # Remove a mapping and confirm that the manager no longer contains it: config.remove_resource_mapping(new_mapping) assert "http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager assert b"http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager["http://stsci.edu/schemas/asdf/core/asdf-1.1.0"] assert "http://somewhere.org/schemas/foo-1.0.0" not in config.resource_manager # Reset and confirm that the manager no 
longer contains the custom mapping: config.add_resource_mapping(new_mapping) config.reset_resources() assert "http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager assert b"http://stsci.edu/schemas/asdf/core/asdf-1.1.0" in config.resource_manager["http://stsci.edu/schemas/asdf/core/asdf-1.1.0"] assert "http://somewhere.org/schemas/foo-1.0.0" not in config.resource_manager def test_extensions(): with asdf.config_context() as config: original_extensions = config.extensions assert any(isinstance(e.delegate, BuiltinExtension) for e in original_extensions) class FooExtension: types = [] tag_mapping = [] url_mapping = [] new_extension = FooExtension() class BarExtension: extension_uri = "asdf://somewhere.org/extensions/bar-1.0" types = [] tag_mapping = [] url_mapping = [] uri_extension = BarExtension() # Add an extension: config.add_extension(new_extension) assert len(config.extensions) == len(original_extensions) + 1 assert any(e for e in config.extensions if e.delegate is new_extension) # Adding an extension should be idempotent: config.add_extension(new_extension) assert len(config.extensions) == len(original_extensions) + 1 # Even when wrapped: config.add_extension(ExtensionProxy(new_extension)) assert len(config.extensions) == len(original_extensions) + 1 # Remove an extension: config.remove_extension(new_extension) assert len(config.extensions) == len(original_extensions) # Removing should work when wrapped: config.add_extension(new_extension) config.remove_extension(ExtensionProxy(new_extension)) assert len(config.extensions) == len(original_extensions) # And also by URI: config.add_extension(uri_extension) config.remove_extension(uri_extension.extension_uri) assert len(config.extensions) == len(original_extensions) # And also by URI pattern: config.add_extension(uri_extension) config.remove_extension("asdf://somewhere.org/extensions/*") assert len(config.extensions) == len(original_extensions) # Remove by the name of the extension's package: config.add_extension(ExtensionProxy(new_extension, package_name="foo")) config.add_extension(ExtensionProxy(uri_extension, package_name="foo")) config.remove_extension(package="foo") assert len(config.extensions) == len(original_extensions) # Can combine remove filters: config.add_extension(ExtensionProxy(new_extension, package_name="foo")) config.add_extension(ExtensionProxy(uri_extension, package_name="foo")) config.remove_extension(uri_extension.extension_uri, package="foo") assert len(config.extensions) == len(original_extensions) + 1 # ... 
but not omit both: with pytest.raises(ValueError): config.remove_extension() # Removing an extension should be idempotent: config.add_extension(new_extension) config.remove_extension(new_extension) config.remove_extension(new_extension) assert len(config.extensions) == len(original_extensions) # Resetting should get rid of any additions: config.add_extension(new_extension) config.add_extension(FooExtension()) config.reset_extensions() assert len(config.extensions) == len(original_extensions) def test_config_repr(): with asdf.config_context() as config: config.validate_on_read = True config.default_version = "1.5.0" config.io_block_size = 9999 config.legacy_fill_schema_defaults = False config.array_inline_threshold = 14 assert "validate_on_read: True" in repr(config) assert "default_version: 1.5.0" in repr(config) assert "io_block_size: 9999" in repr(config) assert "legacy_fill_schema_defaults: False" in repr(config) assert "array_inline_threshold: 14" in repr(config) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_entry_points.py0000644000537500020070000001135400000000000022027 0ustar00wjamiesonSTSCI\sciencefrom pkg_resources import EntryPoint import pkg_resources import pytest from asdf import entry_points from asdf.exceptions import AsdfWarning from asdf.resource import ResourceMappingProxy from asdf.version import version as asdf_package_version from asdf.extension import ExtensionProxy @pytest.fixture def mock_entry_points(): return [] @pytest.fixture(autouse=True) def monkeypatch_entry_points(monkeypatch, mock_entry_points): def _iter_entry_points(*, group): for candidate_group, name, func_name in mock_entry_points: if candidate_group == group: yield EntryPoint( name, "asdf.tests.test_entry_points", attrs=(func_name,), dist=pkg_resources.get_distribution("asdf"), ) monkeypatch.setattr(entry_points, "iter_entry_points", _iter_entry_points) def resource_mappings_entry_point_successful(): return [ {"http://somewhere.org/schemas/foo-1.0.0": b"foo"}, {"http://somewhere.org/schemas/bar-1.0.0": b"bar"}, ] def resource_mappings_entry_point_failing(): raise Exception("NOPE") def resource_mappings_entry_point_bad_element(): return [ {"http://somewhere.org/schemas/baz-1.0.0": b"baz"}, object(), {"http://somewhere.org/schemas/foz-1.0.0": b"foz"}, ] def test_get_resource_mappings(mock_entry_points): mock_entry_points.append(("asdf.resource_mappings", "successful", "resource_mappings_entry_point_successful")) mappings = entry_points.get_resource_mappings() assert len(mappings) == 2 for m in mappings: assert isinstance(m, ResourceMappingProxy) assert m.package_name == "asdf" assert m.package_version == asdf_package_version mock_entry_points.clear() mock_entry_points.append(("asdf.resource_mappings", "failing", "resource_mappings_entry_point_failing")) with pytest.warns(AsdfWarning, match="Exception: NOPE"): mappings = entry_points.get_resource_mappings() assert len(mappings) == 0 mock_entry_points.clear() mock_entry_points.append(("asdf.resource_mappings", "bad_element", "resource_mappings_entry_point_bad_element")) with pytest.warns(AsdfWarning, match="TypeError: Resource mapping must implement the Mapping interface"): mappings = entry_points.get_resource_mappings() assert len(mappings) == 2 class MinimumExtension: def __init__(self, extension_uri): self._extension_uri = extension_uri @property def extension_uri(self): return self._extension_uri def extensions_entry_point_successful(): return [ 
MinimumExtension("http://somewhere.org/extensions/foo-1.0"), MinimumExtension("http://somewhere.org/extensions/bar-1.0"), ] def extensions_entry_point_failing(): raise Exception("NOPE") def extensions_entry_point_bad_element(): return [ MinimumExtension("http://somewhere.org/extensions/baz-1.0"), object(), MinimumExtension("http://somewhere.org/extensions/foz-1.0"), ] class LegacyExtension: types = [] tag_mapping = [] url_mapping = [] class FauxLegacyExtension: pass def test_get_extensions(mock_entry_points): mock_entry_points.append(("asdf.extensions", "successful", "extensions_entry_point_successful")) extensions = entry_points.get_extensions() assert len(extensions) == 2 for e in extensions: assert isinstance(e, ExtensionProxy) assert e.package_name == "asdf" assert e.package_version == asdf_package_version mock_entry_points.clear() mock_entry_points.append(("asdf.extensions", "failing", "extensions_entry_point_failing")) with pytest.warns(AsdfWarning, match="Exception: NOPE"): extensions = entry_points.get_extensions() assert len(extensions) == 0 mock_entry_points.clear() mock_entry_points.append(("asdf.extensions", "bad_element", "extensions_entry_point_bad_element")) with pytest.warns(AsdfWarning, match="TypeError: Extension must implement the Extension or AsdfExtension interface"): extensions = entry_points.get_extensions() assert len(extensions) == 2 mock_entry_points.clear() mock_entry_points.append(("asdf_extensions", "legacy", "LegacyExtension")) extensions = entry_points.get_extensions() assert len(extensions) == 1 for e in extensions: assert isinstance(e, ExtensionProxy) assert e.package_name == "asdf" assert e.package_version == asdf_package_version assert e.legacy is True mock_entry_points.clear() mock_entry_points.append(("asdf_extensions", "failing", "FauxLegacyExtension")) with pytest.warns(AsdfWarning, match="TypeError"): extensions = entry_points.get_extensions() assert len(extensions) == 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/test_extension.py0000644000537500020070000006034500000000000021312 0ustar00wjamiesonSTSCI\scienceimport pytest from packaging.specifiers import SpecifierSet from asdf.extension import ( Extension, ExtensionProxy, ManifestExtension, ExtensionManager, get_cached_extension_manager, TagDefinition, Converter, ConverterProxy, Compressor, AsdfExtension, BuiltinExtension, get_cached_asdf_extension_list ) from asdf import config_context from asdf.exceptions import AsdfDeprecationWarning from asdf.types import CustomType from asdf.tests.helpers import assert_extension_correctness def test_builtin_extension(): extension = BuiltinExtension() assert_extension_correctness(extension) class LegacyType(dict, CustomType): organization = "somewhere.org" name = "test" version = "1.0.0" class LegacyExtension: types = [LegacyType] tag_mapping = [("tag:somewhere.org/", "http://somewhere.org/{tag_suffix}")] url_mapping = [("http://somewhere.org/", "http://somewhere.org/{url_suffix}.yaml")] class MinimumExtension: extension_uri = "asdf://somewhere.org/extensions/minimum-1.0" class MinimumExtensionSubclassed(Extension): extension_uri = "asdf://somewhere.org/extensions/minimum-1.0" class FullExtension: extension_uri = "asdf://somewhere.org/extensions/full-1.0" def __init__( self, converters=None, compressors=None, asdf_standard_requirement=None, tags=None, legacy_class_names=None, ): self._converters = [] if converters is None else converters self._compressors = [] if compressors is None else 
compressors self._asdf_standard_requirement = asdf_standard_requirement self._tags = tags self._legacy_class_names = [] if legacy_class_names is None else legacy_class_names @property def converters(self): return self._converters @property def compressors(self): return self._compressors @property def asdf_standard_requirement(self): return self._asdf_standard_requirement @property def tags(self): return self._tags @property def legacy_class_names(self): return self._legacy_class_names class MinimumConverter: def __init__(self, tags=None, types=None): if tags is None: self._tags = [] else: self._tags = tags if types is None: self._types = [] else: self._types = types @property def tags(self): return self._tags @property def types(self): return self._types def to_yaml_tree(self, obj, tag, ctx): return "to_yaml_tree result" def from_yaml_tree(self, obj, tag, ctx): return "from_yaml_tree result" class FullConverter(MinimumConverter): def select_tag(self, obj, tags, ctx): return "select_tag result" class MinimalCompressor(Compressor): def compress(data): return b'' @property def label(self): return b'mini' # Some dummy types for testing converters: class FooType: pass class BarType: pass class BazType: pass def test_extension_proxy_maybe_wrap(): extension = MinimumExtension() proxy = ExtensionProxy.maybe_wrap(extension) assert proxy.delegate is extension assert ExtensionProxy.maybe_wrap(proxy) is proxy with pytest.raises(TypeError): ExtensionProxy.maybe_wrap(object()) def test_extension_proxy(): # Test with minimum properties: extension = MinimumExtension() proxy = ExtensionProxy(extension) assert isinstance(proxy, Extension) assert isinstance(proxy, AsdfExtension) assert proxy.extension_uri == "asdf://somewhere.org/extensions/minimum-1.0" assert proxy.legacy_class_names == set() assert proxy.asdf_standard_requirement == SpecifierSet() assert proxy.converters == [] assert proxy.compressors == [] assert proxy.tags == [] assert proxy.types == [] assert proxy.tag_mapping == [] assert proxy.url_mapping == [] assert proxy.delegate is extension assert proxy.legacy is False assert proxy.package_name is None assert proxy.package_version is None assert proxy.class_name == "asdf.tests.test_extension.MinimumExtension" # The subclassed version should have the same defaults: extension = MinimumExtensionSubclassed() subclassed_proxy = ExtensionProxy(extension) assert subclassed_proxy.extension_uri == proxy.extension_uri assert subclassed_proxy.legacy_class_names == proxy.legacy_class_names assert subclassed_proxy.asdf_standard_requirement == proxy.asdf_standard_requirement assert subclassed_proxy.converters == proxy.converters assert subclassed_proxy.compressors == proxy.compressors assert subclassed_proxy.tags == proxy.tags assert subclassed_proxy.types == proxy.types assert subclassed_proxy.tag_mapping == proxy.tag_mapping assert subclassed_proxy.url_mapping == proxy.url_mapping assert subclassed_proxy.delegate is extension assert subclassed_proxy.legacy == proxy.legacy assert subclassed_proxy.package_name == proxy.package_name assert subclassed_proxy.package_version == proxy.package_version assert subclassed_proxy.class_name == "asdf.tests.test_extension.MinimumExtensionSubclassed" # Test with all properties present: converters = [ MinimumConverter( tags=["asdf://somewhere.org/extensions/full/tags/foo-*"], types=[] ) ] compressors = [ MinimalCompressor() ] extension = FullExtension( converters=converters, compressors=compressors, asdf_standard_requirement=">=1.4.0",
tags=["asdf://somewhere.org/extensions/full/tags/foo-1.0"], legacy_class_names=["foo.extensions.SomeOldExtensionClass"] ) proxy = ExtensionProxy(extension, package_name="foo", package_version="1.2.3") assert proxy.extension_uri == "asdf://somewhere.org/extensions/full-1.0" assert proxy.legacy_class_names == {"foo.extensions.SomeOldExtensionClass"} assert proxy.asdf_standard_requirement == SpecifierSet(">=1.4.0") assert proxy.converters == [ConverterProxy(c, proxy) for c in converters] assert proxy.compressors == compressors assert len(proxy.tags) == 1 assert proxy.tags[0].tag_uri == "asdf://somewhere.org/extensions/full/tags/foo-1.0" assert proxy.types == [] assert proxy.tag_mapping == [] assert proxy.url_mapping == [] assert proxy.delegate is extension assert proxy.legacy is False assert proxy.package_name == "foo" assert proxy.package_version == "1.2.3" assert proxy.class_name == "asdf.tests.test_extension.FullExtension" # Should fail when the input is not one of the two extension interfaces: with pytest.raises(TypeError): ExtensionProxy(object) # Should fail with a bad converter: with pytest.raises(TypeError): ExtensionProxy(FullExtension(converters=[object()])) # Should fail with a bad compressor: with pytest.raises(TypeError): ExtensionProxy(FullExtension(compressors=[object()])) # Unparseable ASDF Standard requirement: with pytest.raises(ValueError): ExtensionProxy(FullExtension(asdf_standard_requirement="asdf-standard >= 1.4.0")) # Unrecognized ASDF Standard requirement type: with pytest.raises(TypeError): ExtensionProxy(FullExtension(asdf_standard_requirement=object())) # Bad tag: with pytest.raises(TypeError): ExtensionProxy(FullExtension(tags=[object()])) # Bad legacy class names: with pytest.raises(TypeError): ExtensionProxy(FullExtension(legacy_class_names=[object])) def test_extension_proxy_tags(): """ The tags behavior is a tad complex, so they get their own test. """ foo_tag_uri = "asdf://somewhere.org/extensions/full/tags/foo-1.0" foo_tag_def = TagDefinition( foo_tag_uri, schema_uris="asdf://somewhere.org/extensions/full/schemas/foo-1.0", title="Some tag title", description="Some tag description" ) bar_tag_uri = "asdf://somewhere.org/extensions/full/tags/bar-1.0" bar_tag_def = TagDefinition( bar_tag_uri, schema_uris="asdf://somewhere.org/extensions/full/schemas/bar-1.0", title="Some other tag title", description="Some other tag description" ) # The converter should return only the tags # supported by the extension. converter = FullConverter(tags=["**"]) extension = FullExtension(tags=[foo_tag_def], converters=[converter]) proxy = ExtensionProxy(extension) assert proxy.converters[0].tags == [foo_tag_uri] # The converter should not return tags that # its patterns do not match. converter = FullConverter(tags=["**/foo-1.0"]) extension = FullExtension(tags=[foo_tag_def, bar_tag_def], converters=[converter]) proxy = ExtensionProxy(extension) assert proxy.converters[0].tags == [foo_tag_uri] # The process should still work if the extension property # contains str instead of TagDescription. 
converter = FullConverter(tags=["**/foo-1.0"]) extension = FullExtension(tags=[foo_tag_uri, bar_tag_uri], converters=[converter]) proxy = ExtensionProxy(extension) assert proxy.converters[0].tags == [foo_tag_uri] def test_extension_proxy_legacy(): extension = LegacyExtension() proxy = ExtensionProxy(extension, package_name="foo", package_version="1.2.3") assert proxy.extension_uri is None assert proxy.legacy_class_names == {"asdf.tests.test_extension.LegacyExtension"} assert proxy.asdf_standard_requirement == SpecifierSet() assert proxy.converters == [] assert proxy.tags == [] assert proxy.types == [LegacyType] assert proxy.tag_mapping == LegacyExtension.tag_mapping assert proxy.url_mapping == LegacyExtension.url_mapping assert proxy.delegate is extension assert proxy.legacy is True assert proxy.package_name == "foo" assert proxy.package_version == "1.2.3" assert proxy.class_name == "asdf.tests.test_extension.LegacyExtension" def test_extension_proxy_hash_and_eq(): extension = MinimumExtension() proxy1 = ExtensionProxy(extension) proxy2 = ExtensionProxy(extension, package_name="foo", package_version="1.2.3") assert proxy1 == proxy2 assert hash(proxy1) == hash(proxy2) assert proxy1 != extension assert proxy2 != extension def test_extension_proxy_repr(): proxy = ExtensionProxy(MinimumExtension(), package_name="foo", package_version="1.2.3") assert "class: asdf.tests.test_extension.MinimumExtension" in repr(proxy) assert "package: foo==1.2.3" in repr(proxy) assert "legacy: False" in repr(proxy) proxy = ExtensionProxy(MinimumExtension()) assert "class: asdf.tests.test_extension.MinimumExtension" in repr(proxy) assert "package: (none)" in repr(proxy) assert "legacy: False" in repr(proxy) proxy = ExtensionProxy(LegacyExtension(), package_name="foo", package_version="1.2.3") assert "class: asdf.tests.test_extension.LegacyExtension" in repr(proxy) assert "package: foo==1.2.3" in repr(proxy) assert "legacy: True" in repr(proxy) def test_extension_manager(): converter1 = FullConverter( tags=[ "asdf://somewhere.org/extensions/full/tags/foo-*", "asdf://somewhere.org/extensions/full/tags/bar-*", ], types=[ FooType, "asdf.tests.test_extension.BarType", ], ) converter2 = FullConverter( tags=[ "asdf://somewhere.org/extensions/full/tags/baz-*", ], types=[ BazType ], ) converter3= FullConverter( tags=[ "asdf://somewhere.org/extensions/full/tags/foo-*", ], types=[ FooType, BarType, ], ) extension1 = FullExtension( converters=[converter1, converter2], tags=[ "asdf://somewhere.org/extensions/full/tags/foo-1.0", "asdf://somewhere.org/extensions/full/tags/baz-1.0", ] ) extension2 = FullExtension( converters=[converter3], tags = [ "asdf://somewhere.org/extensions/full/tags/foo-1.0", ] ) manager = ExtensionManager([extension1, extension2]) assert manager.extensions == [ExtensionProxy(extension1), ExtensionProxy(extension2)] assert manager.handles_tag("asdf://somewhere.org/extensions/full/tags/foo-1.0") is True assert manager.handles_tag("asdf://somewhere.org/extensions/full/tags/bar-1.0") is False assert manager.handles_tag("asdf://somewhere.org/extensions/full/tags/baz-1.0") is True assert manager.handles_type(FooType) is True # This should return True even though BarType was listed # as string class name: assert manager.handles_type(BarType) is True assert manager.handles_type(BazType) is True assert manager.get_tag_definition("asdf://somewhere.org/extensions/full/tags/foo-1.0").tag_uri == "asdf://somewhere.org/extensions/full/tags/foo-1.0" assert 
manager.get_tag_definition("asdf://somewhere.org/extensions/full/tags/baz-1.0").tag_uri == "asdf://somewhere.org/extensions/full/tags/baz-1.0" with pytest.raises(KeyError): manager.get_tag_definition("asdf://somewhere.org/extensions/full/tags/bar-1.0") assert manager.get_converter_for_tag("asdf://somewhere.org/extensions/full/tags/foo-1.0").delegate is converter1 assert manager.get_converter_for_tag("asdf://somewhere.org/extensions/full/tags/baz-1.0").delegate is converter2 with pytest.raises(KeyError): manager.get_converter_for_tag("asdf://somewhere.org/extensions/full/tags/bar-1.0") assert manager.get_converter_for_type(FooType).delegate is converter1 assert manager.get_converter_for_type(BarType).delegate is converter1 assert manager.get_converter_for_type(BazType).delegate is converter2 with pytest.raises(KeyError): manager.get_converter_for_type(object) def test_get_cached_extension_manager(): extension = MinimumExtension() extension_manager = get_cached_extension_manager([extension]) assert get_cached_extension_manager([extension]) is extension_manager assert get_cached_extension_manager([MinimumExtension()]) is not extension_manager def test_tag_definition(): tag_def = TagDefinition( "asdf://somewhere.org/extensions/foo/tags/foo-1.0", schema_uris="asdf://somewhere.org/extensions/foo/schemas/foo-1.0", title="Some title", description="Some description", ) assert tag_def.tag_uri == "asdf://somewhere.org/extensions/foo/tags/foo-1.0" assert tag_def.schema_uris == ["asdf://somewhere.org/extensions/foo/schemas/foo-1.0"] assert tag_def.title == "Some title" assert tag_def.description == "Some description" assert "URI: asdf://somewhere.org/extensions/foo/tags/foo-1.0" in repr(tag_def) with pytest.warns(AsdfDeprecationWarning): assert tag_def.schema_uri == "asdf://somewhere.org/extensions/foo/schemas/foo-1.0" tag_def = TagDefinition( "asdf://somewhere.org/extensions/foo/tags/foo-1.0", schema_uris=["asdf://somewhere.org/extensions/foo/schemas/foo-1.0", "asdf://somewhere.org/extensions/foo/schemas/base-1.0"], title="Some title", description="Some description", ) assert tag_def.schema_uris == ["asdf://somewhere.org/extensions/foo/schemas/foo-1.0", "asdf://somewhere.org/extensions/foo/schemas/base-1.0"] with pytest.warns(AsdfDeprecationWarning): with pytest.raises(RuntimeError): tag_def.schema_uri with pytest.raises(ValueError): TagDefinition("asdf://somewhere.org/extensions/foo/tags/foo-*") def test_converter(): class ConverterNoSubclass: tags = [] types = [] def to_yaml_tree(self, *args): pass def from_yaml_tree(self, *args): pass assert issubclass(ConverterNoSubclass, Converter) class ConverterWithSubclass(Converter): tags = [] types = [] def to_yaml_tree(self, *args): pass def from_yaml_tree(self, *args): pass # Confirm the behavior of the default select_tag implementation assert ConverterWithSubclass().select_tag(object(), ["tag1", "tag2"], object()) == "tag1" def test_converter_proxy(): # Test the minimum set of converter methods: extension = ExtensionProxy(MinimumExtension()) converter = MinimumConverter() proxy = ConverterProxy(converter, extension) assert isinstance(proxy, Converter) assert proxy.tags == [] assert proxy.types == [] assert proxy.to_yaml_tree(None, None, None) == "to_yaml_tree result" assert proxy.from_yaml_tree(None, None, None) == "from_yaml_tree result" assert proxy.tags == [] assert proxy.delegate is converter assert proxy.extension == extension assert proxy.package_name is None assert proxy.package_version is None assert proxy.class_name == 
"asdf.tests.test_extension.MinimumConverter" # Check the __eq__ and __hash__ behavior: assert proxy == ConverterProxy(converter, extension) assert proxy != ConverterProxy(MinimumConverter(), extension) assert proxy != ConverterProxy(converter, MinimumExtension()) assert proxy in {ConverterProxy(converter, extension)} assert proxy not in { ConverterProxy(MinimumConverter(), extension), ConverterProxy(converter, MinimumExtension()) } # Check the __repr__: assert "class: asdf.tests.test_extension.MinimumConverter" in repr(proxy) assert "package: (none)" in repr(proxy) # Test the full set of converter methods: converter = FullConverter( tags=[ "asdf://somewhere.org/extensions/test/tags/foo-*", "asdf://somewhere.org/extensions/test/tags/bar-*", ], types=[FooType, BarType] ) extension = FullExtension( tags=[ TagDefinition( "asdf://somewhere.org/extensions/test/tags/foo-1.0", schema_uris="asdf://somewhere.org/extensions/test/schemas/foo-1.0", title="Foo tag title", description="Foo tag description" ), TagDefinition( "asdf://somewhere.org/extensions/test/tags/bar-1.0", schema_uris="asdf://somewhere.org/extensions/test/schemas/bar-1.0", title="Bar tag title", description="Bar tag description" ), ] ) extension_proxy = ExtensionProxy(extension, package_name="foo", package_version="1.2.3") proxy = ConverterProxy(converter, extension_proxy) assert len(proxy.tags) == 2 assert "asdf://somewhere.org/extensions/test/tags/foo-1.0" in proxy.tags assert "asdf://somewhere.org/extensions/test/tags/bar-1.0" in proxy.tags assert proxy.types == [FooType, BarType] assert proxy.to_yaml_tree(None, None, None) == "to_yaml_tree result" assert proxy.from_yaml_tree(None, None, None) == "from_yaml_tree result" assert proxy.select_tag(None, None) == "select_tag result" assert proxy.delegate is converter assert proxy.extension == extension_proxy assert proxy.package_name == "foo" assert proxy.package_version == "1.2.3" assert proxy.class_name == "asdf.tests.test_extension.FullConverter" # Check the __repr__ since it will contain package info now: assert "class: asdf.tests.test_extension.FullConverter" in repr(proxy) assert "package: foo==1.2.3" in repr(proxy) # Should error because object() does fulfill the Converter interface: with pytest.raises(TypeError): ConverterProxy(object(), extension) # Should fail because tags must be str: with pytest.raises(TypeError): ConverterProxy(MinimumConverter(tags=[object()]), extension) # Should fail because types must instances of type: with pytest.raises(TypeError): ConverterProxy(MinimumConverter(types=[object()]), extension) def test_get_cached_asdf_extension_list(): extension = LegacyExtension() extension_list = get_cached_asdf_extension_list([extension]) assert get_cached_asdf_extension_list([extension]) is extension_list assert get_cached_asdf_extension_list([LegacyExtension()]) is not extension_list def test_manifest_extension(): with config_context() as config: minimal_manifest = """%YAML 1.1 --- id: asdf://somewhere.org/manifests/foo extension_uri: asdf://somewhere.org/extensions/foo ... 
""" config.add_resource_mapping({"asdf://somewhere.org/extensions/foo": minimal_manifest}) extension = ManifestExtension.from_uri("asdf://somewhere.org/extensions/foo") assert isinstance(extension, Extension) assert extension.extension_uri == "asdf://somewhere.org/extensions/foo" assert extension.legacy_class_names == [] assert extension.asdf_standard_requirement is None assert extension.converters == [] assert extension.compressors == [] assert extension.tags == [] proxy = ExtensionProxy(extension) assert proxy.extension_uri == "asdf://somewhere.org/extensions/foo" assert proxy.legacy_class_names == set() assert proxy.asdf_standard_requirement == SpecifierSet() assert proxy.converters == [] assert proxy.compressors == [] assert proxy.tags == [] with config_context() as config: full_manifest = """%YAML 1.1 --- id: asdf://somewhere.org/manifests/foo extension_uri: asdf://somewhere.org/extensions/foo asdf_standard_requirement: gte: 1.6.0 lt: 2.0.0 tags: - asdf://somewhere.org/tags/bar - tag_uri: asdf://somewhere.org/tags/baz schema_uri: asdf://somewhere.org/schemas/baz title: Baz title description: Bar description ... """ config.add_resource_mapping({"asdf://somewhere.org/extensions/foo": full_manifest}) class FooConverter: tags = ["asdf://somewhere.org/tags/bar", "asdf://somewhere.org/tags/baz"] types = [] def select_tag(self, *args): pass def to_yaml_tree(self, *args): pass def from_yaml_tree(self, *args): pass converter = FooConverter() compressor = MinimalCompressor() extension = ManifestExtension.from_uri( "asdf://somewhere.org/extensions/foo", legacy_class_names=["foo.extension.LegacyExtension"], converters=[converter], compressors=[compressor], ) assert extension.extension_uri == "asdf://somewhere.org/extensions/foo" assert extension.legacy_class_names == ["foo.extension.LegacyExtension"] assert extension.asdf_standard_requirement == SpecifierSet(">=1.6.0,<2.0.0") assert extension.converters == [converter] assert extension.compressors == [compressor] assert len(extension.tags) == 2 assert extension.tags[0] == "asdf://somewhere.org/tags/bar" assert extension.tags[1].tag_uri == "asdf://somewhere.org/tags/baz" assert extension.tags[1].schema_uris == ["asdf://somewhere.org/schemas/baz"] assert extension.tags[1].title == "Baz title" assert extension.tags[1].description == "Bar description" proxy = ExtensionProxy(extension) assert proxy.extension_uri == "asdf://somewhere.org/extensions/foo" assert proxy.legacy_class_names == {"foo.extension.LegacyExtension"} assert proxy.asdf_standard_requirement == SpecifierSet(">=1.6.0,<2.0.0") assert proxy.converters == [ConverterProxy(converter, proxy)] assert proxy.compressors == [compressor] assert len(proxy.tags) == 2 assert proxy.tags[0].tag_uri == "asdf://somewhere.org/tags/bar" assert proxy.tags[1].tag_uri == "asdf://somewhere.org/tags/baz" assert proxy.tags[1].schema_uris == ["asdf://somewhere.org/schemas/baz"] assert proxy.tags[1].title == "Baz title" assert proxy.tags[1].description == "Bar description" with config_context() as config: simple_asdf_standard_manifest = """%YAML 1.1 --- id: asdf://somewhere.org/manifests/foo extension_uri: asdf://somewhere.org/extensions/foo asdf_standard_requirement: 1.6.0 ... 
""" config.add_resource_mapping({"asdf://somewhere.org/extensions/foo": simple_asdf_standard_manifest}) extension = ManifestExtension.from_uri("asdf://somewhere.org/extensions/foo") assert extension.asdf_standard_requirement == SpecifierSet("==1.6.0") proxy = ExtensionProxy(extension) assert proxy.asdf_standard_requirement == SpecifierSet("==1.6.0") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_file_format.py0000644000537500020070000001234400000000000021561 0ustar00wjamiesonSTSCI\scienceimport os import io import pytest import asdf from asdf import generic_io def test_no_yaml_end_marker(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: bar...baz baz: 42 """ path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with pytest.raises(ValueError): with asdf.open(buff): pass buff.seek(0) fd = generic_io.InputStream(buff, 'r') with pytest.raises(ValueError): with asdf.open(fd): pass with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): with asdf.open(fd): pass def test_no_final_newline(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: ...bar... baz: 42 ...""" path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with asdf.open(buff) as ff: assert len(ff.tree) == 2 buff.seek(0) fd = generic_io.InputStream(buff, 'r') with asdf.open(fd) as ff: assert len(ff.tree) == 2 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with asdf.open(fd) as ff: assert len(ff.tree) == 2 @pytest.mark.filterwarnings('ignore::astropy.io.fits.verify.VerifyWarning') def test_no_asdf_header(tmpdir): content = b"What? This ain't no ASDF file" path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with pytest.raises(ValueError): asdf.open(buff) with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): asdf.open(fd) def test_no_asdf_blocks(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: bar ... XXXXXXXX """ path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with asdf.open(buff) as ff: assert len(ff.blocks) == 0 buff.seek(0) fd = generic_io.InputStream(buff, 'r') with asdf.open(fd) as ff: assert len(ff.blocks) == 0 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with asdf.open(fd) as ff: assert len(ff.blocks) == 0 def test_invalid_source(small_tree): buff = io.BytesIO() ff = asdf.AsdfFile(small_tree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. 
ff.write_to(buff, all_array_storage='internal') buff.seek(0) with asdf.open(buff) as ff2: ff2.blocks.get_block(0) with pytest.raises(ValueError): ff2.blocks.get_block(2) with pytest.raises(IOError): ff2.blocks.get_block("http://ABadUrl.verybad/test.asdf") with pytest.raises(TypeError): ff2.blocks.get_block(42.0) with pytest.raises(ValueError): ff2.blocks.get_source(42.0) block = ff2.blocks.get_block(0) assert ff2.blocks.get_source(block) == 0 def test_empty_file(): buff = io.BytesIO(b"#ASDF 1.0.0\n") buff.seek(0) with asdf.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 buff = io.BytesIO(b"#ASDF 1.0.0\n#ASDF_STANDARD 1.0.0") buff.seek(0) with asdf.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 @pytest.mark.filterwarnings('ignore::astropy.io.fits.verify.VerifyWarning') def test_not_asdf_file(): buff = io.BytesIO(b"SIMPLE") buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass buff = io.BytesIO(b"SIMPLE\n") buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_junk_file(): buff = io.BytesIO(b"#ASDF 1.0.0\nFOO") buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_block_mismatch(): # This is a file with a single small block, followed by something # that has an invalid block magic number. buff = io.BytesIO( b'#ASDF 1.0.0\n\xd3BLK\x00\x28\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0FOOBAR') buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_block_header_too_small(): # The block header size must be at least 40 buff = io.BytesIO( b'#ASDF 1.0.0\n\xd3BLK\0\0') buff.seek(0) with pytest.raises(ValueError): with asdf.open(buff): pass def test_invalid_version(tmpdir): content = b"""#ASDF 0.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-0.1.0 foo : bar ...""" buff = io.BytesIO(content) with pytest.raises(ValueError): with asdf.open(buff): pass def test_valid_version(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo : bar ...""" buff = io.BytesIO(content) with asdf.open(buff) as ff: version = ff.file_format_version assert version.major == 1 assert version.minor == 0 assert version.patch == 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/test_fits_embed.py0000644000537500020070000004233600000000000021377 0ustar00wjamiesonSTSCI\scienceimport copy import os import pytest import numpy as np from numpy.testing import assert_array_equal astropy = pytest.importorskip('astropy') from astropy.io import fits from astropy.table import Table from jsonschema.exceptions import ValidationError import asdf from asdf import get_config from asdf import fits_embed from asdf import open as asdf_open from asdf.exceptions import AsdfWarning, AsdfConversionWarning from .helpers import ( assert_tree_match, get_test_data_path, yaml_to_asdf, assert_no_warnings, ) TEST_DTYPES = ['f8', 'u4', 'i4'] def create_asdf_in_fits(dtype): """Test fixture to create AsdfInFits object to use for testing""" hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype))) tree = { 'model': { 'sci': { 'data': hdulist[0].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist[1].data, 'wcs': 'WCS info' }, 'err': { 'data': hdulist[2].data, 'wcs': 'WCS info' } } } return fits_embed.AsdfInFits(hdulist, tree) # Testing backwards compatibility ensures that we can continue to read and # write files that use the old convention of ImageHDU to store the ASDF file. @pytest.mark.parametrize('backwards_compat', [False, True]) @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_embed_asdf_in_fits_file(tmpdir, backwards_compat, dtype): fits_testfile = str(tmpdir.join('test.fits')) asdf_testfile = str(tmpdir.join('test.asdf')) hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype), name='DQ')) # Test a name with underscores to make sure it works hdulist.append(fits.ImageHDU(np.arange(512, dtype=dtype), name='WITH_UNDERSCORE')) tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist['DQ'].data, 'wcs': 'WCS info' }, 'with_underscore': { 'data': hdulist['WITH_UNDERSCORE'].data, 'wcs': 'WCS info' } } } ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to(fits_testfile, use_image_hdu=backwards_compat) with fits.open(fits_testfile) as hdulist2: assert len(hdulist2) == 4 assert [x.name for x in hdulist2] == ['SCI', 'DQ', 'WITH_UNDERSCORE', 'ASDF'] assert_array_equal(hdulist2[0].data, np.arange(512, dtype=dtype)) asdf_hdu = hdulist2['ASDF'] assert asdf_hdu.data.tobytes().startswith(b'#ASDF') # When in backwards compatibility mode, the ASDF file will be contained # in an ImageHDU if backwards_compat: assert isinstance(asdf_hdu, fits.ImageHDU) assert asdf_hdu.data.tobytes().strip().endswith(b'...') else: assert isinstance(asdf_hdu, fits.BinTableHDU) with fits_embed.AsdfInFits.open(hdulist2) as ff2: assert_tree_match(tree, ff2.tree) ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to(asdf_testfile) with asdf.open(asdf_testfile) as ff: assert_tree_match(tree, ff.tree) @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_embed_asdf_in_fits_file_anonymous_extensions(tmpdir, dtype): # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = 
create_asdf_in_fits(dtype) asdf_in_fits.write_to(os.path.join(str(tmpdir), 'test.fits')) ff2 = asdf.AsdfFile(asdf_in_fits.tree) ff2.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with fits.open(os.path.join(str(tmpdir), 'test.fits')) as hdulist: assert len(hdulist) == 4 assert [x.name for x in hdulist] == ['PRIMARY', '', '', 'ASDF'] asdf_hdu = hdulist['ASDF'] assert isinstance(asdf_hdu, fits.BinTableHDU) assert asdf_hdu.data.tobytes().startswith(b'#ASDF') with fits_embed.AsdfInFits.open(hdulist) as ff2: assert_tree_match(asdf_in_fits.tree, ff2.tree) ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to(os.path.join(str(tmpdir), 'test.asdf')) with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff: assert_tree_match(asdf_in_fits.tree, ff.tree) @pytest.mark.xfail( reason="In-place update for ASDF-in-FITS does not currently work") @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_update_in_place(tmpdir, dtype): tempfile = str(tmpdir.join('test.fits')) # Create a file and write it out asdf_in_fits = create_asdf_in_fits(dtype) asdf_in_fits.write_to(tempfile) # Open the file and add data so it needs to be updated with fits_embed.AsdfInFits.open(tempfile) as ff: ff.tree['new_stuff'] = "A String" ff.update() # Open the updated file and make sure everything looks okay with fits_embed.AsdfInFits.open(tempfile) as ff: assert ff.tree['new_stuff'] == "A String" assert_tree_match(ff.tree['model'], asdf_in_fits.tree['model']) @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_update_and_write_new(tmpdir, dtype): tempfile = str(tmpdir.join('test.fits')) newfile = str(tmpdir.join('new.fits')) # Create a file and write it out asdf_in_fits = create_asdf_in_fits(dtype) asdf_in_fits.write_to(tempfile) # Open the file and add data so it needs to be updated with fits_embed.AsdfInFits.open(tempfile) as ff: ff.tree['new_stuff'] = "A String" ff.write_to(newfile) # Open the updated file and make sure everything looks okay with fits_embed.AsdfInFits.open(newfile) as ff: assert ff.tree['new_stuff'] == "A String" assert_tree_match(ff.tree['model'], asdf_in_fits.tree['model']) @pytest.mark.xfail( reason="ASDF HDU implementation does not currently reseek after writing") @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_access_hdu_data_after_write(tmpdir, dtype): # There is actually probably not a great reason to support this kind of # functionality, but I am adding a test here to record the failure for # posterity. 
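# [Editor's aside] For orientation, the round trip exercised throughout this
# module can be reduced to the following sketch. It reuses this module's
# np/fits/fits_embed imports; the helper name and array contents are
# illustrative only:
def _asdf_in_fits_roundtrip_sketch(path):
    data = np.arange(16, dtype="f8")
    hdulist = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(data, name="SCI")])
    tree = {"model": {"sci": {"data": hdulist["SCI"].data, "wcs": "WCS info"}}}
    with fits_embed.AsdfInFits(hdulist, tree) as af:
        af.write_to(path)  # appends a BinTableHDU named 'ASDF' to the FITS file
    with fits_embed.AsdfInFits.open(path) as af:
        # The tree comes back with 'data' pointing at the FITS HDU contents.
        assert af.tree["model"]["sci"]["data"].sum() == data.sum()
# (End of aside; the xfail test body begins below.)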
tempfile = str(tmpdir.join('test.fits')) asdf_in_fits = create_asdf_in_fits(dtype) asdf_in_fits.write_to(tempfile) asdf_hdu = asdf_in_fits._hdulist['ASDF'] assert asdf_hdu.data.tobytes().startswith(b'#ASDF') @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_create_in_tree_first(tmpdir, dtype): tree = { 'model': { 'sci': { 'data': np.arange(512, dtype=dtype), 'wcs': 'WCS info' }, 'dq': { 'data': np.arange(512, dtype=dtype), 'wcs': 'WCS info' }, 'err': { 'data': np.arange(512, dtype=dtype), 'wcs': 'WCS info' } } } hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(tree['model']['sci']['data'])) hdulist.append(fits.ImageHDU(tree['model']['dq']['data'])) hdulist.append(fits.ImageHDU(tree['model']['err']['data'])) tmpfile = os.path.join(str(tmpdir), 'test.fits') with fits_embed.AsdfInFits(hdulist, tree) as ff: ff.write_to(tmpfile) with asdf.AsdfFile(tree) as ff: ff.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with asdf.open(os.path.join(str(tmpdir), 'plain.asdf')) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=dtype)) # This tests the changes that allow FITS files with ASDF extensions to be # opened directly by the top-level asdf.open API with asdf_open(tmpfile) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=dtype)) def compare_asdfs(asdf0, asdf1): # Make sure the trees match assert_tree_match(asdf0.tree, asdf1.tree) # Compare the data blocks for key in asdf0.tree['model'].keys(): assert_array_equal( asdf0.tree['model'][key]['data'], asdf1.tree['model'][key]['data']) @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_asdf_in_fits_open(tmpdir, dtype): """Test the open method of AsdfInFits""" tmpfile = os.path.join(str(tmpdir), 'test.fits') # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits(dtype) asdf_in_fits.write_to(tmpfile) # Test opening the file directly from the URI with fits_embed.AsdfInFits.open(tmpfile) as ff: compare_asdfs(asdf_in_fits, ff) # Test open/close without context manager ff = fits_embed.AsdfInFits.open(tmpfile) compare_asdfs(asdf_in_fits, ff) ff.close() # Test reading in the file from an already-opened file handle with open(tmpfile, 'rb') as handle: with fits_embed.AsdfInFits.open(handle) as ff: compare_asdfs(asdf_in_fits, ff) # Test opening the file as a FITS file first and passing the HDUList with fits.open(tmpfile) as hdulist: with fits_embed.AsdfInFits.open(hdulist) as ff: compare_asdfs(asdf_in_fits, ff) @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_asdf_open(tmpdir, dtype): """Test the top-level open method of the asdf module""" tmpfile = os.path.join(str(tmpdir), 'test.fits') # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits(dtype) asdf_in_fits.write_to(tmpfile) # Test opening the file directly from the URI with asdf_open(tmpfile) as ff: compare_asdfs(asdf_in_fits, ff) # Test open/close without context manager ff = asdf_open(tmpfile) compare_asdfs(asdf_in_fits, ff) ff.close() # Test reading in the file from an already-opened file handle with open(tmpfile, 'rb') as handle: with asdf_open(handle) as ff: compare_asdfs(asdf_in_fits, ff) def test_validate_on_read(tmpdir): tmpfile = str(tmpdir.join('invalid.fits')) content = """ invalid_software: !core/software-1.0.0 name: Minesweeper version: 3 """ buff = yaml_to_asdf(content) hdul = fits.HDUList() data = np.array(buff.getbuffer(), dtype=np.uint8)[None, :] fmt = '{}B'.format(len(data[0])) column = fits.Column(array=data,
format=fmt, name='ASDF_METADATA') hdu = fits.BinTableHDU.from_columns([column], name='ASDF') hdul.append(hdu) hdul.writeto(tmpfile) for open_method in [asdf.open, fits_embed.AsdfInFits.open]: with pytest.raises(ValidationError): get_config().validate_on_read = True with open_method(tmpfile): pass get_config().validate_on_read = False with open_method(tmpfile) as af: assert af["invalid_software"]["name"] == "Minesweeper" assert af["invalid_software"]["version"] == 3 def test_open_gzipped(): testfile = get_test_data_path('asdf.fits.gz') with fits_embed.AsdfInFits.open(testfile) as af: assert af.tree['stuff'].shape == (20, 20) @pytest.mark.filterwarnings('ignore::astropy.io.fits.verify.VerifyWarning') def test_bad_input(tmpdir): """Make sure these functions behave properly with bad input""" text_file = os.path.join(str(tmpdir), 'test.txt') with open(text_file, 'w') as fh: fh.write('I <3 ASDF!!!!!') with pytest.raises(ValueError): asdf_open(text_file) def test_version_mismatch_file(): testfile = str(get_test_data_path('version_mismatch.fits')) with pytest.warns(AsdfConversionWarning, match="tag:stsci.edu:asdf/core/complex"): with asdf.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) # Make sure warning does not occur when warning is ignored (default) with assert_no_warnings(AsdfConversionWarning): with asdf.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) with pytest.warns(AsdfConversionWarning, match="tag:stsci.edu:asdf/core/complex"): with fits_embed.AsdfInFits.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) # Make sure warning does not occur when warning is ignored (default) with assert_no_warnings(AsdfConversionWarning): with fits_embed.AsdfInFits.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) def test_serialize_table(tmpdir): tmpfile = str(tmpdir.join('table.fits')) data = np.random.random((10, 10)) table = Table(data) hdu = fits.BinTableHDU(table) hdulist = fits.HDUList() hdulist.append(hdu) tree = {'my_table': hdulist[1].data} with fits_embed.AsdfInFits(hdulist, tree) as ff: ff.write_to(tmpfile) with asdf.open(tmpfile) as ff: data = ff.tree['my_table'] assert data._source.startswith('fits:') def test_extension_check(): testfile = get_test_data_path('extension_check.fits') with pytest.warns(AsdfWarning, match="was created with extension class 'foo.bar.FooBar'"): with asdf.open(testfile): pass # Make sure that suppressing the warning works as well with assert_no_warnings(): with asdf.open(testfile, ignore_missing_extensions=True): pass with pytest.raises(RuntimeError): with asdf.open(testfile, strict_extension_check=True): pass @pytest.mark.parametrize('dtype', TEST_DTYPES) def test_verify_with_astropy(tmpdir, dtype): tmpfile = str(tmpdir.join('asdf.fits')) with create_asdf_in_fits(dtype) as aif: aif.write_to(tmpfile) with fits.open(tmpfile) as hdu: hdu.verify('exception') def test_dangling_file_handle(tmpdir): """ This tests the bug fix introduced in #533. Without the bug fix, this test will fail when running the test suite with pytest-openfiles. 
""" import gc fits_filename = str(tmpdir.join('dangling.fits')) # Create FITS file to use for test hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=float))) hdulist.writeto(fits_filename) hdulist.close() hdul = fits.open(fits_filename) gc.collect() ctx = asdf.AsdfFile() gc.collect() ctx.blocks.find_or_create_block_for_array(hdul[0].data, ctx) gc.collect() hdul.close() gc.collect() ctx.close() gc.collect() del ctx def test_array_view(tmp_path): """ Special handling is required when a view over a larger array is assigned to an HDU and referenced from the ASDF tree. """ file_path = tmp_path / "test.fits" data = np.arange(100, dtype=np.float64).reshape(5, 20) data_view = data[:, :10] hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(data_view)]) with asdf.fits_embed.AsdfInFits(hdulist=hdul) as af: af["data"] = hdul[-1].data af.write_to(file_path) with asdf.open(file_path) as af: assert_array_equal(af["data"], data_view) def test_array_view_compatible_layout(tmp_path): """ We should be able to serialize additional views that have the same memory layout. """ file_path = tmp_path / "test.fits" data = np.arange(100, dtype=np.float64).reshape(5, 20) data_view = data[:, :10] other_view = data_view[:, :] hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(data_view)]) with asdf.fits_embed.AsdfInFits(hdulist=hdul) as af: af["data"] = hdul[-1].data af["other"] = other_view af.write_to(file_path) with asdf.open(file_path) as af: assert_array_equal(af["data"], data_view) assert_array_equal(af["other"], other_view) def test_array_view_compatible_dtype(tmp_path): """ Changing the dtype of a view over a FITS array is prohibited. """ file_path = tmp_path / "test.fits" data = np.arange(100, dtype=np.float64) hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(data)]) with pytest.raises(ValueError, match="ASDF has only limited support for serializing views over arrays stored in FITS HDUs"): with asdf.fits_embed.AsdfInFits(hdulist=hdul) as af: af["view"] = hdul[-1].data.view(np.int64) af.write_to(file_path) def test_array_view_different_layout(tmp_path): """ A view over the FITS array with a different memory layout might end up corrupted when astropy.io.fits changes the array to C-contiguous and big-endian on write. """ file_path = tmp_path / "test.fits" data = np.arange(100, dtype=np.float64).reshape(5, 20) data_view = data[:, :10] other_view = data_view[:, 10:] hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(data_view)]) with asdf.fits_embed.AsdfInFits(hdulist=hdul) as af: af["data"] = hdul[-1].data af["other"] = other_view with pytest.raises(ValueError, match="ASDF has only limited support for serializing views over arrays stored in FITS HDUs"): af.write_to(file_path) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_generic_io.py0000644000537500020070000005713600000000000021405 0ustar00wjamiesonSTSCI\scienceimport io import os import sys import pytest import urllib.request as urllib_request import numpy as np import asdf from asdf import exceptions from asdf import util from asdf import generic_io from asdf.config import config_context from . 
import helpers, create_small_tree, create_large_tree @pytest.fixture(params=[create_small_tree, create_large_tree]) def tree(request): return request.param() def _roundtrip(tree, get_write_fd, get_read_fd, write_options={}, read_options={}): # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. write_options.setdefault('all_array_storage', 'internal') with get_write_fd() as fd: asdf.AsdfFile(tree).write_to(fd, **write_options) # Work around the fact that generic_io's get_file doesn't have a way of # determining whether or not the underlying file handle should be # closed as part of the exit handler if isinstance(fd._fd, io.FileIO): fd._fd.close() with get_read_fd() as fd: ff = asdf.open(fd, **read_options) helpers.assert_tree_match(tree, ff.tree) return ff def test_mode_fail(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with pytest.raises(ValueError): generic_io.get_file(path, mode="r+") def test_open(tmpdir, small_tree): from .. import open path = os.path.join(str(tmpdir), 'test.asdf') # Simply tests the high-level "open" function ff = asdf.AsdfFile(small_tree) ff.write_to(path) with open(path) as ff2: helpers.assert_tree_match(ff2.tree, ff.tree) def test_path(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(path, mode='w') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): # Must open with mode=rw in order to get memmapped data f = generic_io.get_file(path, mode='rw') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) # This is to check for a "feature" in Python 3.x that reading zero # bytes from a socket causes it to stop. We have code in generic_io.py # to workaround it. 
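# [Editor's aside] `generic_io.get_file` wraps many kinds of inputs in a
# uniform interface. A small sketch (hypothetical helper name) of the wrapper
# classes asserted throughout this module, reusing its io/os/generic_io/util
# imports:
def _get_file_sketch(tmpdir):
    path = os.path.join(str(tmpdir), "sketch.asdf")
    with generic_io.get_file(path, mode="w") as fd:
        # A filesystem path is wrapped in RealFile and gets a file:// URI.
        assert isinstance(fd, generic_io.RealFile)
        assert fd._uri == util.filepath_to_url(path)
    # An in-memory buffer is wrapped in MemoryIO instead.
    assert isinstance(generic_io.get_file(io.BytesIO(), mode="rw"), generic_io.MemoryIO)
# (End of aside; the zero-length read described above follows.)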
f.read(0) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 next(ff.blocks.internal_blocks).data assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) def test_open2(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(open(path, 'wb'), mode='w', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): # Must open with mode=rw in order to get memmapped data f = generic_io.get_file(open(path, 'r+b'), mode='rw', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) def test_open_fail(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='w') def test_open_fail2(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with io.open(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='w') def test_open_fail3(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: fd.write("\n\n\n") with open(path, 'r') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='r') def test_open_fail4(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: fd.write("\n\n\n") with io.open(path, 'r') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='r') def test_io_open(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(io.open(path, 'wb'), mode='w', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): f = generic_io.get_file(io.open(path, 'r+b'), mode='rw', close=True) assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) ff.tree['science_data'][0] = 42 def test_close_underlying(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with generic_io.get_file(open(path, 'wb'), mode='w', close=True) as ff: pass assert ff.is_closed() == True assert ff._fd.closed == True with generic_io.get_file(open(path, 'rb'), close=True) as ff2: pass assert ff2.is_closed() == True assert ff2._fd.closed == True def test_bytes_io(tree): buff = io.BytesIO() def get_write_fd(): f = generic_io.get_file(buff, mode='w') assert isinstance(f, generic_io.MemoryIO) return f def get_read_fd(): buff.seek(0) f = generic_io.get_file(buff, mode='rw') assert isinstance(f, generic_io.MemoryIO) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] = 42 def test_streams(tree): buff = io.BytesIO() def get_write_fd(): return generic_io.OutputStream(buff) def get_read_fd(): buff.seek(0) return generic_io.InputStream(buff, 'rw') with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(ff.blocks) == 2 assert not 
isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] = 42 def test_streams2(): buff = io.BytesIO(b'\0' * 60) buff.seek(0) fd = generic_io.InputStream(buff, 'r') x = fd.peek(10) x = fd.read() assert len(x) == 60 @pytest.mark.remote_data def test_urlopen(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): return generic_io.get_file( urllib_request.urlopen( httpserver.url + "test.asdf")) with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) @pytest.mark.remote_data def test_http_connection(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): fd = generic_io.get_file(httpserver.url + "test.asdf") # This is to check for a "feature" in Python 3.x that reading zero # bytes from a socket causes it to stop. We have code in generic_io.py # to workaround it. fd.read(0) return fd with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] == 42 def test_exploded_filesystem(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): return generic_io.get_file(path, mode='r') with _roundtrip(tree, get_write_fd, get_read_fd, write_options={'all_array_storage': 'external'}) as ff: assert len(list(ff.blocks.internal_blocks)) == 0 assert len(list(ff.blocks.external_blocks)) == 2 def test_exploded_filesystem_fail(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): fd = io.BytesIO() with open(path, mode='rb') as fd2: fd.write(fd2.read()) fd.seek(0) return fd with get_write_fd() as fd: asdf.AsdfFile(tree).write_to(fd, all_array_storage='external') with get_read_fd() as fd: with asdf.open(fd) as ff: with pytest.raises(ValueError): helpers.assert_tree_match(tree, ff.tree) @pytest.mark.remote_data def test_exploded_http(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): return generic_io.get_file(httpserver.url + "test.asdf") with _roundtrip(tree, get_write_fd, get_read_fd, write_options={'all_array_storage': 'external'}) as ff: assert len(list(ff.blocks.internal_blocks)) == 0 assert len(list(ff.blocks.external_blocks)) == 2 def test_exploded_stream_write(small_tree): # Writing an exploded file to an output stream should fail, since # we can't write "files" alongside it. ff = asdf.AsdfFile(small_tree) with pytest.raises(ValueError): ff.write_to(io.BytesIO(), all_array_storage='external') def test_exploded_stream_read(tmpdir, small_tree): # Reading from an exploded input file should fail, but only once # the data block is accessed. This behavior is important so that # the tree can still be accessed even if the data is missing. 
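# [Editor's aside] The "exploded" form exercised below stores each binary
# block in a sidecar file next to the main ASDF file. A minimal sketch,
# assuming an otherwise-empty tmpdir (helper name hypothetical), reusing this
# module's os/np/asdf imports:
def _exploded_write_sketch(tmpdir):
    path = os.path.join(str(tmpdir), "exploded.asdf")
    asdf.AsdfFile({"science_data": np.arange(16)}).write_to(
        path, all_array_storage="external")
    # One sidecar file is expected per binary block alongside the main file.
    sidecars = [name for name in os.listdir(str(tmpdir)) if name != "exploded.asdf"]
    assert len(sidecars) == 1
# (End of aside; the original test continues below.)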
path = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(small_tree) ff.write_to(path, all_array_storage='external') with open(path, 'rb') as fd: # This should work, so we can get the tree content x = generic_io.InputStream(fd, 'r') with asdf.open(x) as ff: # It's only when trying to access external data that an error occurs with pytest.raises(ValueError): ff.tree['science_data'][:] def test_unicode_open(tmpdir, small_tree): path = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(small_tree) ff.write_to(path) with io.open(path, 'rt', encoding="utf-8") as fd: with pytest.raises(ValueError): with asdf.open(fd): pass def test_invalid_obj(tmpdir): with pytest.raises(ValueError): generic_io.get_file(42) path = os.path.join(str(tmpdir), 'test.asdf') with generic_io.get_file(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, 'r') with pytest.raises(ValueError): generic_io.get_file("http://www.google.com", "w") with pytest.raises(TypeError): generic_io.get_file(io.StringIO()) with open(path, 'rb') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, 'w') with io.open(path, 'rb') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, 'w') with generic_io.get_file(sys.__stdout__, 'w'): pass def test_nonseekable_file(tmpdir): base = io.IOBase class FileWrapper(base): def tell(self): raise IOError() def seekable(self): return False def readable(self): return True def writable(self): return True with FileWrapper(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: assert isinstance(generic_io.get_file(fd, 'w'), generic_io.OutputStream) with pytest.raises(ValueError): generic_io.get_file(fd, 'rw') with FileWrapper(os.path.join(str(tmpdir), 'test.asdf'), 'rb') as fd: assert isinstance(generic_io.get_file(fd, 'r'), generic_io.InputStream) def test_relative_uri(): assert generic_io.relative_uri( 'http://www.google.com', 'file://local') == 'file://local' @pytest.mark.parametrize("protocol", ["http", "asdf"]) def test_resolve_uri(protocol): """ Confirm that the patched urllib.parse is handling asdf:// URIs correctly. 
""" assert generic_io.resolve_uri( '{}://somewhere.org/some-schema'.format(protocol), '#/definitions/foo' ) == '{}://somewhere.org/some-schema#/definitions/foo'.format(protocol) assert generic_io.resolve_uri( '{}://somewhere.org/path/to/some-schema'.format(protocol), '../../some/other/path/to/some-other-schema' ) == '{}://somewhere.org/some/other/path/to/some-other-schema'.format(protocol) def test_arbitrary_file_object(): class Wrapper: def __init__(self, init): self._fd = init class Random: def seek(self, *args): return self._fd.seek(*args) def tell(self, *args): return self._fd.tell(*args) class Reader(Wrapper): def read(self, *args): return self._fd.read(*args) class RandomReader(Reader, Random): pass class Writer(Wrapper): def write(self, *args): return self._fd.write(*args) class RandomWriter(Writer, Random): pass class All(Reader, Writer, Random): pass buff = io.BytesIO() assert isinstance( generic_io.get_file(Reader(buff), 'r'), generic_io.InputStream) assert isinstance( generic_io.get_file(Writer(buff), 'w'), generic_io.OutputStream) assert isinstance( generic_io.get_file(RandomReader(buff), 'r'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(RandomWriter(buff), 'w'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'rw'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'r'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'w'), generic_io.MemoryIO) with pytest.raises(ValueError): generic_io.get_file(Reader(buff), 'w') with pytest.raises(ValueError): generic_io.get_file(Writer(buff), 'r') def test_check_bytes(tmpdir): with io.open(os.path.join(str(tmpdir), 'test.asdf'), 'w', encoding='utf-8') as fd: assert generic_io._check_bytes(fd, 'r') is False assert generic_io._check_bytes(fd, 'rw') is False assert generic_io._check_bytes(fd, 'w') is False with io.open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: assert generic_io._check_bytes(fd, 'r') is True assert generic_io._check_bytes(fd, 'rw') is True assert generic_io._check_bytes(fd, 'w') is True def test_truncated_reader(): """ Tests several edge cases for _TruncatedReader.read() Includes regression test for https://github.com/asdf-format/asdf/pull/181 """ # TODO: Should probably break this up into multiple test cases fd = generic_io.RandomAccessFile(io.BytesIO(), 'rw') content = b'a' * 100 + b'b' fd.write(content) fd.seek(0) # Simple cases where the delimiter is not found at all tr = generic_io._TruncatedReader(fd, b'x', 1) with pytest.raises(exceptions.DelimiterNotFoundError): tr.read() fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1) assert tr.read(100) == content[:100] assert tr.read(1) == content[100:] with pytest.raises(exceptions.DelimiterNotFoundError): tr.read() fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, exception=False) assert tr.read() == content # No delimiter but with 'initial_content' init = b'abcd' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read(100) == (init + content)[:100] assert tr.read() == (init + content)[100:] fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read() == init + content fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read(2) == init[:2] assert tr.read() == init[2:] + content # Some tests of a single character delimiter # Add some trailing data after the delimiter fd.seek(0, 2) fd.write(b'ffff') # Delimiter not 
included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1) assert tr.read(100) == content[:100] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1) assert tr.read() == content[:100] # Delimiter included fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read() == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(101) == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(102) == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(100) == content[:100] assert tr.read(1) == content[100:101] assert tr.read() == b'' # Longer delimiter with variable length content = b'a' * 100 + b'\n...\n' + b'ffffff' delimiter = br'\r?\n\.\.\.((\r?\n)|$)' readahead = 7 fd = generic_io.RandomAccessFile(io.BytesIO(), 'rw') fd.write(content) # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read() == content[:100] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(100) == content[:100] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read() == content[99:100] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read(2) == content[99:100] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read(50) == content[99:100] assert tr.read() == b'' # Same as the previous set of tests, but including the delimiter fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read() == content[:105] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(105) == content[:105] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read() == content[99:105] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read(2) == content[99:101] assert tr.read() == content[101:105] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read(50) == content[99:105] assert tr.read() == b'' # Same sequence of tests but with some 'initial_content' init = b'abcd' # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read() == (init + content[:100]) assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(100) == (init + content[:96]) assert tr.read() == content[96:100] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == 
(init + content[:95]) assert tr.read() == content[95:100] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(6) == content[95:100] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(50) == content[95:100] assert tr.read() == b'' # Same as the previous set of tests, but including the delimiter fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read() == (init + content[:105]) assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(105) == (init + content[:101]) assert tr.read() == content[101:105] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(103) == (init + content[:99]) assert tr.read() == content[99:105] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(6) == content[95:101] assert tr.read() == content[101:105] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(50) == content[95:105] assert tr.read() == b'' def test_blocksize(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(open(path, 'wb'), mode='w', close=True) return f def get_read_fd(): # Must open with mode=rw in order to get memmapped data f = generic_io.get_file(open(path, 'r+b'), mode='rw', close=True) return f with config_context() as config: config.io_block_size = 1233 # make sure everything works with a strange blocksize with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert ff._fd.block_size == 1233 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/test_helpers.py0000644000537500020070000000220100000000000020723 0ustar00wjamiesonSTSCI\scienceimport pytest from asdf import types from asdf.exceptions import AsdfConversionWarning, AsdfWarning from asdf.tests.helpers import assert_roundtrip_tree def test_conversion_error(tmpdir): class FooType(types.CustomType): name = 'foo' def __init__(self, a, b): self.a = a self.b = b @classmethod def from_tree(cls, tree, ctx): raise TypeError("This allows us to test the failure") @classmethod def to_tree(cls, node, ctx): return dict(a=node.a, b=node.b) def __eq__(self, other): return self.a == other.a and self.b == other.b class FooExtension: @property def types(self): return [FooType] @property def tag_mapping(self): return [] @property def url_mapping(self): return [] foo = FooType(10, 'hello') tree = dict(foo=foo) with pytest.raises(AsdfConversionWarning): with pytest.warns(AsdfWarning, match="Unable to locate schema file"): assert_roundtrip_tree(tree, tmpdir, extensions=FooExtension()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 
asdf-2.9.2/asdf/tests/test_integration.py0000644000537500020070000000655600000000000021625 0ustar00wjamiesonSTSCI\science""" Integration tests for the new plugin APIs. """ import pytest import asdf from asdf.extension import TagDefinition FOO_SCHEMA_URI = "asdf://somewhere.org/extensions/foo/schemas/foo-1.0" FOO_SCHEMA = f""" id: {FOO_SCHEMA_URI} type: object properties: value: type: string required: ["value"] """ class Foo: def __init__(self, value): self._value = value @property def value(self): return self._value class FooConverter: types = [Foo] tags = ["asdf://somewhere.org/extensions/foo/tags/foo-*"] def to_yaml_tree(self, obj, tag, ctx): return { "value": obj.value } def from_yaml_tree(self, obj, tag, ctx): return Foo(obj["value"]) class FooExtension: extension_uri = "asdf://somewhere.org/extensions/foo-1.0" converters = [FooConverter()] tags = [ TagDefinition( "asdf://somewhere.org/extensions/foo/tags/foo-1.0", schema_uris=FOO_SCHEMA_URI, ) ] def test_serialize_custom_type(tmpdir): with asdf.config_context() as config: config.add_resource_mapping({FOO_SCHEMA_URI: FOO_SCHEMA}) config.add_extension(FooExtension()) path = str(tmpdir/"test.asdf") af = asdf.AsdfFile() af["foo"] = Foo("bar") af.write_to(path) with asdf.open(path) as af2: assert af2["foo"].value == "bar" with pytest.raises(asdf.ValidationError): af["foo"] = Foo(12) af.write_to(path) FOOFOO_SCHEMA_URI = "asdf://somewhere.org/extensions/foo/schemas/foo_foo-1.0" FOOFOO_SCHEMA = f""" id: {FOOFOO_SCHEMA_URI} type: object properties: value_value: type: string required: ["value_value"] """ class FooFoo(Foo): def __init__(self, value, value_value): super().__init__(value) self._value_value = value_value @property def value_value(self): return self._value_value class FooFooConverter: types = [FooFoo] tags = ["asdf://somewhere.org/extensions/foo/tags/foo_foo-*"] def to_yaml_tree(self, obj, tag, ctx): return { "value": obj.value, "value_value": obj.value_value } def from_yaml_tree(self, obj, tag, ctx): return FooFoo(obj["value"], obj["value_value"]) class FooFooExtension: extension_uri = "asdf://somewhere.org/extensions/foo_foo-1.0" converters = [FooFooConverter()] tags = [ TagDefinition( "asdf://somewhere.org/extensions/foo/tags/foo_foo-1.0", schema_uris=[FOO_SCHEMA_URI, FOOFOO_SCHEMA_URI], ) ] def test_serialize_with_multiple_schemas(tmpdir): with asdf.config_context() as config: config.add_resource_mapping({FOO_SCHEMA_URI: FOO_SCHEMA, FOOFOO_SCHEMA_URI: FOOFOO_SCHEMA}) config.add_extension(FooFooExtension()) path = str(tmpdir/"test.asdf") af = asdf.AsdfFile() af["foo_foo"] = FooFoo("bar", "bar_bar") af.write_to(path) with asdf.open(path) as af2: assert af2["foo_foo"].value == "bar" assert af2["foo_foo"].value_value == "bar_bar" with pytest.raises(asdf.ValidationError): af["foo_foo"] = FooFoo(12, "bar_bar") af.write_to(path) with pytest.raises(asdf.ValidationError): af["foo_foo"] = FooFoo("bar", 34) af.write_to(path) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_reference.py0000644000537500020070000002015500000000000021227 0ustar00wjamiesonSTSCI\scienceimport io import os import numpy as np from numpy.testing import assert_array_equal import pytest import asdf from asdf import reference from asdf import util from asdf.tags.core import ndarray from .helpers import assert_tree_match def test_external_reference(tmpdir): exttree = { 'cool_stuff': { 'a': np.array([0, 1, 2], float), 'b': np.array([3, 4, 5], float) }, 'list_of_stuff': [ 'foobar', 42, np.array([7, 
8, 9], float) ] } external_path = os.path.join(str(tmpdir), 'external.asdf') ext = asdf.AsdfFile(exttree) # Since we're testing with small arrays, force all arrays to be stored # in internal blocks rather than letting some of them be automatically put # inline. ext.write_to(external_path, all_array_storage='internal') external_path = os.path.join(str(tmpdir), 'external2.asdf') ff = asdf.AsdfFile(exttree) ff.write_to(external_path, all_array_storage='internal') tree = { # The special name "data" here must be an array. This is # included so that such validation can be ignored when we just # have a "$ref". 'data': { '$ref': 'external.asdf#/cool_stuff/a' }, 'science_data': { '$ref': 'external.asdf#/cool_stuff/a' }, 'science_data2': { '$ref': 'external2.asdf#/cool_stuff/a' }, 'foobar': { '$ref': 'external.asdf#/list_of_stuff/0', }, 'answer': { '$ref': 'external.asdf#/list_of_stuff/1' }, 'array': { '$ref': 'external.asdf#/list_of_stuff/2', }, 'whole_thing': { '$ref': 'external.asdf#' }, 'myself': { '$ref': '#', }, 'internal': { '$ref': '#science_data' } } def do_asserts(ff): assert 'unloaded' in repr(ff.tree['science_data']) assert 'unloaded' in str(ff.tree['science_data']) assert len(ff._external_asdf_by_uri) == 0 assert_array_equal(ff.tree['science_data'], exttree['cool_stuff']['a']) assert len(ff._external_asdf_by_uri) == 1 with pytest.raises((ValueError, RuntimeError)): # Assignment destination is readonly ff.tree['science_data'][0] = 42 assert_array_equal(ff.tree['science_data2'], exttree['cool_stuff']['a']) assert len(ff._external_asdf_by_uri) == 2 assert ff.tree['foobar']() == 'foobar' assert ff.tree['answer']() == 42 assert_array_equal(ff.tree['array'], exttree['list_of_stuff'][2]) assert_tree_match(ff.tree['whole_thing'](), exttree) assert_array_equal( ff.tree['whole_thing']['cool_stuff']['a'], exttree['cool_stuff']['a']) assert_array_equal( ff.tree['myself']['science_data'], exttree['cool_stuff']['a']) # Make sure that referencing oneself doesn't make another call # to disk. 
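# [Editor's aside] The `$ref` syntax exercised in this test is a JSON Pointer
# into another file's tree. A compact sketch (hypothetical helper name) of how
# resolution behaves, reusing this module's os/util/asdf imports:
def _reference_sketch(tmpdir):
    ext_path = os.path.join(str(tmpdir), "ext.asdf")
    asdf.AsdfFile({"answer": 42}).write_to(ext_path)
    main_path = os.path.join(str(tmpdir), "main.asdf")
    tree = {"remote": {"$ref": "ext.asdf#/answer"}}
    with asdf.AsdfFile(tree, uri=util.filepath_to_url(main_path)) as af:
        af.resolve_references()  # replaces the {'$ref': ...} node with its target
        assert af.tree["remote"] == 42
# (End of aside; the external-reference assertions continue below.)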
assert len(ff._external_asdf_by_uri) == 2 assert_array_equal(ff.tree['internal'], exttree['cool_stuff']['a']) with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: do_asserts(ff) internal_path = os.path.join(str(tmpdir), 'main.asdf') ff.write_to(internal_path) with asdf.open(internal_path) as ff: do_asserts(ff) with asdf.open(internal_path) as ff: assert len(ff._external_asdf_by_uri) == 0 ff.resolve_references() assert len(ff._external_asdf_by_uri) == 2 assert isinstance(ff.tree['data'], ndarray.NDArrayType) assert isinstance(ff.tree['science_data'], ndarray.NDArrayType) assert_array_equal(ff.tree['science_data'], exttree['cool_stuff']['a']) assert_array_equal(ff.tree['science_data2'], exttree['cool_stuff']['a']) assert ff.tree['foobar'] == 'foobar' assert ff.tree['answer'] == 42 assert_array_equal(ff.tree['array'], exttree['list_of_stuff'][2]) assert_tree_match(ff.tree['whole_thing'], exttree) assert_array_equal( ff.tree['whole_thing']['cool_stuff']['a'], exttree['cool_stuff']['a']) assert_array_equal( ff.tree['myself']['science_data'], exttree['cool_stuff']['a']) assert_array_equal(ff.tree['internal'], exttree['cool_stuff']['a']) @pytest.mark.remote_data def test_external_reference_invalid(tmpdir): tree = { 'foo': { '$ref': 'fail.asdf' } } ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.resolve_references() ff = asdf.AsdfFile(tree, uri="http://httpstat.us/404") with pytest.raises(IOError): ff.resolve_references() ff = asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) with pytest.raises(IOError): ff.resolve_references() def test_external_reference_invalid_fragment(tmpdir): exttree = { 'list_of_stuff': [ 'foobar', 42, np.array([7, 8, 9], float) ] } external_path = os.path.join(str(tmpdir), 'external.asdf') ff = asdf.AsdfFile(exttree) ff.write_to(external_path) tree = { 'foo': { '$ref': 'external.asdf#/list_of_stuff/a' } } with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: with pytest.raises(ValueError): ff.resolve_references() tree = { 'foo': { '$ref': 'external.asdf#/list_of_stuff/3' } } with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: with pytest.raises(ValueError): ff.resolve_references() def test_make_reference(tmpdir): exttree = { # Include some ~ and / in the name to make sure that escaping # is working correctly 'f~o~o/': { 'a': np.array([0, 1, 2], float), 'b': np.array([3, 4, 5], float) } } external_path = os.path.join(str(tmpdir), 'external.asdf') ext = asdf.AsdfFile(exttree) ext.write_to(external_path) with asdf.open(external_path) as ext: ff = asdf.AsdfFile() ff.tree['ref'] = ext.make_reference(['f~o~o/', 'a']) assert_array_equal(ff.tree['ref'], ext.tree['f~o~o/']['a']) ff.write_to(os.path.join(str(tmpdir), 'source.asdf')) with asdf.open(os.path.join(str(tmpdir), 'source.asdf')) as ff: assert ff.tree['ref']._uri == 'external.asdf#f~0o~0o~1/a' def test_internal_reference(tmpdir): testfile = os.path.join(str(tmpdir), 'test.asdf') tree = { 'foo': 2, 'bar': {'$ref': '#'} } ff = asdf.AsdfFile(tree) ff.find_references() assert isinstance(ff.tree['bar'], reference.Reference) ff.resolve_references() assert ff.tree['bar']['foo'] == 2 tree = { 'foo': 2 } ff = asdf.AsdfFile( tree, uri=util.filepath_to_url(os.path.abspath(testfile))) ff.tree['bar'] = ff.make_reference([]) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) ff = asdf.AsdfFile() content = asdf.AsdfFile()._open_impl(ff, buff, 
def test_internal_reference(tmpdir):
    testfile = os.path.join(str(tmpdir), 'test.asdf')

    tree = {
        'foo': 2,
        'bar': {'$ref': '#'}
    }

    ff = asdf.AsdfFile(tree)
    ff.find_references()
    assert isinstance(ff.tree['bar'], reference.Reference)
    ff.resolve_references()
    assert ff.tree['bar']['foo'] == 2

    tree = {
        'foo': 2
    }
    ff = asdf.AsdfFile(
        tree, uri=util.filepath_to_url(os.path.abspath(testfile)))
    ff.tree['bar'] = ff.make_reference([])
    buff = io.BytesIO()
    ff.write_to(buff)
    buff.seek(0)
    ff = asdf.AsdfFile()
    content = asdf.AsdfFile()._open_impl(ff, buff, _get_yaml_content=True)
    assert b"{$ref: ''}" in content


def test_implicit_internal_reference(tmpdir):
    target = {"foo": "bar"}
    nested_in_dict = {"target": target}
    nested_in_list = [target]
    tree = {"target": target, "nested_in_dict": nested_in_dict, "nested_in_list": nested_in_list}

    assert tree["target"] is tree["nested_in_dict"]["target"]
    assert tree["target"] is tree["nested_in_list"][0]

    af = asdf.AsdfFile(tree)

    assert af["target"] is af["nested_in_dict"]["target"]
    assert af["target"] is af["nested_in_list"][0]

    output_path = os.path.join(str(tmpdir), "test.asdf")
    af.write_to(output_path)

    with asdf.open(output_path) as af:
        assert af["target"] is af["nested_in_dict"]["target"]
        assert af["target"] is af["nested_in_list"][0]
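# Illustrative sketch (not a test from the original suite): shared objects
# like "target" above survive a round trip because the YAML layer writes an
# anchor for the first occurrence and aliases for the rest. The assertion on
# the raw bytes assumes pyyaml's default "&id..."/"*id..." anchor naming.
def _example_yaml_anchor_bytes():
    shared = {"foo": "bar"}
    af = asdf.AsdfFile({"a": shared, "b": shared})
    buff = io.BytesIO()
    af.write_to(buff)
    assert b"&id" in buff.getvalue()  # anchor on the first occurrence
    assert b"*id" in buff.getvalue()  # alias on the second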
# File: asdf-2.9.2/asdf/tests/test_reference_files.py

import os
import sys

import pytest

from asdf import open as asdf_open
from asdf import versioning

from .helpers import assert_tree_match


_REFFILE_PATH = os.path.join(os.path.dirname(__file__), '..', '..',
                             'asdf-standard', 'reference_files')


def get_test_id(reference_file_path):
    """Helper function to return the informative part of a schema path"""
    path = os.path.normpath(str(reference_file_path))
    return os.path.sep.join(path.split(os.path.sep)[-3:])


def collect_reference_files():
    """Function used by pytest to collect ASDF reference files for testing."""
    for version in versioning.supported_versions:
        version_dir = os.path.join(_REFFILE_PATH, str(version))
        if os.path.exists(version_dir):
            for filename in os.listdir(version_dir):
                if filename.endswith(".asdf"):
                    filepath = os.path.join(version_dir, filename)
                    basename, _ = os.path.splitext(filepath)
                    if os.path.exists(basename + ".yaml"):
                        yield filepath


def _compare_trees(name_without_ext, expect_warnings=False):
    asdf_path = name_without_ext + ".asdf"
    yaml_path = name_without_ext + ".yaml"

    with asdf_open(asdf_path) as af_handle:
        af_handle.resolve_and_inline()

        with asdf_open(yaml_path) as ref:

            def _compare_func():
                assert_tree_match(af_handle.tree, ref.tree,
                                  funcname='assert_allclose')

            if expect_warnings:
                # Make sure to only suppress warnings when they are expected.
                # However, there's still a chance of missing warnings that we
                # actually care about here.
                with pytest.warns(RuntimeWarning):
                    _compare_func()
            else:
                _compare_func()


@pytest.mark.parametrize(
    'reference_file', collect_reference_files(), ids=get_test_id)
def test_reference_file(reference_file):
    basename = os.path.basename(reference_file)
    name_without_ext, _ = os.path.splitext(reference_file)

    known_fail = False
    expect_warnings = 'complex' in reference_file

    if sys.maxunicode <= 65535:
        known_fail = known_fail or (basename in ('unicode_spp.asdf',))

    try:
        _compare_trees(name_without_ext, expect_warnings=expect_warnings)
    except Exception:
        if known_fail:
            pytest.xfail()
        else:
            raise


# File: asdf-2.9.2/asdf/tests/test_resolver.py

import pytest

from asdf.resolver import Resolver, ResolverChain
from asdf.exceptions import AsdfDeprecationWarning


def test_resolver_no_mappings():
    r = Resolver([], "test")
    assert r("united_states:maryland:baltimore") == "united_states:maryland:baltimore"


def test_resolver_tuple_mapping():
    r = Resolver([("united_states:", "earth:{test}")], "test")
    assert r("united_states:maryland:baltimore") == "earth:united_states:maryland:baltimore"

    r = Resolver([("united_states:", "{test_prefix}texas:houston")], "test")
    assert r("united_states:maryland:baltimore") == "united_states:texas:houston"

    r = Resolver([("united_states:", "{test_suffix}:hampden")], "test")
    assert r("united_states:maryland:baltimore") == "maryland:baltimore:hampden"


def test_resolver_callable_mapping():
    r = Resolver([lambda inp: "nowhere"], "test")
    assert r("united_states:maryland:baltimore") == "nowhere"


def test_resolver_multiple_mappings():
    r = Resolver([
        ("united_states:", "unknown_region:{test_suffix}"),
        ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}")
    ], "test")
    # Should choose the mapping with the longest matched prefix:
    assert r("united_states:maryland:baltimore") == "mid_atlantic:maryland:baltimore"

    r = Resolver([
        ("united_states:", "unknown_region:{test_suffix}"),
        lambda inp: "nowhere",
        ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}")
    ], "test")
    # Should prioritize the mapping offered by the callable:
    assert r("united_states:maryland:baltimore") == "nowhere"

    r = Resolver([
        ("united_states:", "unknown_region:{test_suffix}"),
        lambda inp: None,
        ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}")
    ], "test")
    # None from the callable is a signal that it can't handle the input,
    # so we should fall back to the longest matched prefix:
    assert r("united_states:maryland:baltimore") == "mid_atlantic:maryland:baltimore"


def test_resolver_non_prefix():
    r = Resolver([("maryland:", "shouldn't happen")], "test")
    assert r("united_states:maryland:baltimore") == "united_states:maryland:baltimore"


def test_resolver_invalid_mapping():
    with pytest.raises(ValueError):
        Resolver([("foo",)], "test")

    with pytest.raises(ValueError):
        Resolver([12], "test")


def test_resolver_hash_and_equals():
    r1 = Resolver([("united_states:", "earth:{test}")], "test")
    r2 = Resolver([("united_states:", "earth:{test}")], "test")
    r3 = Resolver([("united_states:", "{test}:hampden")], "test")

    assert hash(r1) == hash(r2)
    assert r1 == r2

    assert hash(r1) != hash(r3)
    assert r1 != r3


def test_resolver_add_mapping_deprecated():
    r = Resolver([], "test")
    with pytest.warns(AsdfDeprecationWarning):
        r.add_mapping([("united_states:", "earth:{test}")], "test")
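# Illustrative sketch (hypothetical mapping, not from the original suite):
# the placeholders used throughout these tests expand as follows for
# Resolver(mappings, "test") -- "{test}" is the full input, "{test_prefix}"
# is the matched prefix, and "{test_suffix}" is the remainder after it.
def _example_resolver_placeholders():
    r = Resolver(
        [("a:", "prefix={test_prefix} suffix={test_suffix} full={test}")],
        "test")
    assert r("a:b") == "prefix=a: suffix=b full=a:b"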
def test_resolver_chain():
    r1 = Resolver([("maryland:", "united_states:{test}")], "test")
    r2 = Resolver([("united_states:", "earth:{test}")], "test")
    chain = ResolverChain(r1, r2)
    assert chain("maryland:baltimore") == "earth:united_states:maryland:baltimore"


def test_resolver_chain_hash_and_equals():
    r1 = Resolver([("united_states:", "earth:{test}")], "test")
    r2 = Resolver([("united_states:", "earth:{test}")], "test")
    r3 = Resolver([("united_states:", "{test}:hampden")], "test")

    c1 = ResolverChain(r1, r3)
    c2 = ResolverChain(r2, r3)
    c3 = ResolverChain(r1, r2)

    assert hash(c1) == hash(c2)
    assert c1 == c2

    assert hash(c1) != hash(c3)
    assert c1 != c3


# File: asdf-2.9.2/asdf/tests/test_resource.py

import io
import sys
from pathlib import Path
from collections.abc import Mapping

if sys.version_info < (3, 9):
    import importlib_resources as importlib
else:
    import importlib

import pytest

from asdf.resource import (
    DirectoryResourceMapping,
    ResourceManager,
    ResourceMappingProxy,
    get_core_resource_mappings,
    JsonschemaResourceMapping,
)


def test_directory_resource_mapping(tmpdir):
    tmpdir.mkdir("schemas")
    (tmpdir/"schemas").mkdir("nested")
    with (tmpdir/"schemas"/"foo-1.2.3.yaml").open("w") as f:
        f.write("id: http://somewhere.org/schemas/foo-1.2.3\n")
    with (tmpdir/"schemas"/"nested"/"bar-4.5.6.yaml").open("w") as f:
        f.write("id: http://somewhere.org/schemas/nested/bar-4.5.6\n")
    with (tmpdir/"schemas"/"baz-7.8.9").open("w") as f:
        f.write("id: http://somewhere.org/schemas/baz-7.8.9\n")

    mapping = DirectoryResourceMapping(str(tmpdir/"schemas"), "http://somewhere.org/schemas")
    assert isinstance(mapping, Mapping)
    assert len(mapping) == 1
    assert set(mapping) == {"http://somewhere.org/schemas/foo-1.2.3"}
    assert "http://somewhere.org/schemas/foo-1.2.3" in mapping
    assert b"http://somewhere.org/schemas/foo-1.2.3" in mapping["http://somewhere.org/schemas/foo-1.2.3"]
    assert "http://somewhere.org/schemas/baz-7.8.9" not in mapping
    assert "http://somewhere.org/schemas/baz-7.8" not in mapping
    assert "http://somewhere.org/schemas/foo-1.2.3.yaml" not in mapping
    assert "http://somewhere.org/schemas/nested/bar-4.5.6" not in mapping

    mapping = DirectoryResourceMapping(str(tmpdir/"schemas"), "http://somewhere.org/schemas", recursive=True)
    assert len(mapping) == 2
    assert set(mapping) == {"http://somewhere.org/schemas/foo-1.2.3",
                            "http://somewhere.org/schemas/nested/bar-4.5.6"}
    assert "http://somewhere.org/schemas/foo-1.2.3" in mapping
    assert b"http://somewhere.org/schemas/foo-1.2.3" in mapping["http://somewhere.org/schemas/foo-1.2.3"]
    assert "http://somewhere.org/schemas/baz-7.8.9" not in mapping
    assert "http://somewhere.org/schemas/baz-7.8" not in mapping
    assert "http://somewhere.org/schemas/nested/bar-4.5.6" in mapping
    assert b"http://somewhere.org/schemas/nested/bar-4.5.6" in mapping["http://somewhere.org/schemas/nested/bar-4.5.6"]

    mapping = DirectoryResourceMapping(
        str(tmpdir/"schemas"),
        "http://somewhere.org/schemas",
        recursive=True,
        filename_pattern="baz-*",
        stem_filename=False
    )
    assert len(mapping) == 1
    assert set(mapping) == {"http://somewhere.org/schemas/baz-7.8.9"}
    assert "http://somewhere.org/schemas/foo-1.2.3" not in mapping
    assert "http://somewhere.org/schemas/baz-7.8.9" in mapping
    assert b"http://somewhere.org/schemas/baz-7.8.9" in mapping["http://somewhere.org/schemas/baz-7.8.9"]
    assert "http://somewhere.org/schemas/nested/bar-4.5.6" not in mapping
    # Check that the repr is reasonable
    # Need to be careful checking the path string because
    # pathlib normalizes Windows paths.
    assert repr(Path(str(tmpdir/"schemas"))) in repr(mapping)
    assert "http://somewhere.org/schemas" in repr(mapping)
    assert "recursive=True" in repr(mapping)
    assert "filename_pattern='baz-*'" in repr(mapping)
    assert "stem_filename=False" in repr(mapping)

    # Make sure trailing slash is handled correctly
    mapping = DirectoryResourceMapping(str(tmpdir/"schemas"), "http://somewhere.org/schemas/")
    assert len(mapping) == 1
    assert set(mapping) == {"http://somewhere.org/schemas/foo-1.2.3"}
    assert "http://somewhere.org/schemas/foo-1.2.3" in mapping
    assert b"http://somewhere.org/schemas/foo-1.2.3" in mapping["http://somewhere.org/schemas/foo-1.2.3"]


def test_directory_resource_mapping_with_traversable():
    """
    Confirm that DirectoryResourceMapping doesn't use pathlib.Path
    methods outside of the Traversable interface.
    """
    class MockTraversable(importlib.abc.Traversable):
        def __init__(self, name, value):
            self._name = name
            self._value = value

        def iterdir(self):
            if isinstance(self._value, dict):
                for key, child in self._value.items():
                    yield MockTraversable(key, child)

        def read_bytes(self):
            if not isinstance(self._value, bytes):
                raise RuntimeError("Not a file")
            return self._value

        def read_text(self, encoding="utf-8"):
            return self.read_bytes().decode(encoding)

        def is_dir(self):
            return isinstance(self._value, dict)

        def is_file(self):
            return self._value is not None and not isinstance(self._value, dict)

        def joinpath(self, child):
            if isinstance(self._value, dict):
                child_value = self._value.get(child)
            else:
                child_value = None
            return MockTraversable(child, child_value)

        def __truediv__(self, child):
            return self.joinpath(child)

        def open(self, mode="r", *args, **kwargs):
            if not self.is_file():
                raise RuntimeError("Not a file")
            if mode == "r":
                return io.TextIOWrapper(io.BytesIO(self._value), *args, **kwargs)
            elif mode == "rb":
                return io.BytesIO(self._value)
            else:
                raise ValueError("Not a valid mode")

        @property
        def name(self):
            return self._name

    root = MockTraversable("/path/to/some/root", {
        "foo-1.0.0.yaml": b"foo",
        "bar-1.0.0.yaml": b"bar",
        "baz-1.0.0": b"baz",
        "nested": {
            "foz-1.0.0.yaml": b"foz"
        }
    })

    mapping = DirectoryResourceMapping(root, "http://somewhere.org/schemas")
    assert len(mapping) == 2
    assert set(mapping) == {"http://somewhere.org/schemas/foo-1.0.0",
                            "http://somewhere.org/schemas/bar-1.0.0"}
    assert "http://somewhere.org/schemas/foo-1.0.0" in mapping
    assert mapping["http://somewhere.org/schemas/foo-1.0.0"] == b"foo"
    assert "http://somewhere.org/schemas/bar-1.0.0" in mapping
    assert mapping["http://somewhere.org/schemas/bar-1.0.0"] == b"bar"
    assert "http://somewhere.org/schemas/baz-1.0.0" not in mapping
    assert "http://somewhere.org/schemas/nested/foz-1.0.0" not in mapping

    mapping = DirectoryResourceMapping(root, "http://somewhere.org/schemas", recursive=True)
    assert len(mapping) == 3
    assert set(mapping) == {
        "http://somewhere.org/schemas/foo-1.0.0",
        "http://somewhere.org/schemas/bar-1.0.0",
        "http://somewhere.org/schemas/nested/foz-1.0.0"
    }
    assert "http://somewhere.org/schemas/foo-1.0.0" in mapping
    assert mapping["http://somewhere.org/schemas/foo-1.0.0"] == b"foo"
    assert "http://somewhere.org/schemas/bar-1.0.0" in mapping
    assert mapping["http://somewhere.org/schemas/bar-1.0.0"] == b"bar"
    assert "http://somewhere.org/schemas/baz-1.0.0" not in mapping
    assert "http://somewhere.org/schemas/nested/foz-1.0.0" in mapping
    assert mapping["http://somewhere.org/schemas/nested/foz-1.0.0"] == b"foz"

    mapping = DirectoryResourceMapping(root, "http://somewhere.org/schemas",
                                       filename_pattern="baz-*", stem_filename=False)
    assert len(mapping) == 1
    assert set(mapping) == {"http://somewhere.org/schemas/baz-1.0.0"}
    assert "http://somewhere.org/schemas/foo-1.0.0" not in mapping
    assert "http://somewhere.org/schemas/bar-1.0.0" not in mapping
    assert "http://somewhere.org/schemas/baz-1.0.0" in mapping
    assert mapping["http://somewhere.org/schemas/baz-1.0.0"] == b"baz"
    assert "http://somewhere.org/schemas/nested/foz-1.0.0" not in mapping
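# Illustrative sketch (not part of the original suite): a mapping like the
# ones constructed above is typically registered with asdf's configuration
# so that schema URIs under the prefix resolve to files on disk. The
# directory argument and URI prefix here are hypothetical.
def _example_register_directory_mapping(schema_dir):
    import asdf

    mapping = DirectoryResourceMapping(schema_dir, "asdf://example.org/schemas")
    with asdf.config_context() as config:
        config.add_resource_mapping(mapping)
        # Within this context, resources under asdf://example.org/schemas/
        # resolve through the directory mapping.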
def test_resource_manager():
    mapping1 = {
        "http://somewhere.org/schemas/foo-1.0.0": b"foo",
        "http://somewhere.org/schemas/bar-1.0.0": b"bar",
    }

    mapping2 = {
        "http://somewhere.org/schemas/foo-1.0.0": b"duplicate foo",
        "http://somewhere.org/schemas/baz-1.0.0": b"baz",
        "http://somewhere.org/schemas/foz-1.0.0": "foz",
    }

    manager = ResourceManager([mapping1, mapping2])
    assert isinstance(manager, Mapping)

    assert len(manager) == 4
    assert set(manager) == {
        "http://somewhere.org/schemas/foo-1.0.0",
        "http://somewhere.org/schemas/bar-1.0.0",
        "http://somewhere.org/schemas/baz-1.0.0",
        "http://somewhere.org/schemas/foz-1.0.0",
    }
    assert "http://somewhere.org/schemas/foo-1.0.0" in manager
    assert manager["http://somewhere.org/schemas/foo-1.0.0"] == b"foo"
    assert "http://somewhere.org/schemas/bar-1.0.0" in manager
    assert manager["http://somewhere.org/schemas/bar-1.0.0"] == b"bar"
    assert "http://somewhere.org/schemas/baz-1.0.0" in manager
    assert manager["http://somewhere.org/schemas/baz-1.0.0"] == b"baz"
    assert "http://somewhere.org/schemas/foz-1.0.0" in manager
    assert manager["http://somewhere.org/schemas/foz-1.0.0"] == b"foz"

    with pytest.raises(KeyError, match="http://somewhere.org/schemas/missing-1.0.0"):
        manager["http://somewhere.org/schemas/missing-1.0.0"]

    # Confirm that the repr string is reasonable:
    assert "len: 4" in repr(manager)


def test_jsonschema_resource_mapping():
    mapping = JsonschemaResourceMapping()
    assert isinstance(mapping, Mapping)

    assert len(mapping) == 1
    assert set(mapping) == {"http://json-schema.org/draft-04/schema"}
    assert "http://json-schema.org/draft-04/schema" in mapping
    assert b"http://json-schema.org/draft-04/schema" in mapping["http://json-schema.org/draft-04/schema"]

    assert repr(mapping) == "JsonschemaResourceMapping()"


@pytest.mark.parametrize("uri", [
    "http://json-schema.org/draft-04/schema",
    "http://stsci.edu/schemas/yaml-schema/draft-01",
    "http://stsci.edu/schemas/asdf/core/asdf-1.1.0",
    "asdf://asdf-format.org/core/schemas/extension_manifest-1.0.0",
])
def test_get_core_resource_mappings(uri):
    mappings = get_core_resource_mappings()

    mapping = next(m for m in mappings if uri in m)
    assert mapping is not None

    assert uri.encode("utf-8") in mapping[uri]


def test_proxy_is_mapping():
    assert isinstance(ResourceMappingProxy({}), Mapping)


def test_proxy_maybe_wrap():
    mapping = {
        "http://somewhere.org/resources/foo": "foo",
        "http://somewhere.org/resources/bar": "bar",
    }
    proxy = ResourceMappingProxy.maybe_wrap(mapping)
    assert proxy.delegate is mapping
    assert ResourceMappingProxy.maybe_wrap(proxy) is proxy

    with pytest.raises(TypeError):
        ResourceMappingProxy.maybe_wrap([])


def test_proxy_properties():
    mapping = {
        "http://somewhere.org/resources/foo": "foo",
        "http://somewhere.org/resources/bar": "bar",
    }
    proxy = ResourceMappingProxy(mapping, package_name="foo", package_version="1.2.3")

    assert len(proxy) == len(mapping)
    assert set(proxy.keys()) == set(mapping.keys())
    for uri in mapping:
        assert proxy[uri] is mapping[uri]

    assert proxy.package_name == "foo"
    assert proxy.package_version == "1.2.3"
    assert proxy.class_name.endswith("dict")
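# Illustrative sketch (hypothetical helper, not from the original suite): as
# test_resource_manager above demonstrates, when two mappings define the same
# URI the mapping registered first takes precedence.
def _example_resource_manager_precedence():
    manager = ResourceManager([{"u": b"first"}, {"u": b"second"}])
    assert manager["u"] == b"first"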
def test_proxy_hash_and_eq():
    mapping = {
        "http://somewhere.org/resources/foo": "foo",
        "http://somewhere.org/resources/bar": "bar",
    }
    proxy1 = ResourceMappingProxy(mapping)
    proxy2 = ResourceMappingProxy(mapping, package_name="foo", package_version="1.2.3")

    assert proxy1 == proxy2
    assert hash(proxy1) == hash(proxy2)
    assert proxy1 != mapping
    assert proxy2 != mapping


def test_proxy_repr():
    mapping = {
        "http://somewhere.org/resources/foo": "foo",
        "http://somewhere.org/resources/bar": "bar",
    }
    proxy = ResourceMappingProxy(mapping, package_name="foo", package_version="1.2.3")

    assert ".dict" in repr(proxy)
    assert "package: foo==1.2.3" in repr(proxy)
    assert "len: 2" in repr(proxy)

    empty_proxy = ResourceMappingProxy({})

    assert ".dict" in repr(empty_proxy)
    assert "package: (none)" in repr(empty_proxy)
    assert "len: 0" in repr(empty_proxy)


# File: asdf-2.9.2/asdf/tests/test_schema.py

import io
from datetime import datetime

from jsonschema import ValidationError
import numpy as np
from numpy.testing import assert_array_equal
import pytest

import asdf
from asdf import constants
from asdf import get_config, config_context
from asdf import extension
from asdf import resolver
from asdf import schema
from asdf import types
from asdf import util
from asdf import yamlutil
from asdf import tagged
from asdf.tests import helpers, CustomExtension
from asdf.exceptions import AsdfWarning, AsdfConversionWarning, AsdfDeprecationWarning


class TagReferenceType(types.CustomType):
    """
    This class is used by several tests below for validating foreign type
    references in schemas and ASDF files.
    """
    name = 'tag_reference'
    organization = 'nowhere.org'
    version = (1, 0, 0)
    standard = 'custom'

    @classmethod
    def from_tree(cls, tree, ctx):
        node = {}
        node['name'] = tree['name']
        node['things'] = tree['things']
        return node


def test_tagging_scalars():
    pytest.importorskip('astropy', '3.0.0')
    from astropy import units as u

    yaml = """
unit: !unit/unit-1.0.0 m
not_unit: m
    """
    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff) as ff:
        assert isinstance(ff.tree['unit'], u.UnitBase)
        assert not isinstance(ff.tree['not_unit'], u.UnitBase)
        assert isinstance(ff.tree['not_unit'], str)

        assert ff.tree == {
            'unit': u.m,
            'not_unit': 'm'
        }


def test_read_json_schema():
    """Pytest to make sure reading JSON schemas succeeds.

    This was known to fail on Python 3.5 See issue #314 at
    https://github.com/asdf-format/asdf/issues/314 for more details.
    """
    json_schema = helpers.get_test_data_path('example_schema.json')
    schema_tree = schema.load_schema(json_schema, resolve_references=True)
    schema.check_schema(schema_tree)


def test_load_schema(tmpdir):
    schema_def = """
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0"
id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0"
tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0"

type: object
properties:
  foobar:
    $ref: "../core/ndarray-1.0.0"

required: [foobar]
...
    """
    schema_path = tmpdir.join('nugatory.yaml')
    schema_path.write(schema_def.encode())

    schema_tree = schema.load_schema(str(schema_path), resolve_references=True)
    schema.check_schema(schema_tree)
""" schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_full_tag(tmpdir): schema_def = """ %YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_tag_address(tmpdir): schema_def = """ %YAML 1.1 %TAG !asdf! tag:stsci.edu:asdf/ --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_file_url(tmpdir): schema_def = """ %YAML 1.1 %TAG !asdf! tag:stsci.edu:asdf/ --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "{}" required: [foobar] ... """.format(extension.get_default_resolver()('tag:stsci.edu:asdf/core/ndarray-1.0.0')) schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_asdf_uri_scheme(): subschema_content="""%YAML 1.1 --- $schema: http://stsci.edu/schemas/asdf/asdf-schema-1.0.0 id: asdf://somewhere.org/schemas/bar bar: type: string ... """ content = """%YAML 1.1 --- $schema: http://stsci.edu/schemas/asdf/asdf-schema-1.0.0 id: asdf://somewhere.org/schemas/foo definitions: local_bar: type: string type: object properties: bar: $ref: asdf://somewhere.org/schemas/bar#/bar local_bar: $ref: '#/definitions/local_bar' ... """ with asdf.config_context() as config: config.add_resource_mapping({"asdf://somewhere.org/schemas/foo": content}) config.add_resource_mapping({"asdf://somewhere.org/schemas/bar": subschema_content}) schema_tree = schema.load_schema("asdf://somewhere.org/schemas/foo") instance = {"bar": "baz", "local_bar": "foz"} schema.validate(instance, schema=schema_tree) with pytest.raises(ValidationError): schema.validate({"bar": 12}, schema=schema_tree) def test_load_schema_with_stsci_id(): """ This tests the following edge case: - schema references a subschema provided by the new extension API - subschema URI shares a prefix with one of the old-style extension resolvers - resolve_references is enabled If we're not careful, the old-style resolver will mangle the URI and we won't be able to retrieve the schema content. """ subschema_content="""%YAML 1.1 --- $schema: http://stsci.edu/schemas/asdf/asdf-schema-1.0.0 id: http://stsci.edu/schemas/bar bar: type: string ... 
""" content = """%YAML 1.1 --- $schema: http://stsci.edu/schemas/asdf/asdf-schema-1.0.0 id: http://stsci.edu/schemas/foo definitions: local_bar: type: string type: object properties: bar: $ref: http://stsci.edu/schemas/bar#/bar local_bar: $ref: '#/definitions/local_bar' ... """ with asdf.config_context() as config: config.add_resource_mapping({"http://stsci.edu/schemas/foo": content}) config.add_resource_mapping({"http://stsci.edu/schemas/bar": subschema_content}) schema_tree = schema.load_schema("http://stsci.edu/schemas/foo", resolve_references=True) instance = {"bar": "baz", "local_bar": "foz"} schema.validate(instance, schema=schema_tree) with pytest.raises(ValidationError): schema.validate({"bar": 12}, schema=schema_tree) def test_schema_caching(): # Make sure that if we request the same URL, we get a different object # (despite the caching internal to load_schema). Changes to a schema # dict should not impact other uses of that schema. s1 = schema.load_schema( 'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') s2 = schema.load_schema( 'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') assert s1 is not s2 def test_asdf_file_resolver_hashing(): # Confirm that resolvers from distinct AsdfFile instances # hash to the same value (this allows schema caching to function). a1 = asdf.AsdfFile() a2 = asdf.AsdfFile() assert hash(a1.resolver) == hash(a2.resolver) assert a1.resolver == a2.resolver def test_load_schema_from_resource_mapping(): content = """ id: http://somewhere.org/schemas/razmataz-1.0.0 type: object properties: foo: type: string bar: type: boolean """.encode("utf-8") get_config().add_resource_mapping({"http://somewhere.org/schemas/razmataz-1.0.0": content}) s = schema.load_schema("http://somewhere.org/schemas/razmataz-1.0.0") assert s["id"] == "http://somewhere.org/schemas/razmataz-1.0.0" def test_flow_style(): class CustomFlowStyleType(dict, types.CustomType): name = 'custom_flow' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class CustomFlowStyleExtension(CustomExtension): @property def types(self): return [CustomFlowStyleType] tree = { 'custom_flow': CustomFlowStyleType({'a': 42, 'b': 43}) } buff = io.BytesIO() ff = asdf.AsdfFile(tree, extensions=CustomFlowStyleExtension()) ff.write_to(buff) assert b' a: 42\n b: 43' in buff.getvalue() def test_style(): class CustomStyleType(str, types.CustomType): name = 'custom_style' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class CustomStyleExtension(CustomExtension): @property def types(self): return [CustomStyleType] tree = { 'custom_style': CustomStyleType("short") } buff = io.BytesIO() ff = asdf.AsdfFile(tree, extensions=CustomStyleExtension()) ff.write_to(buff) assert b'|-\n short\n' in buff.getvalue() def test_property_order(): tree = {'foo': np.ndarray([1, 2, 3])} buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.write_to(buff) ndarray_schema = schema.load_schema( 'http://stsci.edu/schemas/asdf/core/ndarray-1.0.0') property_order = ndarray_schema['anyOf'][1]['propertyOrder'] last_index = 0 for prop in property_order: index = buff.getvalue().find(prop.encode('utf-8') + b':') if index != -1: assert index > last_index last_index = index def test_invalid_nested(): class CustomType(str, types.CustomType): name = 'custom' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class CustomTypeExtension(CustomExtension): @property def types(self): return [CustomType] yaml = """ custom: ! 
def test_invalid_nested():
    class CustomType(str, types.CustomType):
        name = 'custom'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class CustomTypeExtension(CustomExtension):
        @property
        def types(self):
            return [CustomType]

    yaml = """
custom: !<tag:nowhere.org:custom/custom-1.0.0>
  foo
    """
    buff = helpers.yaml_to_asdf(yaml)
    # This should cause a warning but not an error because without explicitly
    # providing an extension, our custom type will not be recognized and will
    # simply be converted to a raw type.
    with pytest.warns(AsdfConversionWarning, match="tag:nowhere.org:custom/custom-1.0.0"):
        with asdf.open(buff):
            pass

    buff.seek(0)
    with pytest.raises(ValidationError):
        with asdf.open(buff, extensions=[CustomTypeExtension()]):
            pass

    # Make sure tags get validated inside of other tags that know
    # nothing about them.
    yaml = """
array: !core/ndarray-1.0.0
  data: [0, 1, 2]
custom: !<tag:nowhere.org:custom/custom-1.0.0>
  foo
    """
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.raises(ValidationError):
        with asdf.open(buff, extensions=[CustomTypeExtension()]):
            pass


def test_invalid_schema():
    s = {'type': 'integer'}
    schema.check_schema(s)

    s = {'type': 'foobar'}
    with pytest.raises(ValidationError):
        schema.check_schema(s)


def test_defaults():
    s = {
        'type': 'object',
        'properties': {
            'a': {
                'type': 'integer',
                'default': 42
            }
        }
    }

    t = {}

    cls = schema._create_validator(schema.FILL_DEFAULTS)
    validator = cls(s)
    validator.validate(t, _schema=s)

    assert t['a'] == 42

    cls = schema._create_validator(schema.REMOVE_DEFAULTS)
    validator = cls(s)
    validator.validate(t, _schema=s)

    assert t == {}


def test_default_check_in_schema():
    s = {
        'type': 'object',
        'properties': {
            'a': {
                'type': 'integer',
                'default': 'foo'
            }
        }
    }

    with pytest.raises(ValidationError):
        schema.check_schema(s)

    schema.check_schema(s, validate_default=False)


def test_check_complex_default():
    default_software = tagged.TaggedDict(
        {"name": "asdf", "version": "2.7.0"},
        "tag:stsci.edu/asdf/core/software-1.0.0"
    )

    s = {
        'type': 'object',
        'properties': {
            'a': {
                'type': 'object',
                'tag': 'tag:stsci.edu/asdf/core/software-1.0.0',
                'default': default_software
            }
        }
    }

    schema.check_schema(s)

    s['properties']['a']['tag'] = 'tag:stsci.edu/asdf/core/ndarray-1.0.0'
    with pytest.raises(ValidationError):
        schema.check_schema(s)
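# Illustrative sketch (hypothetical schema, not from the original suite):
# defaults nested under an allOf combiner are filled from every subschema,
# matching the behavior asserted in test_fill_and_remove_defaults below,
# while anyOf/oneOf subschema defaults are ignored.
def _example_allof_defaults():
    s = {
        'type': 'object',
        'allOf': [
            {'properties': {'e': {'type': 'integer', 'default': 122}}},
            {'properties': {'f': {'type': 'integer', 'default': 162}}},
        ]
    }
    t = {}
    validator = schema._create_validator(schema.FILL_DEFAULTS)(s)
    validator.validate(t, _schema=s)
    assert t == {'e': 122, 'f': 162}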
def test_fill_and_remove_defaults():
    class DefaultType(dict, types.CustomType):
        name = 'default'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class DefaultTypeExtension(CustomExtension):
        @property
        def types(self):
            return [DefaultType]

    yaml = """
custom: !<tag:nowhere.org:custom/default-1.0.0>
  b: {}
  d: {}
  g: {}
  j:
    l: 362
    """
    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
        assert 'a' in ff.tree['custom']
        assert ff.tree['custom']['a'] == 42
        assert ff.tree['custom']['b']['c'] == 82
        # allOf combiner should fill defaults from all subschemas:
        assert ff.tree['custom']['d']['e'] == 122
        assert ff.tree['custom']['d']['f'] == 162
        # anyOf combiners should be ignored:
        assert 'h' not in ff.tree['custom']['g']
        assert 'i' not in ff.tree['custom']['g']
        # oneOf combiners should be ignored:
        assert 'k' not in ff.tree['custom']['j']
        assert ff.tree['custom']['j']['l'] == 362

    buff.seek(0)
    with pytest.warns(AsdfDeprecationWarning, match='do_not_fill_defaults'):
        with asdf.open(buff, extensions=[DefaultTypeExtension()],
                       do_not_fill_defaults=True) as ff:
            assert 'a' not in ff.tree['custom']
            assert 'c' not in ff.tree['custom']['b']
            assert 'e' not in ff.tree['custom']['d']
            assert 'f' not in ff.tree['custom']['d']
            assert 'h' not in ff.tree['custom']['g']
            assert 'i' not in ff.tree['custom']['g']
            assert 'k' not in ff.tree['custom']['j']
            assert ff.tree['custom']['j']['l'] == 362

            ff.fill_defaults()
            assert 'a' in ff.tree['custom']
            assert ff.tree['custom']['a'] == 42
            assert 'c' in ff.tree['custom']['b']
            assert ff.tree['custom']['b']['c'] == 82
            assert ff.tree['custom']['d']['e'] == 122
            assert ff.tree['custom']['d']['f'] == 162
            assert 'h' not in ff.tree['custom']['g']
            assert 'i' not in ff.tree['custom']['g']
            assert 'k' not in ff.tree['custom']['j']
            assert ff.tree['custom']['j']['l'] == 362

            ff.remove_defaults()
            assert 'a' not in ff.tree['custom']
            assert 'c' not in ff.tree['custom']['b']
            assert 'e' not in ff.tree['custom']['d']
            assert 'f' not in ff.tree['custom']['d']
            assert 'h' not in ff.tree['custom']['g']
            assert 'i' not in ff.tree['custom']['g']
            assert 'k' not in ff.tree['custom']['j']
            assert ff.tree['custom']['j']['l'] == 362

    buff.seek(0)
    with config_context() as config:
        config.legacy_fill_schema_defaults = False
        with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
            assert 'a' not in ff.tree['custom']
            assert 'c' not in ff.tree['custom']['b']
            assert 'e' not in ff.tree['custom']['d']
            assert 'f' not in ff.tree['custom']['d']
            assert 'h' not in ff.tree['custom']['g']
            assert 'i' not in ff.tree['custom']['g']
            assert 'k' not in ff.tree['custom']['j']
            assert ff.tree['custom']['j']['l'] == 362


def test_one_of():
    """
    Covers https://github.com/asdf-format/asdf/issues/809
    """
    class OneOfType(dict, types.CustomType):
        name = 'one_of'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class OneOfTypeExtension(CustomExtension):
        @property
        def types(self):
            return [OneOfType]

    yaml = """
one_of: !<tag:nowhere.org:custom/one_of-1.0.0>
  value: foo
    """
    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff, extensions=[OneOfTypeExtension()]) as ff:
        assert ff['one_of']['value'] == 'foo'
name: "Something" things: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: custom = ff.tree['custom'] assert custom['name'] == "Something" assert_array_equal(custom['things'], [1, 2, 3]) def test_foreign_tag_reference_validation(): class ForeignTagReferenceType(types.CustomType): name = 'foreign_tag_reference' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' @classmethod def from_tree(cls, tree, ctx): node = {} node['a'] = tree['a'] node['b'] = tree['b'] return node class ForeignTypeExtension(CustomExtension): @property def types(self): return [TagReferenceType, ForeignTagReferenceType] yaml = """ custom: ! a: ! name: "Something" things: !core/ndarray-1.0.0 data: [1, 2, 3] b: ! name: "Anything" things: !core/ndarray-1.0.0 data: [4, 5, 6] """ buff = helpers.yaml_to_asdf(yaml) with asdf.open(buff, extensions=ForeignTypeExtension()) as ff: a = ff.tree['custom']['a'] b = ff.tree['custom']['b'] assert a['name'] == 'Something' assert_array_equal(a['things'], [1, 2, 3]) assert b['name'] == 'Anything' assert_array_equal(b['things'], [4, 5, 6]) def test_self_reference_resolution(): r = resolver.Resolver(CustomExtension().url_mapping, 'url') s = schema.load_schema( helpers.get_test_data_path('self_referencing-1.0.0.yaml'), resolver=r, resolve_references=True) assert '$ref' not in repr(s) assert s['anyOf'][1] == s['anyOf'][0] def test_schema_resolved_via_entry_points(): """Test that entry points mappings to core schema works""" r = extension.get_default_resolver() tag = types.format_tag('stsci.edu', 'asdf', '1.0.0', 'fits/fits') url = extension.default_extensions.extension_list.tag_mapping(tag) s = schema.load_schema(url, resolver=r, resolve_references=True) assert tag in repr(s) @pytest.mark.parametrize("num", [constants.MAX_NUMBER+1, constants.MIN_NUMBER-1]) def test_max_min_literals(num): tree = { 'test_int': num, } with pytest.raises(ValidationError): asdf.AsdfFile(tree) tree = { 'test_list': [num], } with pytest.raises(ValidationError): asdf.AsdfFile(tree) tree = { num: 'test_key', } with pytest.raises(ValidationError): asdf.AsdfFile(tree) @pytest.mark.parametrize("num", [constants.MAX_NUMBER+1, constants.MIN_NUMBER-1]) @pytest.mark.parametrize("ttype", ["val", "list", "key"]) def test_max_min_literals_write(num, ttype, tmpdir): outfile = tmpdir / "test.asdf" af = asdf.AsdfFile() # Validation doesn't occur here, so no warning/error will be raised. if ttype == "val": af.tree['test_int'] = num elif ttype == "list": af.tree['test_int'] = [num] else: af.tree[num] = 'test_key' # Validation will occur on write, though, so detect it. 
@pytest.mark.parametrize("num", [constants.MAX_NUMBER+1, constants.MIN_NUMBER-1])
@pytest.mark.parametrize("ttype", ["val", "list", "key"])
def test_max_min_literals_write(num, ttype, tmpdir):
    outfile = tmpdir / "test.asdf"

    af = asdf.AsdfFile()

    # Validation doesn't occur here, so no warning/error will be raised.
    if ttype == "val":
        af.tree['test_int'] = num
    elif ttype == "list":
        af.tree['test_int'] = [num]
    else:
        af.tree[num] = 'test_key'

    # Validation will occur on write, though, so detect it.
    with pytest.raises(ValidationError):
        af.write_to(outfile)

    af.close()


@pytest.mark.parametrize("value", [constants.MAX_NUMBER+1, constants.MIN_NUMBER-1])
def test_read_large_literal(value):
    yaml = f"integer: {value}"

    buff = helpers.yaml_to_asdf(yaml)

    with pytest.warns(AsdfWarning, match="Invalid integer literal value"):
        with asdf.open(buff) as af:
            assert af['integer'] == value

    yaml = f"{value}: foo"

    buff = helpers.yaml_to_asdf(yaml)

    with pytest.warns(AsdfWarning, match="Invalid integer literal value"):
        with asdf.open(buff) as af:
            assert af[value] == "foo"


@pytest.mark.parametrize(
    "version,keys",
    [
        ("1.6.0", ["foo", 42, True]),
        ("1.5.0", ["foo", 42, True, 3.14159, datetime.now(), b"foo", None]),
    ]
)
def test_mapping_supported_key_types(keys, version):
    for key in keys:
        with helpers.assert_no_warnings():
            af = asdf.AsdfFile({key: "value"}, version=version)
            buff = io.BytesIO()
            af.write_to(buff)
            buff.seek(0)
            with asdf.open(buff) as af:
                assert af[key] == "value"


@pytest.mark.parametrize(
    "version,keys",
    [
        ("1.6.0", [3.14159, datetime.now(), b"foo", None, ("foo", "bar")]),
    ]
)
def test_mapping_unsupported_key_types(keys, version):
    for key in keys:
        with pytest.raises(ValidationError, match="Mapping key .* is not permitted"):
            af = asdf.AsdfFile({key: "value"}, version=version)
            buff = io.BytesIO()
            af.write_to(buff)


def test_nested_array():
    s = {
        'type': 'object',
        'properties': {
            'stuff': {
                'type': 'array',
                'items': {
                    'type': 'array',
                    'items': [
                        {'type': 'integer'},
                        {'type': 'string'},
                        {'type': 'number'},
                    ],
                    'minItems': 3,
                    'maxItems': 3
                }
            }
        }
    }

    good = dict(stuff=[[1, 'hello', 2], [4, 'world', 9.7]])
    schema.validate(good, schema=s)

    bads = [
        dict(stuff=[[1, 2, 3]]),
        dict(stuff=[12, 'dldl']),
        dict(stuff=[[12, 'dldl']]),
        dict(stuff=[[1, 'hello', 2], [4, 5]]),
        dict(stuff=[[1, 'hello', 2], [4, 5, 6]])
    ]

    for b in bads:
        with pytest.raises(ValidationError):
            schema.validate(b, schema=s)


def test_nested_array_yaml(tmpdir):
    schema_def = """
%YAML 1.1
---
type: object
properties:
  stuff:
    type: array
    items:
      type: array
      items:
        - type: integer
        - type: string
        - type: number
      minItems: 3
      maxItems: 3
...
    """
    schema_path = tmpdir.join('nested.yaml')
    schema_path.write(schema_def.encode())

    schema_tree = schema.load_schema(str(schema_path))
    schema.check_schema(schema_tree)

    good = dict(stuff=[[1, 'hello', 2], [4, 'world', 9.7]])
    schema.validate(good, schema=schema_tree)

    bads = [
        dict(stuff=[[1, 2, 3]]),
        dict(stuff=[12, 'dldl']),
        dict(stuff=[[12, 'dldl']]),
        dict(stuff=[[1, 'hello', 2], [4, 5]]),
        dict(stuff=[[1, 'hello', 2], [4, 5, 6]])
    ]

    for b in bads:
        with pytest.raises(ValidationError):
            schema.validate(b, schema=schema_tree)
def test_type_missing_dependencies():
    pytest.importorskip('astropy', '3.0.0')

    class MissingType(types.CustomType):
        name = 'missing'
        organization = 'nowhere.org'
        version = (1, 1, 0)
        standard = 'custom'
        types = ['asdfghjkl12345.foo']
        requires = ["ASDFGHJKL12345"]

    class DefaultTypeExtension(CustomExtension):
        @property
        def types(self):
            return [MissingType]

    yaml = """
custom: !<tag:nowhere.org:custom/missing-1.1.0>
  b: {foo: 42}
    """
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(AsdfConversionWarning, match="Failed to convert tag:nowhere.org:custom/missing-1.1.0"):
        with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
            assert ff.tree['custom']['b']['foo'] == 42


def test_assert_roundtrip_with_extension(tmpdir):
    called_custom_assert_equal = [False]

    class CustomType(dict, types.CustomType):
        name = 'custom_flow'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

        @classmethod
        def assert_equal(cls, old, new):
            called_custom_assert_equal[0] = True

    class CustomTypeExtension(CustomExtension):
        @property
        def types(self):
            return [CustomType]

    tree = {
        'custom': CustomType({'a': 42, 'b': 43})
    }

    def check(ff):
        assert isinstance(ff.tree['custom'], CustomType)

    with helpers.assert_no_warnings():
        helpers.assert_roundtrip_tree(
            tree, tmpdir, extensions=[CustomTypeExtension()])

    assert called_custom_assert_equal[0] is True


def test_custom_validation_bad(tmpdir):
    custom_schema_path = helpers.get_test_data_path('custom_schema.yaml')
    asdf_file = str(tmpdir.join('out.asdf'))

    # This tree does not conform to the custom schema
    tree = {'stuff': 42, 'other_stuff': 'hello'}

    # Creating file without custom schema should pass
    with asdf.AsdfFile(tree) as ff:
        ff.write_to(asdf_file)

    # Creating file using custom schema should fail
    with pytest.raises(ValidationError):
        with asdf.AsdfFile(tree, custom_schema=custom_schema_path):
            pass

    # Opening file without custom schema should pass
    with asdf.open(asdf_file):
        pass

    # Opening file with custom schema should fail
    with pytest.raises(ValidationError):
        with asdf.open(asdf_file, custom_schema=custom_schema_path):
            pass


def test_custom_validation_good(tmpdir):
    custom_schema_path = helpers.get_test_data_path('custom_schema.yaml')
    asdf_file = str(tmpdir.join('out.asdf'))

    # This tree conforms to the custom schema
    tree = {
        'foo': {'x': 42, 'y': 10},
        'bar': {'a': 'hello', 'b': 'banjo'}
    }

    with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff:
        ff.write_to(asdf_file)

    with asdf.open(asdf_file, custom_schema=custom_schema_path):
        pass


def test_custom_validation_pathlib(tmpdir):
    """
    Make sure custom schema paths can be pathlib.Path objects

    See https://github.com/asdf-format/asdf/issues/653 for discussion.
    """
    from pathlib import Path

    custom_schema_path = Path(helpers.get_test_data_path('custom_schema.yaml'))
    asdf_file = str(tmpdir.join('out.asdf'))

    # This tree conforms to the custom schema
    tree = {
        'foo': {'x': 42, 'y': 10},
        'bar': {'a': 'hello', 'b': 'banjo'}
    }

    with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff:
        ff.write_to(asdf_file)

    with asdf.open(asdf_file, custom_schema=custom_schema_path):
        pass
""" from pathlib import Path custom_schema_path = Path(helpers.get_test_data_path('custom_schema.yaml')) asdf_file = str(tmpdir.join('out.asdf')) # This tree conforms to the custom schema tree = { 'foo': {'x': 42, 'y': 10}, 'bar': {'a': 'hello', 'b': 'banjo'} } with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: ff.write_to(asdf_file) with asdf.open(asdf_file, custom_schema=custom_schema_path): pass def test_custom_validation_with_definitions_good(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml') asdf_file = str(tmpdir.join('out.asdf')) # This tree conforms to the custom schema tree = { 'thing': { 'biz': 'hello', 'baz': 'world' } } with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: ff.write_to(asdf_file) with asdf.open(asdf_file, custom_schema=custom_schema_path): pass def test_custom_validation_with_definitions_bad(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml') asdf_file = str(tmpdir.join('out.asdf')) # This tree does NOT conform to the custom schema tree = { 'forb': { 'biz': 'hello', 'baz': 'world' } } # Creating file without custom schema should pass with asdf.AsdfFile(tree) as ff: ff.write_to(asdf_file) # Creating file with custom schema should fail with pytest.raises(ValidationError): with asdf.AsdfFile(tree, custom_schema=custom_schema_path): pass # Opening file without custom schema should pass with asdf.open(asdf_file): pass # Opening file with custom schema should fail with pytest.raises(ValidationError): with asdf.open(asdf_file, custom_schema=custom_schema_path): pass def test_custom_validation_with_external_ref_good(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema_external_ref.yaml') asdf_file = str(tmpdir.join('out.asdf')) # This tree conforms to the custom schema tree = { 'foo': asdf.tags.core.Software(name="Microsoft Windows", version="95") } with asdf.AsdfFile(tree, custom_schema=custom_schema_path) as ff: ff.write_to(asdf_file) with asdf.open(asdf_file, custom_schema=custom_schema_path): pass def test_custom_validation_with_external_ref_bad(tmpdir): custom_schema_path = helpers.get_test_data_path('custom_schema_external_ref.yaml') asdf_file = str(tmpdir.join('out.asdf')) # This tree does not conform to the custom schema tree = { 'foo': False } # Creating file without custom schema should pass with asdf.AsdfFile(tree) as ff: ff.write_to(asdf_file) # Creating file with custom schema should fail with pytest.raises(ValidationError): with asdf.AsdfFile(tree, custom_schema=custom_schema_path): pass # Opening file without custom schema should pass with asdf.open(asdf_file): pass # Opening file with custom schema should fail with pytest.raises(ValidationError): with asdf.open(asdf_file, custom_schema=custom_schema_path): pass def test_load_custom_schema_deprecated(): custom_schema_path = helpers.get_test_data_path('custom_schema.yaml') with pytest.deprecated_call(): schema.load_custom_schema(custom_schema_path) def test_load_schema_resolve_local_refs_deprecated(): custom_schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml') with pytest.deprecated_call(): schema.load_schema(custom_schema_path, resolve_local_refs=True) def test_nonexistent_tag(tmpdir): """ This tests the case where a node is tagged with a type that apparently comes from an extension that is known, but the type itself can't be found. 
def test_nonexistent_tag(tmpdir):
    """
    This tests the case where a node is tagged with a type that apparently
    comes from an extension that is known, but the type itself can't be found.

    This could occur when a more recent version of an installed package
    provides the new type, but an older version of the package is installed.
    ASDF should still be able to open the file in this case, but it won't be
    able to restore the type.

    The bug that prompted this test results from attempting to load a schema
    file that doesn't exist, which is why this test belongs in this file.
    """
    # This shouldn't ever happen, but it's a useful test case
    yaml = """
a: !core/doesnt_exist-1.0.0 hello
    """
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(AsdfWarning, match="Unable to locate schema file"):
        with asdf.open(buff) as af:
            assert str(af['a']) == 'hello'

    # This is a more realistic case since we're using an external extension
    yaml = """
a: !<tag:nowhere.org:custom/doesnt_exist-1.0.0> hello
    """
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(AsdfWarning, match="Unable to locate schema file"):
        with asdf.open(buff, extensions=CustomExtension()) as af:
            assert str(af['a']) == 'hello'


@pytest.mark.parametrize("numpy_value,valid_types", [
    (np.str_("foo"), {"string"}),
    (np.bytes_("foo"), set()),
    (np.float16(3.14), {"number"}),
    (np.float32(3.14159), {"number"}),
    (np.float64(3.14159), {"number"}),
    # Evidently float128 is not available on Windows:
    (getattr(np, "float128", np.float64)(3.14159), {"number"}),
    (np.int8(42), {"number", "integer"}),
    (np.int16(42), {"number", "integer"}),
    (np.int32(42), {"number", "integer"}),
    (np.longlong(42), {"number", "integer"}),
    (np.uint8(42), {"number", "integer"}),
    (np.uint16(42), {"number", "integer"}),
    (np.uint32(42), {"number", "integer"}),
    (np.uint64(42), {"number", "integer"}),
    (np.ulonglong(42), {"number", "integer"}),
])
def test_numpy_scalar_type_validation(numpy_value, valid_types):
    def _assert_validation(jsonschema_type, expected_valid):
        validator = schema.get_validator()
        try:
            validator.validate(numpy_value, _schema={"type": jsonschema_type})
        except ValidationError:
            valid = False
        else:
            valid = True

        if valid is not expected_valid:
            if expected_valid:
                description = "valid"
            else:
                description = "invalid"
            assert False, "Expected numpy.{} to be {} against jsonschema type '{}'".format(
                type(numpy_value).__name__, description, jsonschema_type
            )

    for jsonschema_type in valid_types:
        _assert_validation(jsonschema_type, True)

    invalid_types = {"string", "number", "integer", "boolean", "null", "object"} - valid_types
    for jsonschema_type in invalid_types:
        _assert_validation(jsonschema_type, False)


def test_validator_visit_repeat_nodes():
    ctx = asdf.AsdfFile()
    node = asdf.tags.core.Software(name="Minesweeper")
    tree = yamlutil.custom_tree_to_tagged_tree(
        {"node": node, "other_node": node, "nested": {"node": node}}, ctx
    )

    visited_nodes = []

    def _test_validator(validator, value, instance, schema):
        visited_nodes.append(instance)

    validator = schema.get_validator(ctx=ctx, validators=util.HashableDict(type=_test_validator))
    validator.validate(tree)
    assert len(visited_nodes) == 1

    visited_nodes.clear()
    validator = schema.get_validator(
        validators=util.HashableDict(type=_test_validator),
        _visit_repeat_nodes=True
    )
    validator.validate(tree)
    assert len(visited_nodes) == 3
""" with asdf.config_context() as config: config.add_resource_mapping({"asdf://somewhere.org/schemas/foo": content}) schema_tree = schema.load_schema("asdf://somewhere.org/schemas/foo") instance = tagged.TaggedDict(tag="asdf://somewhere.org/tags/foo") schema.validate(instance, schema=schema_tree) with pytest.raises(ValidationError): schema.validate(tagged.TaggedDict(tag="asdf://somewhere.org/tags/bar"), schema=schema_tree) content="""%YAML 1.1 --- $schema: http://stsci.edu/schemas/asdf/asdf-schema-1.0.0 id: asdf://somewhere.org/schemas/bar tag: asdf://somewhere.org/tags/bar-* ... """ with asdf.config_context() as config: config.add_resource_mapping({"asdf://somewhere.org/schemas/bar": content}) schema_tree = schema.load_schema("asdf://somewhere.org/schemas/bar") instance = tagged.TaggedDict(tag="asdf://somewhere.org/tags/bar-2.5") schema.validate(instance, schema=schema_tree) with pytest.raises(ValidationError): schema.validate(tagged.TaggedDict(tag="asdf://somewhere.org/tags/foo-1.0"), schema=schema_tree) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_search.py0000644000537500020070000001163500000000000020541 0ustar00wjamiesonSTSCI\scienceimport re import pytest import numpy as np from asdf import AsdfFile @pytest.fixture def asdf_file(): tree = { "foo": 42, "nested": {"foo": 24, "foible": "whoops", "folicle": "yup", "moo": 24}, "bar": "hello", "list": [{"index": 0}, {"index": 1}, {"index": 2}] } return AsdfFile(tree) def test_no_arguments(asdf_file): result = asdf_file.search() assert len(result.paths) == 15 assert len(result.nodes) == 15 def test_repr(asdf_file): result = asdf_file.search() assert "foo" in repr(result) assert "nested" in repr(result) assert "bar" in repr(result) assert "list" in repr(result) def test_single_result(asdf_file): result = asdf_file.search("bar") assert len(result.paths) == 1 assert len(result.nodes) == 1 assert result.node == "hello" assert result.path == "root['bar']" result.replace("goodbye") assert asdf_file["bar"] == "goodbye" def test_multiple_results(asdf_file): result = asdf_file.search("foo") assert len(result.paths) == 2 assert len(result.nodes) == 2 assert 42 in result.nodes assert 24 in result.nodes assert "root['foo']" in result.paths assert "root['nested']['foo']" in result.paths with pytest.raises(RuntimeError): result.path with pytest.raises(RuntimeError): result.node result.replace(54) assert asdf_file["foo"] == 54 assert asdf_file["nested"]["foo"] == 54 def test_by_key(asdf_file): result = asdf_file.search("bar") assert result.node == "hello" result = asdf_file.search("^b.r$") assert result.node == "hello" result = asdf_file.search(re.compile("fo[oi]")) assert set(result.nodes) == {42, 24, "whoops"} result = asdf_file.search(0) assert result.node == {"index": 0} def test_by_type(asdf_file): result = asdf_file.search(type=str) assert sorted(result.nodes) == sorted(["hello", "whoops", "yup"]) result = asdf_file.search(type="int") assert result.nodes == [42, 24, 24, 0, 1, 2] result = asdf_file.search(type="dict|list") assert len(result.nodes) == 5 result = asdf_file.search(type=re.compile("^i.t$")) assert result.nodes == [42, 24, 24, 0, 1, 2] with pytest.raises(TypeError): asdf_file.search(type=4) def test_by_value(asdf_file): result = asdf_file.search(value=42) assert result.node == 42 def test_by_value_with_ndarray(): """ Check some edge cases when comparing integers and booleans to numpy arrays. 
""" tree = { "foo": np.arange(10) } af = AsdfFile(tree) result = af.search(value=True) assert len(result.nodes) == 0 result = af.search(value=42) assert len(result.nodes) == 0 def test_by_filter(asdf_file): with pytest.raises(ValueError): asdf_file.search(filter=lambda: True) result = asdf_file.search(filter=lambda n: isinstance(n, int) and n % 2 == 0) assert result.nodes == [42, 24, 24, 0, 2] result = asdf_file.search(filter=lambda n, k: k == "foo" and n > 30) assert result.node == 42 def test_multiple_conditions(asdf_file): result = asdf_file.search("foo", value=24) assert len(result.nodes) == 1 assert result.node == 24 result.replace(19) assert len(result.nodes) == 0 assert asdf_file["foo"] == 42 assert asdf_file["nested"]["foo"] == 19 def test_chaining(asdf_file): result = asdf_file.search("foo").search(value=24) assert len(result.nodes) == 1 assert result.node == 24 result.replace(19) assert len(result.nodes) == 0 assert asdf_file["foo"] == 42 assert asdf_file["nested"]["foo"] == 19 def test_index_operator(asdf_file): result = asdf_file.search()["nested"].search("foo") assert len(result.nodes) == 1 assert result.node == 24 with pytest.raises(TypeError): asdf_file.search()["foo"][0] def test_format(asdf_file): result = asdf_file.search() original_len = len(repr(result).split("\n")) result = result.format(max_rows=original_len - 5) new_len = len(repr(result).split("\n")) assert new_len < original_len result = result.format(max_rows=(None, 5)) new_len = len(repr(result).split("\n")) assert new_len < original_len assert repr(result) == repr(result.format()) def test_no_results(asdf_file): result = asdf_file.search("missing") assert len(result.nodes) == 0 assert "No results found." in repr(result) assert result.node is None assert result.path is None # Testing no exceptions here: result.replace("foo") def test_recursive_tree(): tree = {"foo": {"bar": "baz"}} af = AsdfFile(tree) af.tree["foo"]["nested"] = af.tree["foo"] result = af.search() assert "(recursive reference)" in repr(result) result = af.search("bar") assert len(result.nodes) == 1 assert result.node == "baz" result.replace("zap") assert af["foo"]["bar"] == "zap" assert af["foo"]["nested"]["bar"] == "zap" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_stream.py0000644000537500020070000001304700000000000020566 0ustar00wjamiesonSTSCI\scienceimport io import os import numpy as np from numpy.testing import assert_array_equal import pytest import asdf from asdf import generic_io from asdf import stream def test_stream(): buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.float64).tobytes()) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_write_nothing(): # Test that if you write nothing, you get a zero-length array buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (0, 6, 2) def test_stream_twice(): # Test that if you write nothing, you get a zero-length array buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.uint8), 'stream2': stream.Stream([12, 2], np.uint8) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in 
def test_stream_twice():
    # Test that a single streamed block can back two stream arrays, each
    # interpreting the written rows through its own shape.
    buff = io.BytesIO()

    tree = {
        'stream': stream.Stream([6, 2], np.uint8),
        'stream2': stream.Stream([12, 2], np.uint8)
    }

    ff = asdf.AsdfFile(tree)
    ff.write_to(buff)
    for i in range(100):
        buff.write(np.array([i] * 12, np.uint8).tobytes())

    buff.seek(0)

    ff = asdf.open(buff)
    assert len(ff.blocks) == 1
    assert ff.tree['stream'].shape == (100, 6, 2)
    assert ff.tree['stream2'].shape == (50, 12, 2)


def test_stream_with_nonstream():
    buff = io.BytesIO()

    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    ff = asdf.AsdfFile(tree)
    # Since we're testing with small arrays, force this array to be stored in
    # an internal block rather than letting it be automatically put inline.
    ff.set_array_storage(ff['nonstream'], 'internal')
    ff.write_to(buff)
    for i in range(100):
        buff.write(np.array([i] * 12, np.float64).tobytes())

    buff.seek(0)

    with asdf.open(buff) as ff:
        assert len(ff.blocks) == 1
        assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        assert len(ff.blocks) == 2
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)


def test_stream_real_file(tmpdir):
    path = os.path.join(str(tmpdir), 'test.asdf')

    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    with open(path, 'wb') as fd:
        ff = asdf.AsdfFile(tree)
        # Since we're testing with small arrays, force this array to be stored
        # in an internal block rather than letting it be automatically put
        # inline.
        ff.set_array_storage(ff['nonstream'], 'internal')
        ff.write_to(fd)
        for i in range(100):
            fd.write(np.array([i] * 12, np.float64).tobytes())

    with asdf.open(path) as ff:
        assert len(ff.blocks) == 1
        assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        assert len(ff.blocks) == 2
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)


def test_stream_to_stream():
    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    buff = io.BytesIO()
    fd = generic_io.OutputStream(buff)

    ff = asdf.AsdfFile(tree)
    ff.write_to(fd)
    for i in range(100):
        fd.write(np.array([i] * 12, np.float64).tobytes())

    buff.seek(0)

    with asdf.open(generic_io.InputStream(buff, 'r')) as ff:
        assert len(ff.blocks) == 2
        assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)


def test_array_to_stream(tmpdir):
    tree = {
        'stream': np.array([1, 2, 3, 4], np.int64),
    }

    buff = io.BytesIO()

    ff = asdf.AsdfFile(tree)
    ff.set_array_storage(tree['stream'], 'streamed')
    ff.write_to(buff)
    buff.write(np.array([5, 6, 7, 8], np.int64).tobytes())

    buff.seek(0)

    ff = asdf.open(generic_io.InputStream(buff))
    assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8])
    buff.seek(0)
    ff2 = asdf.AsdfFile(ff)
    ff2.write_to(buff)
    assert b"shape: ['*']" in buff.getvalue()

    with open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd:
        ff = asdf.AsdfFile(tree)
        ff.set_array_storage(tree['stream'], 'streamed')
        ff.write_to(fd)
        fd.write(np.array([5, 6, 7, 8], np.int64).tobytes())

    with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff:
        assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8])
        ff2 = asdf.AsdfFile(ff)
        ff2.write_to(buff)
        assert b"shape: ['*']" in buff.getvalue()


def test_too_many_streams():
    tree = {
        'stream1': np.array([1, 2, 3, 4], np.int64),
        'stream2': np.array([1, 2, 3, 4], np.int64)
    }

    ff = asdf.AsdfFile(tree)
    ff.set_array_storage(tree['stream1'], 'streamed')
    with pytest.raises(ValueError):
        ff.set_array_storage(tree['stream2'], 'streamed')
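# Illustrative sketch (not from the original suite): a streamed block is
# written last and simply runs to the end of the file, which is why the
# format -- and hence set_array_storage above -- permits only one of them.
def _example_single_stream():
    ff = asdf.AsdfFile({'s': np.array([1, 2, 3, 4], np.int64)})
    ff.set_array_storage(ff.tree['s'], 'streamed')  # the one allowed stream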
test_stream_repr_and_str(): tree = { 'stream': stream.Stream([16], np.int64) } ff = asdf.AsdfFile(tree) repr(ff.tree['stream']) str(ff.tree['stream']) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_tagged.py0000644000537500020070000000474600000000000020534 0ustar00wjamiesonSTSCI\sciencefrom copy import deepcopy, copy from asdf.tagged import TaggedList, TaggedDict, TaggedString def test_tagged_list_deepcopy(): original = TaggedList([0, 1, 2, ["foo"]], "tag:nowhere.org:custom/foo-1.0.0") result = deepcopy(original) assert result == original assert result.data == original.data assert result._tag == original._tag original.append(4) assert len(result) == 4 original[3].append("bar") assert len(result[3]) == 1 def test_tagged_list_copy(): original = TaggedList([0, 1, 2, ["foo"]], "tag:nowhere.org:custom/foo-1.0.0") result = copy(original) assert result == original assert result.data == original.data assert result._tag == original._tag original.append(4) assert len(result) == 4 original[3].append("bar") assert len(result[3]) == 2 def test_tagged_list_isinstance(): value = TaggedList([0, 1, 2, ["foo"]], "tag:nowhere.org:custom/foo-1.0.0") assert isinstance(value, list) def test_tagged_dict_deepcopy(): original = TaggedDict({"a": 0, "b": 1, "c": 2, "nested": {"d": 3}}, "tag:nowhere.org:custom/foo-1.0.0") result = deepcopy(original) assert result == original assert result.data == original.data assert result._tag == original._tag original["e"] = 4 assert len(result) == 4 original["nested"]["f"] = 5 assert len(result["nested"]) == 1 def test_tagged_dict_copy(): original = TaggedDict({"a": 0, "b": 1, "c": 2, "nested": {"d": 3}}, "tag:nowhere.org:custom/foo-1.0.0") result = copy(original) assert result == original assert result.data == original.data assert result._tag == original._tag original["e"] = 4 assert len(result) == 4 original["nested"]["f"] = 5 assert len(result["nested"]) == 2 def test_tagged_dict_isinstance(): value = TaggedDict({"a": 0, "b": 1, "c": 2, "nested": {"d": 3}}, "tag:nowhere.org:custom/foo-1.0.0") assert isinstance(value, dict) def test_tagged_string_deepcopy(): original = TaggedString("You're it!") original._tag = "tag:nowhere.org:custom/foo-1.0.0" result = deepcopy(original) assert result == original assert result._tag == original._tag def test_tagged_string_copy(): original = TaggedString("You're it!") original._tag = "tag:nowhere.org:custom/foo-1.0.0" result = copy(original) assert result == original assert result._tag == original._tag def test_tagged_string_isinstance(): value = TaggedString("You're it!") assert isinstance(value, str) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/tests/test_treeutil.py0000644000537500020070000000275100000000000021130 0ustar00wjamiesonSTSCI\sciencefrom asdf import treeutil def test_get_children(): parent = ["foo", "bar"] assert treeutil.get_children(parent) == [(0, "foo"), (1, "bar")] parent = ("foo", "bar") assert treeutil.get_children(parent) == [(0, "foo"), (1, "bar")] parent = {"foo": "bar", "ding": "dong"} assert sorted(treeutil.get_children(parent)) == sorted([("foo", "bar"), ("ding", "dong")]) parent = "foo" assert treeutil.get_children(parent) == [] parent = None assert treeutil.get_children(parent) == [] def test_is_container(): for value in [[], {}, tuple()]: assert treeutil.is_container(value) is True for value in ["foo", 12, 13.9827]: assert treeutil.is_container(value) is False def 
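_walk_and_modify_sketch():
    # Illustrative sketch, not part of the original suite; the names here
    # are invented. It demonstrates the walk_and_modify contract relied on
    # by the test below: the callback may return a replacement node, and
    # the traversal returns a modified deep copy, leaving the input
    # untouched.
    tree = {"a": 1, "b": {"a": 1}}

    def _double_ints(node):
        if isinstance(node, int):
            return node * 2
        return node

    result = treeutil.walk_and_modify(tree, _double_ints)
    assert result == {"a": 2, "b": {"a": 2}}
    assert tree == {"a": 1, "b": {"a": 1}}


def 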
test_walk_and_modify_shared_references(): target = {"foo": "bar"} nested_in_dict = {"target": target} nested_in_list = [target] tree = {"target": target, "nested_in_dict": nested_in_dict, "nested_in_list": nested_in_list} assert tree["target"] is tree["nested_in_dict"]["target"] assert tree["target"] is tree["nested_in_list"][0] def _callback(node): if "foo" in node: return {"foo": "baz"} else: return node result = treeutil.walk_and_modify(tree, _callback) assert result is not tree assert result["target"] is not target assert result["target"]["foo"] == "baz" assert result["target"] is result["nested_in_dict"]["target"] assert result["target"] is result["nested_in_list"][0] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266458.0 asdf-2.9.2/asdf/tests/test_types.py0000644000537500020070000004661600000000000020447 0ustar00wjamiesonSTSCI\scienceimport io import os from fractions import Fraction import pytest import asdf from asdf import types from asdf import extension from asdf import util from asdf import versioning from asdf.exceptions import AsdfWarning, AsdfConversionWarning from . import helpers, CustomTestType, CustomExtension TEST_DATA_PATH = str(helpers.get_test_data_path('')) class Fractional2dCoord: def __init__(self, x, y): self.x = x self.y = y class FractionWithInverse(Fraction): def __init__(self, *args, **kwargs): self._inverse = None @property def inverse(self): return self._inverse @inverse.setter def inverse(self, value): self._inverse = value class FractionWithInverseType(asdf.CustomType): name = 'fraction_with_inverse' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [FractionWithInverse] @classmethod def to_tree(cls, node, ctx): return { "numerator": node.numerator, "denominator": node.denominator, "inverse": node.inverse } @classmethod def from_tree(cls, tree, ctx): result = FractionWithInverse( tree["numerator"], tree["denominator"] ) yield result result.inverse = tree["inverse"] class FractionWithInverseExtension(CustomExtension): @property def types(self): return [FractionWithInverseType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] def fractiontype_factory(): class FractionType(types.CustomType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [Fraction] handle_dynamic_subclasses = True @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return Fraction(tree[0], tree[1]) return FractionType def fractional2dcoordtype_factory(): FractionType = fractiontype_factory() class Fractional2dCoordType(types.CustomType): name = 'fractional_2d_coord' organization = 'nowhere.org' standard = 'custom' version = (1, 0, 0) types = [Fractional2dCoord] @classmethod def to_tree(cls, node, ctx): return { "x": node.x, "y": node.y } @classmethod def from_tree(cls, tree, ctx): return Fractional2dCoord(tree["x"], tree["y"]) class Fractional2dCoordExtension(CustomExtension): @property def types(self): return [FractionType, Fractional2dCoordType] return FractionType, Fractional2dCoordType, Fractional2dCoordExtension def test_custom_tag(): FractionType = fractiontype_factory() class FractionExtension(CustomExtension): @property def types(self): return [FractionType] class 
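_FractionPairMapping(FractionExtension):
        # Illustrative sketch, not part of the original test; the class name
        # is invented. It shows the declarative alternative to the callable
        # tag_mapping that FractionCallable below demonstrates: a list of
        # (tag prefix, URL prefix) pairs, as FractionWithInverseExtension
        # uses above.
        @property
        def tag_mapping(self):
            return [('tag:nowhere.org:custom',
                     'http://nowhere.org/schemas/custom{tag_suffix}')]

    class 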
FractionCallable(FractionExtension):
        @property
        def tag_mapping(self):
            def check(tag):
                prefix = 'tag:nowhere.org:custom'
                if tag.startswith(prefix):
                    return 'http://nowhere.org/schemas/custom' + tag[len(prefix):]
            return [check]

    yaml = """
a: !<tag:nowhere.org:custom/fraction-1.0.0> [2, 3]
b: !core/complex-1.0.0 0j
"""
    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff, extensions=FractionExtension()) as ff:
        assert ff.tree['a'] == Fraction(2, 3)
        buff = io.BytesIO()
        ff.write_to(buff)

    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff, extensions=FractionCallable()) as ff:
        assert ff.tree['a'] == Fraction(2, 3)
        buff = io.BytesIO()
        ff.write_to(buff)

    buff.close()


def test_version_mismatch():
    yaml = """
a: !core/complex-42.0.0 0j
"""
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(AsdfConversionWarning, match="tag:stsci.edu:asdf/core/complex"):
        with asdf.open(buff, ignore_version_mismatch=False) as ff:
            assert isinstance(ff.tree['a'], complex)

    # Make sure warning is repeatable
    buff.seek(0)
    with pytest.warns(AsdfConversionWarning, match="tag:stsci.edu:asdf/core/complex"):
        with asdf.open(buff, ignore_version_mismatch=False) as ff:
            assert isinstance(ff.tree['a'], complex)

    # Make sure the warning does not occur if it is being ignored (default)
    buff.seek(0)
    with helpers.assert_no_warnings(AsdfConversionWarning):
        with asdf.open(buff) as ff:
            assert isinstance(ff.tree['a'], complex)

    # If the major and minor match, but the patch doesn't, there
    # should still be a warning.
    yaml = """
a: !core/complex-1.0.1 0j
"""
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(AsdfConversionWarning, match="tag:stsci.edu:asdf/core/complex"):
        with asdf.open(buff, ignore_version_mismatch=False) as ff:
            assert isinstance(ff.tree['a'], complex)


def test_version_mismatch_file(tmpdir):
    testfile = os.path.join(str(tmpdir), 'mismatch.asdf')

    yaml = """
a: !core/complex-42.0.0 0j
"""
    buff = helpers.yaml_to_asdf(yaml)
    with open(testfile, 'wb') as handle:
        handle.write(buff.read())

    expected_uri = util.filepath_to_url(str(testfile))

    with pytest.warns(AsdfConversionWarning, match="tag:stsci.edu:asdf/core/complex"):
        with asdf.open(testfile, ignore_version_mismatch=False) as ff:
            assert ff._fname == expected_uri
            assert isinstance(ff.tree['a'], complex)


def test_version_mismatch_with_supported_versions():
    """Make sure that defining the supported_versions field eliminates
    the schema mismatch warning."""
    class CustomFlow:
        pass

    class CustomFlowType(CustomTestType):
        version = '1.1.0'
        supported_versions = ['1.0.0', '1.1.0']
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.1.0>
    c: 100
    d: 3.14
"""
    buff = helpers.yaml_to_asdf(yaml)
    with helpers.assert_no_warnings():
        asdf.open(buff, ignore_version_mismatch=False,
                  extensions=CustomFlowExtension())


def test_versioned_writing(monkeypatch):
    from ..tags.core.complex import ComplexType

    # Create a bogus version map
    monkeypatch.setitem(versioning._version_map, '42.0.0', {
        'FILE_FORMAT': '42.0.0',
        'YAML_VERSION': '1.1',
        'tags': {
            'tag:stsci.edu:asdf/core/complex': '42.0.0',
            'tag:stsci.edu:asdf/core/asdf': '1.0.0'
        },
        # We need to insert these explicitly since we're monkeypatching
        'core': {
            'tag:stsci.edu:asdf/core/complex': '42.0.0',
            'tag:stsci.edu:asdf/core/asdf': '1.0.0'
        },
        'standard': {}
    })

    # Add bogus version to supported versions
    monkeypatch.setattr(versioning, 'supported_versions',
        versioning.supported_versions + [versioning.AsdfVersion('42.0.0')]
    )

    class FancyComplexType(types.CustomType):
        name = 'core/complex'
        organization = 'stsci.edu'
        standard = 'asdf'
        version = (42, 0, 0)
        types = [complex]

        @classmethod
        def to_tree(cls, node, ctx):
            return ComplexType.to_tree(node, ctx)

        @classmethod
        def from_tree(cls, tree, ctx):
            return ComplexType.from_tree(tree, ctx)

    class FancyComplexExtension:
        @property
        def types(self):
            return [FancyComplexType]

        @property
        def tag_mapping(self):
            return []

        @property
        def url_mapping(self):
            return [('http://stsci.edu/schemas/asdf/core/complex-42.0.0',
                     util.filepath_to_url(TEST_DATA_PATH) +
                     '/complex-42.0.0.yaml')]

    tree = {'a': complex(0, -1)}

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree, version="42.0.0",
                       extensions=[FancyComplexExtension()])
    ff.write_to(buff)

    assert b'complex-42.0.0' in buff.getvalue()


def test_longest_match():
    class FancyComplexExtension:
        @property
        def types(self):
            return []

        @property
        def tag_mapping(self):
            return []

        @property
        def url_mapping(self):
            return [('http://stsci.edu/schemas/asdf/core/',
                     'FOOBAR/{url_suffix}')]

    l = extension.AsdfExtensionList(
        [extension.BuiltinExtension(), FancyComplexExtension()])

    assert l.url_mapping(
        'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') == 'FOOBAR/asdf-1.0.0'
    assert l.url_mapping(
        'http://stsci.edu/schemas/asdf/transform/transform-1.0.0') != 'FOOBAR/transform-1.0.0'


def test_module_versioning():
    class NoModuleType(types.CustomType):
        # It seems highly unlikely that this would be a real module
        requires = ['qkjvqdja']

    class HasCorrectPytest(types.CustomType):
        # This means it requires 1.0.0 or greater, so it should succeed
        requires = ['pytest-1.0.0']

    class DoesntHaveCorrectPytest(types.CustomType):
        requires = ['pytest-91984.1.7']

    nmt = NoModuleType()
    hcp = HasCorrectPytest()
    # perhaps an unfortunate acronym
    dhcp = DoesntHaveCorrectPytest()

    assert nmt.has_required_modules == False
    assert hcp.has_required_modules == True
    assert dhcp.has_required_modules == False


def test_undefined_tag():
    # This test makes sure that ASDF still returns meaningful structured data
    # even when it encounters a schema tag that it does not specifically
    # implement as an extension
    from numpy import array

    yaml = """
undefined_data:
  !<tag:nowhere.org:custom/undefined_tag-1.0.0>
    - 5
    - {'message': 'there is no tag'}
    - !core/ndarray-1.0.0
      [[1, 2, 3], [4, 5, 6]]
    - !<tag:nowhere.org:custom/also_undefined-1.3.0>
        - !core/ndarray-1.0.0 [[7],[8],[9],[10]]
        - !core/complex-1.0.0 3.14j
"""
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(Warning) as warning:
        afile = asdf.open(buff)
        missing = afile.tree['undefined_data']

    assert missing[0] == 5
    assert missing[1] == {'message': 'there is no tag'}
    assert (missing[2] == array([[1, 2, 3], [4, 5, 6]])).all()
    assert (missing[3][0] == array([[7],[8],[9],[10]])).all()
    assert missing[3][1] == 3.14j

    # There are two undefined tags, so we expect two warnings
    assert len(warning) == 2
    for i, tag in enumerate(["also_undefined-1.3.0", "undefined_tag-1.0.0"]):
        assert str(warning[i].message) == (
            "tag:nowhere.org:custom/{} is not recognized, converting to raw "
            "Python data structure".format(tag))

    # Make sure no warning occurs if explicitly ignored
    buff.seek(0)
    with helpers.assert_no_warnings():
        afile = asdf.open(buff, ignore_unrecognized_tag=True)


def test_newer_tag():
    # This test simulates a scenario where newer versions of CustomFlow
    # provide different keyword parameters that the older schema and tag class
    # do not account for. We want to test whether ASDF can handle this problem
    # gracefully and still provide meaningful data as output. The test case is
    # fairly contrived but we want to test whether ASDF can handle backwards
    # compatibility even when an explicit tag class for different versions of a
    # schema is not available.
    class CustomFlow:
        def __init__(self, c=None, d=None):
            self.c = c
            self.d = d

    class CustomFlowType(types.CustomType):
        version = '1.1.0'
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

        @classmethod
        def from_tree(cls, tree, ctx):
            kwargs = {}
            for name in tree:
                kwargs[name] = tree[name]
            return CustomFlow(**kwargs)

        @classmethod
        def to_tree(cls, data, ctx):
            return dict(c=data.c, d=data.d)

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    new_yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.1.0>
    c: 100
    d: 3.14
"""
    new_buff = helpers.yaml_to_asdf(new_yaml)
    new_data = asdf.open(new_buff, extensions=CustomFlowExtension())
    assert type(new_data.tree['flow_thing']) == CustomFlow

    old_yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.0.0>
    a: 100
    b: 3.14
"""
    old_buff = helpers.yaml_to_asdf(old_yaml)

    # We expect this warning since it will not be possible to convert version
    # 1.0.0 of CustomFlow to a CustomType (by design, for testing purposes).
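    # (asdf degrades gracefully here: it warns and hands back the raw data
    # structure instead of refusing to open the file.)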
    with pytest.warns(AsdfConversionWarning, match="Failed to convert tag:nowhere.org:custom/custom_flow-1.0.0"):
        asdf.open(old_buff, extensions=CustomFlowExtension())


def test_incompatible_version_check():
    class TestType0(types.CustomType):
        supported_versions = versioning.AsdfSpec('>=1.2.0')

    assert TestType0.incompatible_version('1.1.0') == True
    assert TestType0.incompatible_version('1.2.0') == False
    assert TestType0.incompatible_version('2.0.1') == False

    class TestType1(types.CustomType):
        supported_versions = versioning.AsdfVersion('1.0.0')

    assert TestType1.incompatible_version('1.0.0') == False
    assert TestType1.incompatible_version('1.1.0') == True

    class TestType2(types.CustomType):
        supported_versions = '1.0.0'

    assert TestType2.incompatible_version('1.0.0') == False
    assert TestType2.incompatible_version('1.1.0') == True

    class TestType3(types.CustomType):
        # This doesn't make much sense, but it's just for the sake of example
        supported_versions = ['1.0.0', versioning.AsdfSpec('>=2.0.0')]

    assert TestType3.incompatible_version('1.0.0') == False
    assert TestType3.incompatible_version('1.1.0') == True
    assert TestType3.incompatible_version('2.0.0') == False
    assert TestType3.incompatible_version('2.0.1') == False

    class TestType4(types.CustomType):
        supported_versions = ['1.0.0', versioning.AsdfVersion('1.1.0')]

    assert TestType4.incompatible_version('1.0.0') == False
    assert TestType4.incompatible_version('1.0.1') == True
    assert TestType4.incompatible_version('1.1.0') == False
    assert TestType4.incompatible_version('1.1.1') == True

    class TestType5(types.CustomType):
        supported_versions = \
            [versioning.AsdfSpec('<1.0.0'), versioning.AsdfSpec('>=2.0.0')]

    assert TestType5.incompatible_version('0.9.9') == False
    assert TestType5.incompatible_version('2.0.0') == False
    assert TestType5.incompatible_version('2.0.1') == False
    assert TestType5.incompatible_version('1.0.0') == True
    assert TestType5.incompatible_version('1.1.0') == True

    with pytest.raises(ValueError):
        class TestType6(types.CustomType):
            supported_versions = 'blue'

    with pytest.raises(ValueError):
        class TestType7(types.CustomType):
            supported_versions = ['1.1.0', '2.2.0', 'blue']


def test_supported_versions():
    class CustomFlow:
        def __init__(self, c=None, d=None):
            self.c = c
            self.d = d

    class CustomFlowType(types.CustomType):
        version = '1.1.0'
        supported_versions = [(1,0,0), versioning.AsdfSpec('>=1.1.0')]
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

        @classmethod
        def from_tree(cls, tree, ctx):
            # Convert old schema to new CustomFlow type
            if cls.version == '1.0.0':
                return CustomFlow(c=tree['a'], d=tree['b'])
            else:
                return CustomFlow(**tree)

        @classmethod
        def to_tree(cls, data, ctx):
            if cls.version == '1.0.0':
                return dict(a=data.c, b=data.d)
            else:
                return dict(c=data.c, d=data.d)

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    new_yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.1.0>
    c: 100
    d: 3.14
"""
    old_yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.0.0>
    a: 100
    b: 3.14
"""
    new_buff = helpers.yaml_to_asdf(new_yaml)
    new_data = asdf.open(new_buff, extensions=CustomFlowExtension())
    assert type(new_data.tree['flow_thing']) == CustomFlow

    old_buff = helpers.yaml_to_asdf(old_yaml)
    old_data = asdf.open(old_buff, extensions=CustomFlowExtension())
    assert type(old_data.tree['flow_thing']) == CustomFlow


def test_unsupported_version_warning():
    class CustomFlow:
        pass

    class CustomFlowType(types.CustomType):
        version = '1.0.0'
        supported_versions = [(1,0,0)]
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.1.0>
    c: 100
    d: 3.14
"""
    buff = helpers.yaml_to_asdf(yaml)

    with pytest.warns(AsdfConversionWarning,
            match="Version 1.1.0 of tag:nowhere.org:custom/custom_flow is not compatible"):
        asdf.open(buff, extensions=CustomFlowExtension())


def test_tag_without_schema(tmpdir):
    tmpfile = str(tmpdir.join('foo.asdf'))

    class FooType(types.CustomType):
        name = 'foo'

        def __init__(self, a, b):
            self.a = a
            self.b = b

        @classmethod
        def from_tree(cls, tree, ctx):
            return cls(tree['a'], tree['b'])

        @classmethod
        def to_tree(cls, node, ctx):
            return dict(a=node.a, b=node.b)

        def __eq__(self, other):
            return self.a == other.a and self.b == other.b

    class FooExtension:
        @property
        def types(self):
            return [FooType]

        @property
        def tag_mapping(self):
            return []

        @property
        def url_mapping(self):
            return []

    foo = FooType('hello', 42)
    tree = dict(foo=foo)

    with pytest.warns(AsdfWarning, match="Unable to locate schema file"):
        with asdf.AsdfFile(tree, extensions=FooExtension()) as af:
            af.write_to(tmpfile)

    with pytest.warns(AsdfWarning, match="Unable to locate schema file"):
        with asdf.AsdfFile(tree, extensions=FooExtension()) as ff:
            assert isinstance(ff.tree['foo'], FooType)
            assert ff.tree['foo'] == tree['foo']


def test_custom_reference_cycle(tmpdir):
    f1 = FractionWithInverse(3, 5)
    f2 = FractionWithInverse(5, 3)
    f1.inverse = f2
    f2.inverse = f1

    tree = {"fraction": f1}

    path = str(tmpdir.join("with_inverse.asdf"))
    with asdf.AsdfFile(tree, extensions=FractionWithInverseExtension()) as af:
        af.write_to(path)

    with asdf.open(path, extensions=FractionWithInverseExtension()) as af:
        assert af["fraction"].inverse.inverse is af["fraction"]


# asdf-2.9.2/asdf/tests/test_util.py

import io

import pytest

from asdf import util, generic_io
from asdf.extension import BuiltinExtension


def test_is_primitive():
    for value in [None, "foo", 1, 1.39, 1 + 1j, True]:
        assert util.is_primitive(value) is True

    for value in [[], tuple(), {}, set()]:
        assert util.is_primitive(value) is False


def test_not_set():
    assert util.NotSet != None

    assert repr(util.NotSet) == "NotSet"


class SomeClass:
    class SomeInnerClass:
        pass


def test_get_class_name():
    assert util.get_class_name(SomeClass()) == "asdf.tests.test_util.SomeClass"
    assert util.get_class_name(SomeClass, instance=False) == "asdf.tests.test_util.SomeClass"
    assert util.get_class_name(SomeClass.SomeInnerClass()) == "asdf.tests.test_util.SomeClass.SomeInnerClass"
    assert util.get_class_name(SomeClass.SomeInnerClass, instance=False) == "asdf.tests.test_util.SomeClass.SomeInnerClass"


def test_get_class_name_override():
    assert util.get_class_name(BuiltinExtension, instance=False) == "asdf.extension.BuiltinExtension"


def test_patched_urllib_parse():
    assert "asdf" in 
util.patched_urllib_parse.uses_relative assert "asdf" in util.patched_urllib_parse.uses_netloc import urllib.parse assert urllib.parse is not util.patched_urllib_parse assert "asdf" not in urllib.parse.uses_relative assert "asdf" not in urllib.parse.uses_netloc @pytest.mark.parametrize("pattern, uri, result", [ ("asdf://somewhere.org/tags/foo-1.0", "asdf://somewhere.org/tags/foo-1.0", True), ("asdf://somewhere.org/tags/foo-1.0", "asdf://somewhere.org/tags/bar-1.0", False), ("asdf://somewhere.org/tags/foo-*", "asdf://somewhere.org/tags/foo-1.0", True), ("asdf://somewhere.org/tags/foo-*", "asdf://somewhere.org/tags/bar-1.0", False), ("asdf://somewhere.org/tags/foo-*", "asdf://somewhere.org/tags/foo-extras/bar-1.0", False), ("asdf://*/tags/foo-*", "asdf://anywhere.org/tags/foo-4.9", True), ("asdf://*/tags/foo-*", "asdf://anywhere.org/tags/bar-4.9", False), ("asdf://*/tags/foo-*", "asdf://somewhere.org/tags/foo-extras/bar-4.9", False), ("asdf://**/*-1.0", "asdf://somewhere.org/tags/foo-1.0", True), ("asdf://**/*-1.0", "asdf://somewhere.org/tags/foo-2.0", False), ("asdf://**/*-1.0", "asdf://somewhere.org/tags/foo-extras/bar-1.0", True), ("asdf://**/*-1.0", "asdf://somewhere.org/tags/foo-extras/bar-2.0", False), ("asdf://somewhere.org/tags/foo-*", None, False), ("**", None, False), ]) def test_uri_match(pattern, uri, result): assert util.uri_match(pattern, uri) is result @pytest.mark.parametrize("content, expected_type", [ (b"#ASDF blahblahblah", util.FileType.ASDF), (b"SIMPLE = T blah blah blah blah", util.FileType.FITS), (b"SIMPLY NOT A FITS FILE", util.FileType.UNKNOWN), (b"#ASDQ", util.FileType.UNKNOWN), ]) def test_get_file_type(content, expected_type): fd = generic_io.get_file(io.BytesIO(content)) assert util.get_file_type(fd) == expected_type # Confirm that no content was lost assert fd.read() == content # We've historically had a problem detecting file type # of generic_io.InputStream: class OnlyHasAReadMethod: def __init__(self, content): self._fd = io.BytesIO(content) def read(self, size=-1): return self._fd.read(size) fd = generic_io.get_file(OnlyHasAReadMethod(content)) assert util.get_file_type(fd) == expected_type assert fd.read() == content ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/test_versioning.py0000644000537500020070000002070200000000000021452 0ustar00wjamiesonSTSCI\sciencefrom itertools import combinations import pytest from asdf.versioning import ( AsdfVersion, AsdfSpec, supported_versions, get_version_map, join_tag_version, ) from asdf.extension import default_extensions from asdf.schema import load_schema def test_version_constructor(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion((1,0,0)) ver2 = AsdfVersion([1,0,0]) assert str(ver0) == '1.0.0' assert str(ver1) == '1.0.0' assert str(ver2) == '1.0.0' def test_version_and_version_equality(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion('1.0.0') assert ver0 is not ver1 assert ver0 == ver1 assert ver1 == ver0 assert not (ver0 != ver1) assert not (ver1 != ver0) def test_version_and_string_equality(): version = AsdfVersion('1.0.0') string_ver = '1.0.0' assert version == string_ver assert string_ver == version assert not (version != string_ver) assert not (string_ver != version) def test_version_and_tuple_equality(): version = AsdfVersion('1.0.0') tuple_ver = (1,0,0) assert version == tuple_ver assert tuple_ver == version assert not (version != tuple_ver) assert not (tuple_ver != version) def test_version_and_version_inequality(): ver0 = 
AsdfVersion('1.0.0') ver1 = AsdfVersion('1.0.1') ver2 = AsdfVersion('1.1.0') ver3 = AsdfVersion('1.1.1') ver4 = AsdfVersion('2.0.0') ver5 = AsdfVersion('2.0.1') ver6 = AsdfVersion('2.1.0') ver7 = AsdfVersion('2.1.1') versions = [ver0, ver1, ver2, ver3, ver4, ver5, ver6, ver7] for x,y in combinations(versions, 2): assert not (x == y) assert x != y assert ver0 < ver1 < ver2 < ver3 < ver4 < ver5 < ver6 < ver7 assert ver7 > ver6 > ver5 > ver4 > ver3 > ver2 > ver1 > ver0 assert (ver0 < ver1 < ver2 < ver4 < ver3 < ver5 < ver6 < ver7) == False assert (ver7 > ver6 > ver5 > ver3 > ver4 > ver2 > ver1 > ver0) == False assert ver0 <= ver1 <= ver2 <= ver3 <= ver4 <= ver5 <= ver6 <= ver7 assert ver7 >= ver6 >= ver5 >= ver4 >= ver3 >= ver2 >= ver1 >= ver0 def test_version_and_string_inequality(): version = AsdfVersion('2.0.0') assert version > '1.0.0' assert version > '1.0.1' assert version > '1.1.0' assert version > '1.1.1' assert (version > '2.0.0') == False assert (version < '2.0.0') == False assert version < '2.0.1' assert version < '2.1.0' assert version < '2.1.1' assert version >= '1.0.0' assert version >= '1.0.1' assert version >= '1.1.0' assert version >= '1.1.1' assert version >= '2.0.0' assert version <= '2.0.0' assert version <= '2.0.1' assert version <= '2.1.0' assert version <= '2.1.1' assert '1.0.0' < version assert '1.0.1' < version assert '1.1.0' < version assert '1.1.1' < version assert ('2.0.0' < version) == False assert ('2.0.0' > version) == False assert '2.0.1' > version assert '2.1.0' > version assert '2.1.1' > version assert '1.0.0' <= version assert '1.0.1' <= version assert '1.1.0' <= version assert '1.1.1' <= version assert '2.0.0' <= version assert '2.0.0' >= version assert '2.0.1' >= version assert '2.1.0' >= version assert '2.1.1' >= version def test_version_and_tuple_inequality(): version = AsdfVersion('2.0.0') assert version > (1,0,0) assert version > (1,0,1) assert version > (1,1,0) assert version > (1,1,1) assert (version > (2,0,0)) == False assert (version < (2,0,0)) == False assert version < (2,0,1) assert version < (2,1,0) assert version < (2,1,1) assert version >= (1,0,0) assert version >= (1,0,1) assert version >= (1,1,0) assert version >= (1,1,1) assert version >= (2,0,0) assert version <= (2,0,0) assert version <= (2,0,1) assert version <= (2,1,0) assert version <= (2,1,1) assert (1,0,0) < version assert (1,0,1) < version assert (1,1,0) < version assert (1,1,1) < version assert ((2,0,0) < version) == False assert ((2,0,0) > version) == False assert (2,0,1) > version assert (2,1,0) > version assert (2,1,1) > version assert (1,0,0) <= version assert (1,0,1) <= version assert (1,1,0) <= version assert (1,1,1) <= version assert (2,0,0) <= version assert (2,0,0) >= version assert (2,0,1) >= version assert (2,1,0) >= version assert (2,1,1) >= version def test_spec_version_match(): spec = AsdfSpec('>=1.1.0') assert spec.match(AsdfVersion('1.1.0')) assert spec.match(AsdfVersion('1.2.0')) assert not spec.match(AsdfVersion('1.0.0')) assert not spec.match(AsdfVersion('1.0.9')) def test_spec_version_select(): spec = AsdfSpec('>=1.1.0') versions = [AsdfVersion(x) for x in ['1.0.0', '1.0.9', '1.1.0', '1.2.0']] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_version_filter(): spec = AsdfSpec('>=1.1.0') versions = [AsdfVersion(x) for x in ['1.0.0', '1.0.9', '1.1.0', '1.2.0']] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_string_match(): spec = 
AsdfSpec('>=1.1.0') assert spec.match('1.1.0') assert spec.match('1.2.0') assert not spec.match('1.0.0') assert not spec.match('1.0.9') def test_spec_string_select(): spec = AsdfSpec('>=1.1.0') versions = ['1.0.0', '1.0.9', '1.1.0', '1.2.0'] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_string_filter(): spec = AsdfSpec('>=1.1.0') versions = ['1.0.0', '1.0.9', '1.1.0', '1.2.0'] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_tuple_match(): spec = AsdfSpec('>=1.1.0') assert spec.match((1,1,0)) assert spec.match((1,2,0)) assert not spec.match((1,0,0)) assert not spec.match((1,0,9)) def test_spec_tuple_select(): spec = AsdfSpec('>=1.1.0') versions = [(1,0,0), (1,0,9), (1,1,0), (1,2,0)] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_tuple_filter(): spec = AsdfSpec('>=1.1.0') versions = [(1,0,0), (1,0,9), (1,1,0), (1,2,0)] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_equal(): """Make sure that equality means match""" spec = AsdfSpec('>=1.2.0') version0 = AsdfVersion('1.1.0') version1 = AsdfVersion('1.3.0') assert spec != version0 assert version0 != spec assert spec == version1 assert version1 == spec assert spec != '1.1.0' assert '1.1.0' != spec assert spec == '1.3.0' assert '1.3.0' == spec assert spec != (1, 1, 0) assert (1, 1, 0) != spec assert spec == (1, 3, 0) assert (1, 3, 0) == spec @pytest.mark.parametrize("version", supported_versions) def test_version_map_core_support(version): _test_version_map_support(version, "core") @pytest.mark.parametrize("version", supported_versions) @pytest.mark.xfail( reason="astropy does not yet explicitly support older schema versions", strict=True ) def test_version_map_standard_support(version): _test_version_map_support(version, "standard") def _test_version_map_support(version, schema_type): vm = get_version_map(version) type_index = default_extensions.extension_list.type_index class MockContext: def __init__(self): self._fname = None ctx = MockContext() for tag_base, tag_version in vm[schema_type].items(): tag = join_tag_version(tag_base, tag_version) try: load_schema(tag) except Exception: assert False, ( "ASDF Standard version {} requires support for ".format(version) + "{}, but the corresponding schema cannot be loaded.".format(tag) ) extension_type = type_index.from_yaml_tag(ctx, tag) assert extension_type is not None, ( "ASDF Standard version {} requires support for ".format(version) + "{}, but no ExtensionType exists to support that tag.".format(tag) ) assert extension_type.yaml_tag == tag, ( "ASDF Standard version {} requires support for ".format(version) + "{}, but no ExtensionType exists that explicitly ".format(tag) + "supports that version." ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/tests/test_yaml.py0000644000537500020070000002017200000000000020232 0ustar00wjamiesonSTSCI\scienceimport io from collections import namedtuple, OrderedDict from typing import NamedTuple import numpy as np import pytest import yaml import asdf from asdf import tagged from asdf import treeutil from asdf import yamlutil from asdf.compat.numpycompat import NUMPY_LT_1_14 from asdf.exceptions import AsdfWarning from . import helpers def test_ordered_dict(tmpdir): # Test that we can write out and read in ordered dicts. 
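    # (check_raw_yaml below confirms the mapping is emitted as a plain YAML
    # map, with no OrderedDict-specific tag leaking into the output.)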
    tree = {
        "ordered_dict": OrderedDict(
            [('first', 'foo'), ('second', 'bar'), ('third', 'baz')]),

        "unordered_dict": {
            'first': 'foo',
            'second': 'bar',
            'third': 'baz'
        }
    }

    def check_asdf(asdf):
        tree = asdf.tree

        assert isinstance(tree['ordered_dict'], OrderedDict)
        assert list(tree['ordered_dict'].keys()) == ['first', 'second', 'third']

        assert not isinstance(tree['unordered_dict'], OrderedDict)
        assert isinstance(tree['unordered_dict'], dict)

    def check_raw_yaml(content):
        assert b'OrderedDict' not in content

    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  raw_yaml_check_func=check_raw_yaml)


def test_unicode_write(tmpdir):
    # We want to write unicode out as regular utf-8-encoded
    # characters, not as escape sequences

    tree = {
        "ɐʇɐp‾ǝpoɔıun": 42,
        "ascii_only": "this is ascii"
    }

    def check_asdf(asdf):
        assert "ɐʇɐp‾ǝpoɔıun" in asdf.tree
        assert isinstance(asdf.tree['ascii_only'], str)

    def check_raw_yaml(content):
        # Ensure that unicode is written out as UTF-8 without escape
        # sequences
        assert "ɐʇɐp‾ǝpoɔıun".encode('utf-8') in content

        # Ensure that the unicode "tag" is not used
        assert b"unicode" not in content

    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  raw_yaml_check_func=check_raw_yaml)


def test_arbitrary_python_object():
    # Putting "just any old" Python object in the tree should raise an
    # exception.

    class Foo:
        pass

    tree = {'object': Foo()}

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree)
    with pytest.raises(yaml.YAMLError):
        ff.write_to(buff)


def run_tuple_test(tree, tmpdir):
    def check_asdf(asdf):
        assert isinstance(asdf.tree['val'], list)

    def check_raw_yaml(content):
        assert b'tuple' not in content

    # Ignore these warnings for the tests that don't actually test the warning
    init_options = dict(ignore_implicit_conversion=True)

    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  raw_yaml_check_func=check_raw_yaml,
                                  init_options=init_options)


def test_python_tuple(tmpdir):
    # We don't want to store tuples as tuples, because that's not a
    # built-in YAML data type. This test ensures that they are
    # converted to lists.

    tree = {
        "val": (1, 2, 3)
    }

    run_tuple_test(tree, tmpdir)


def test_named_tuple_collections(tmpdir):
    # Ensure that we are able to serialize a collections.namedtuple.

    nt = namedtuple("TestNamedTuple1", ("one", "two", "three"))

    tree = {
        "val": nt(1, 2, 3)
    }

    run_tuple_test(tree, tmpdir)


def test_named_tuple_typing(tmpdir):
    # Ensure that we are able to serialize a typing.NamedTuple.
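    # (As with the collections.namedtuple case above, it is implicitly
    # converted to a plain list on write; run_tuple_test verifies this.)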
    nt = NamedTuple("TestNamedTuple2", (("one", int), ("two", int),
                    ("three", int)))

    tree = {
        "val": nt(1, 2, 3)
    }

    run_tuple_test(tree, tmpdir)


def test_named_tuple_collections_recursive(tmpdir):
    nt = namedtuple("TestNamedTuple3", ("one", "two", "three"))

    tree = {
        "val": nt(1, 2, np.ones(3))
    }

    def check_asdf(asdf):
        assert (asdf.tree['val'][2] == np.ones(3)).all()

    init_options = dict(ignore_implicit_conversion=True)
    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  init_options=init_options)


def test_named_tuple_typing_recursive(tmpdir):
    nt = NamedTuple("TestNamedTuple4", (("one", int), ("two", int),
                    ("three", np.ndarray)))

    tree = {
        "val": nt(1, 2, np.ones(3))
    }

    def check_asdf(asdf):
        assert (asdf.tree['val'][2] == np.ones(3)).all()

    init_options = dict(ignore_implicit_conversion=True)
    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf,
                                  init_options=init_options)


def test_implicit_conversion_warning():
    nt = namedtuple("TestTupleWarning", ("one", "two", "three"))

    tree = {
        "val": nt(1, 2, np.ones(3))
    }

    with pytest.warns(AsdfWarning, match="Failed to serialize instance"):
        with asdf.AsdfFile(tree):
            pass

    with helpers.assert_no_warnings():
        with asdf.AsdfFile(tree, ignore_implicit_conversion=True):
            pass


@pytest.mark.xfail(reason='pyyaml has a bug and does not support tuple keys')
def test_python_tuple_key(tmpdir):
    """
    This tests whether tuple keys are round-tripped properly.

    As of this writing, this does not work in pyyaml but does work in
    ruamel.yaml. If/when we decide to switch to ruamel.yaml, this test should
    pass.
    """
    tree = { (42, 1): 'foo' }
    helpers.assert_roundtrip_tree(tree, tmpdir)


def test_tags_removed_after_load(tmpdir):
    tree = {
        "foo": ["bar", (1, 2, None)]
    }

    def check_asdf(asdf):
        for node in treeutil.iter_tree(asdf.tree):
            if node != asdf.tree:
                assert not isinstance(node, tagged.Tagged)

    helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check_asdf)


def test_explicit_tags():
    yaml = """#ASDF {}
%YAML 1.1
--- !<tag:stsci.edu:asdf/core/asdf-1.1.0>
foo: !<tag:stsci.edu:asdf/core/ndarray-1.0.0> [1, 2, 3]
...
""".format(asdf.versioning.default_version)

    # Check that fully qualified explicit tags work
    buff = helpers.yaml_to_asdf(yaml, yaml_headers=False)

    with asdf.open(buff) as ff:
        assert all(ff.tree['foo'] == [1, 2, 3])


def test_yaml_internal_reference(tmpdir):
    # Test that YAML internal references (anchors and aliases) work,
    # as well as recursive data structures.
    d = {
        'foo': '2',
    }
    d['bar'] = d

    l = []
    l.append(l)

    tree = {
        'first': d,
        'second': d,
        'list': l
    }

    def check_yaml(content):
        assert b'list:&id002-*id002' in b''.join(content.split())

    helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_yaml)


def test_yaml_nan_inf():
    tree = {
        'a': np.nan,
        'b': np.inf,
        'c': -np.inf
    }

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree)
    ff.write_to(buff)
    buff.seek(0)
    with asdf.open(buff) as ff:
        assert np.isnan(ff.tree['a'])
        assert np.isinf(ff.tree['b'])
        assert np.isinf(ff.tree['c'])


def test_tag_object():
    class SomeObject:
        pass

    tag = 'tag:nowhere.org:none/some/thing'
    instance = tagged.tag_object(tag, SomeObject())
    assert instance._tag == tag


@pytest.mark.parametrize("numpy_value,expected_value", [
    (np.str_("foo"), "foo"),
    (np.bytes_("foo"), b"foo"),
    (np.float16(3.14), 3.14),
    (np.float32(3.14159), 3.14159),
    (np.float64(3.14159), 3.14159),
    # Evidently float128 is not available on Windows:
    (getattr(np, "float128", np.float64)(3.14159), 3.14159),
    (np.int8(42), 42),
    (np.int16(42), 42),
    (np.int32(42), 42),
    (np.int64(42), 42),
    (np.longlong(42), 42),
    (np.uint8(42), 42),
    (np.uint16(42), 42),
    (np.uint32(42), 42),
    (np.uint64(42), 42),
    (np.ulonglong(42), 42),
])
def test_numpy_scalar(numpy_value, expected_value):
    ctx = asdf.AsdfFile({"value": numpy_value})
    tree = ctx.tree
    buffer = io.BytesIO()

    yamlutil.dump_tree(tree, buffer, ctx)
    buffer.seek(0)

    if isinstance(expected_value, float) and NUMPY_LT_1_14:
        assert yamlutil.load_tree(buffer)["value"] == pytest.approx(
            expected_value, rel=0.001)
    else:
        assert yamlutil.load_tree(buffer)["value"] == expected_value


# asdf-2.9.2/asdf/treeutil.py

"""
Utility functions for managing tree-like data structures.
"""

import warnings
import types
from contextlib import contextmanager

from . import tagged
from .exceptions import AsdfWarning


__all__ = ["walk", "iter_tree", "walk_and_modify", "get_children",
           "is_container", "PendingValue"]


def walk(top, callback):
    """
    Walk through a tree of objects, calling a given function at each node.

    Parameters
    ----------
    top : object
        The root of the tree. May be a dict, list or other Python object.

    callback : callable
        A function to call at each node in the tree.

        The callback is called on an instance after all of its
        children have been visited (depth-first order).
    """
    for x in iter_tree(top):
        callback(x)


def iter_tree(top):
    """
    Iterate over all nodes in a tree, in depth-first order.

    Parameters
    ----------
    top : object
        The root of the tree. May be a dict, list or other Python object.

    Returns
    -------
    nodes : generator
        A generator that yields each node in the tree, visiting
        children before their parents.
""" seen = set() def recurse(tree): tree_id = id(tree) if tree_id in seen: return if isinstance(tree, (list, tuple)): seen.add(tree_id) for val in tree: for sub in recurse(val): yield sub seen.remove(tree_id) elif isinstance(tree, dict): seen.add(tree_id) for val in tree.values(): for sub in recurse(val): yield sub seen.remove(tree_id) yield tree return recurse(top) class _TreeModificationContext: """ Context of a call to walk_and_modify, which includes a map of already modified nodes, a list of generators to drain before exiting the call, and a set of node object ids that are currently pending modification. Instances of this class are context managers that track how many times they have been entered, and only drain generators and reset themselves when exiting the outermost context. They are also collections that map unmodified nodes to the corresponding modified result. """ def __init__(self): self._map = {} self._generators = [] self._depth = 0 self._pending = set() def add_generator(self, generator): """ Add a generator that should be drained before exiting the outermost call to walk_and_modify. """ self._generators.append(generator) def is_pending(self, node): """ Return True if the node is already being modified. This will not be the case unless the node contains a reference to itself somewhere among its descendents. """ return id(node) in self._pending @contextmanager def pending(self, node): """ Context manager that marks a node as pending for the duration of the context. """ if id(node) in self._pending: raise RuntimeError( "Unhandled cycle in tree. This is possibly a bug " "in extension code, which should be yielding " "nodes that may contain reference cycles." ) self._pending.add(id(node)) try: yield self finally: self._pending.remove(id(node)) def __enter__(self): self._depth += 1 return self def __exit__(self, exc_type, exc_value, traceback): self._depth -= 1 if self._depth == 0: # If we're back to 0 depth, then we're exiting # the outermost context, so it's time to drain # the generators and reset this object for next # time. if exc_type is None: self._drain_generators() self._generators = [] self._map = {} self._pending = set() def _drain_generators(self): """ Drain each generator we've accumulated during this call to walk_and_modify. """ # Generator code may add yet more generators # to the list, so we need to loop until the # list is empty. while len(self._generators) > 0: generators = self._generators self._generators = [] for generator in generators: for _ in generator: # Subsequent yields of the generator should # always return the same value. What we're # really doing here is executing the generator's # remaining code, to further modify that first # yielded object. pass def __contains__(self, node): return id(node) in self._map def __getitem__(self, node): return self._map[id(node)][1] def __setitem__(self, node, result): if id(node) in self._map: # This indicates that an already defined # modified node is being replaced, which is an # error because it breaks references within the # tree. raise RuntimeError("Node already has an associated result") self._map[id(node)] = (node, result) class _PendingValue: """ Class of the PendingValue singleton instance. The presence of the instance in an asdf tree indicates that extension code is failing to handle reference cycles. """ def __repr__(self): return "PendingValue" PendingValue = _PendingValue() class _RemoveNode: """ Class of the RemoveNode singleton instance. 
This instance is used as a signal for `asdf.treeutil.walk_and_modify` to remove the node received by the callback. """ def __repr__(self): return "RemoveNode" RemoveNode = _RemoveNode() def walk_and_modify(top, callback, ignore_implicit_conversion=False, postorder=True, _context=None): """Modify a tree by walking it with a callback function. It also has the effect of doing a deep copy. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. callback : callable A function to call at each node in the tree. It takes either one or two arguments: - an instance from the tree - a json id (optional) It may return a different instance in order to modify the tree. If the singleton instance `asdf.treeutil.RemoveNode` is returned, the node will be removed from the tree. The json id is the context under which any relative URLs should be resolved. It may be `None` if no ids are in the file The tree is traversed depth-first, with order specified by the ``postorder`` argument. postorder : bool Determines the order in which the callable is invoked on nodes of the tree. If `True`, the callable will be invoked on children before their parents. If `False`, the callable is invoked on the parents first. Defaults to `True`. ignore_implicit_conversion : bool Controls whether warnings should be issued when implicitly converting a given type instance in the tree into a serializable object. The primary case for this is currently `namedtuple`. Defaults to `False`. Returns ------- tree : object The modified tree. """ callback_arity = callback.__code__.co_argcount if callback_arity < 1 or callback_arity > 2: raise ValueError("Expected callback to accept one or two arguments") def _handle_generator(result): # If the result is a generator, generate one value to # extract the true result, then register the generator # to be drained later. if isinstance(result, types.GeneratorType): generator = result result = next(generator) _context.add_generator(generator) return result def _handle_callback(node, json_id): if callback_arity == 1: result = callback(node) else: result = callback(node, json_id) return _handle_generator(result) def _handle_mapping(node, json_id): result = node.__class__() if isinstance(node, tagged.Tagged): result._tag = node._tag pending_items = {} for key, value in node.items(): if _context.is_pending(value): # The child node is pending modification, which means # it must be its own ancestor. Assign the special # PendingValue instance for now, and note that we'll # need to fill in the real value later. pending_items[key] = value result[key] = PendingValue else: value = _recurse(value, json_id) if value is not RemoveNode: result[key] = value yield result if len(pending_items) > 0: # Now that we've yielded, the pending children should # be available. for key, value in pending_items.items(): value = _recurse(value, json_id) if value is not RemoveNode: result[key] = value else: # The callback may have decided to delete # this node after all. del result[key] def _handle_mutable_sequence(node, json_id): result = node.__class__() if isinstance(node, tagged.Tagged): result._tag = node._tag pending_items = {} for i, value in enumerate(node): if _context.is_pending(value): # The child node is pending modification, which means # it must be its own ancestor. Assign the special # PendingValue instance for now, and note that we'll # need to fill in the real value later. 
                pending_items[i] = value
                result.append(PendingValue)
            else:
                result.append(_recurse(value, json_id))

        yield result

        for i, value in pending_items.items():
            # Now that we've yielded, the pending children should
            # be available.
            result[i] = _recurse(value, json_id)

    def _handle_immutable_sequence(node, json_id):
        # Immutable sequences containing themselves are impossible
        # to construct (well, maybe possible in a C extension, but
        # we're not going to worry about that), so we don't need
        # to yield here.
        contents = [_recurse(value, json_id) for value in node]

        try:
            result = node.__class__(contents)
            if isinstance(node, tagged.Tagged):
                result._tag = node._tag
        except TypeError:
            # The derived class signature is different, so simply store the
            # list representing the contents. Currently this is primarily
            # intended to handle namedtuple and NamedTuple instances.
            if not ignore_implicit_conversion:
                msg = "Failed to serialize instance of {}, converting to list instead"
                warnings.warn(msg.format(type(node)), AsdfWarning)
            result = contents
        return result

    def _handle_children(node, json_id):
        if isinstance(node, dict):
            result = _handle_mapping(node, json_id)
        elif isinstance(node, tuple):
            result = _handle_immutable_sequence(node, json_id)
        elif isinstance(node, list):
            result = _handle_mutable_sequence(node, json_id)
        else:
            result = node

        return _handle_generator(result)

    def _recurse(node, json_id=None):
        if node in _context:
            # The node's modified result has already been
            # created, all we need to do is return it. This
            # occurs when the tree contains multiple references
            # to the same object id.
            return _context[node]

        # Inform the context that we're going to start modifying
        # this node.
        with _context.pending(node):
            # Take note of the "id" field, in case we're modifying
            # a schema and need to know the namespace for resolving
            # URIs. Ignore an id that is not a string, since it may
            # be an object defining an id property and not an id
            # itself (this is common in metaschemas).
            if isinstance(node, dict) and "id" in node and isinstance(node["id"], str):
                json_id = node["id"]

            if postorder:
                # If this is a postorder modification, invoke the
                # callback on this node's children first.
                result = _handle_children(node, json_id)
                result = _handle_callback(result, json_id)
            else:
                # Otherwise, invoke the callback on the node first,
                # then its children.
                result = _handle_callback(node, json_id)
                result = _handle_children(result, json_id)

            # Store the result in the context, in case there are
            # additional references to the same node elsewhere in
            # the tree.
            _context[node] = result

        return result

    if _context is None:
        _context = _TreeModificationContext()

    with _context:
        return _recurse(top)
        # Generators will be drained here, if this is the outermost
        # call to walk_and_modify.


def get_children(node):
    """
    Retrieve the children (and their dict keys or list/tuple indices) of
    an ASDF tree node.

    Parameters
    ----------
    node : object
        an ASDF tree node

    Returns
    -------
    list of (object, object) tuples
        list of (identifier, child node) tuples, or empty list if the
        node has no children (either it is an empty container, or is
        a non-container type)
    """
    if isinstance(node, dict):
        return list(node.items())
    elif isinstance(node, list) or isinstance(node, tuple):
        return list(enumerate(node))
    else:
        return []


def is_container(node):
    """
    Determine if an ASDF tree node is an instance of a "container" type
    (i.e., value may contain child nodes).
Parameters ---------- node : object an ASDF tree node Returns ------- bool True if node is a container, False otherwise """ return isinstance(node, dict) or isinstance(node,list) or isinstance(node, tuple) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/type_index.py0000644000537500020070000003202100000000000017233 0ustar00wjamiesonSTSCI\scienceimport bisect from functools import lru_cache from collections import OrderedDict from . import util from .versioning import (AsdfVersion, get_version_map, default_version, split_tag_version, join_tag_version) __all__ = ['AsdfTypeIndex'] _BASIC_PYTHON_TYPES = [str, int, float, list, dict, tuple] class _AsdfWriteTypeIndex: """ The _AsdfWriteTypeIndex is a helper class for AsdfTypeIndex that manages an index of types for writing out ASDF files, i.e. from converting from custom types to tagged_types. It is not always the inverse of the mapping from tags to custom types, since there are likely multiple versions present for a given tag. This uses the `version_map.yaml` file that ships with the ASDF standard to figure out which schemas correspond to a particular version of the ASDF standard. An AsdfTypeIndex manages multiple _AsdfWriteTypeIndex instances for each version the user may want to write out, and they are instantiated on-demand. If version is ``'latest'``, it will just use the highest-numbered versions of each of the schemas. This is currently only used to aid in testing. In the future, this may be renamed to _ExtensionWriteTypeIndex since it is not specific to classes that inherit `AsdfType`. """ _version_map = None def __init__(self, version, index): self._version = version self._type_by_cls = {} self._type_by_name = {} self._type_by_subclasses = {} self._class_by_subclass = {} self._types_with_dynamic_subclasses = {} self._extension_by_cls = {} self._extensions_used = set() try: version_map = get_version_map(self._version) core_version_map = version_map['core'] standard_version_map = version_map['standard'] except ValueError: raise ValueError( "Don't know how to write out ASDF version {0}".format( self._version)) # Process all types defined in the ASDF version map. It is important to # make sure that tags that are associated with the core part of the # standard are processed first in order to handle subclasses properly. for name, _version in core_version_map.items(): self._add_by_tag(index, name, AsdfVersion(_version)) for name, _version in standard_version_map.items(): self._add_by_tag(index, name, AsdfVersion(_version)) # Now add any extension types that aren't known to the ASDF standard. # This expects that all types defined by ASDF will be encountered # before any types that are defined by external packages. This # allows external packages to override types that are also defined # by ASDF. The ordering is guaranteed due to the use of OrderedDict # for _versions_by_type_name, and due to the fact that the built-in # extension will always be processed first. for name, versions in index._versions_by_type_name.items(): if name not in self._type_by_name: self._add_by_tag(index, name, versions[-1]) for asdftype in index._unnamed_types: self._add_all_types(index, asdftype) def _should_overwrite(self, cls, new_type): existing_type = self._type_by_cls[cls] # Types that are provided by extensions from other packages should # only override the type index corresponding to the latest version # of ASDF. 
if existing_type.tag_base() != new_type.tag_base(): return self._version == default_version return True def _add_type_to_index(self, index, cls, typ): if cls in self._type_by_cls and not self._should_overwrite(cls, typ): return self._type_by_cls[cls] = typ self._extension_by_cls[cls] = index._extension_by_type[typ] def _add_subclasses(self, index, typ, asdftype): for subclass in util.iter_subclasses(typ): # Do not overwrite the tag type for an existing subclass if the # new tag serializes a class that is higher in the type # hierarchy than the existing subclass. if subclass in self._class_by_subclass: if issubclass(self._class_by_subclass[subclass], typ): # Allow for cases where a subclass tag is being # overridden by a tag from another extension. if (self._extension_by_cls[subclass] == index._extension_by_type[asdftype]): continue self._class_by_subclass[subclass] = typ self._type_by_subclasses[subclass] = asdftype self._extension_by_cls[subclass] = index._extension_by_type[asdftype] def _add_all_types(self, index, asdftype): self._add_type_to_index(index, asdftype, asdftype) for typ in asdftype.types: self._add_type_to_index(index, typ, asdftype) self._add_subclasses(index, typ, asdftype) if asdftype.handle_dynamic_subclasses: for typ in asdftype.types: self._types_with_dynamic_subclasses[typ] = asdftype def _add_by_tag(self, index, name, version): tag = join_tag_version(name, version) if tag in index._type_by_tag: asdftype = index._type_by_tag[tag] self._type_by_name[name] = asdftype self._add_all_types(index, asdftype) def _mark_used_extension(self, custom_type, serialization_context): extension = self._extension_by_cls[custom_type] self._extensions_used.add(extension) if serialization_context is not None: serialization_context._mark_extension_used(extension) def _process_dynamic_subclass(self, custom_type, serialization_context): for key, val in self._types_with_dynamic_subclasses.items(): if issubclass(custom_type, key): self._type_by_cls[custom_type] = val self._mark_used_extension(key, serialization_context) return val return None def from_custom_type(self, custom_type, _serialization_context=None): """ Given a custom type, return the corresponding `ExtensionType` definition. """ asdftype = None # Try to find an exact class match first... try: asdftype = self._type_by_cls[custom_type] except KeyError: # ...failing that, match any subclasses try: asdftype = self._type_by_subclasses[custom_type] except KeyError: # ...failing that, try any subclasses that we couldn't # cache in _type_by_subclasses. This generally only # includes classes that are created dynamically post # Python-import, e.g. astropy.modeling._CompoundModel # subclasses. return self._process_dynamic_subclass(custom_type, _serialization_context) if asdftype is not None: extension = self._extension_by_cls.get(custom_type) if extension is not None: self._mark_used_extension(custom_type, _serialization_context) else: # Handle the case where the dynamic subclass was identified as # a proper subclass above, but it has not yet been registered # as such. self._process_dynamic_subclass(custom_type, _serialization_context) return asdftype class AsdfTypeIndex: """ An index of the known `ExtensionType` classes. In the future this class may be renamed to ExtensionTypeIndex, since it is not specific to classes that inherit `AsdfType`. """ def __init__(self): self._write_type_indices = {} self._type_by_tag = {} # Use OrderedDict here to preserve the order in which types are added # to the type index. 
Since the ASDF built-in extension is always # processed first, this ensures that types defined by external packages # will always override corresponding types that are defined by ASDF # itself. However, if two different external packages define tags for # the same type, the result is currently undefined. self._versions_by_type_name = OrderedDict() self._best_matches = {} self._unnamed_types = set() self._hooks_by_type = {} self._all_types = set() self._has_warned = {} self._extension_by_type = {} def add_type(self, asdftype, extension): """ Add a type to the index. """ self._all_types.add(asdftype) self._extension_by_type[asdftype] = extension if asdftype.yaml_tag is None and asdftype.name is None: return if isinstance(asdftype.name, list): yaml_tags = [asdftype.make_yaml_tag(name) for name in asdftype.name] elif isinstance(asdftype.name, str): yaml_tags = [asdftype.yaml_tag] elif asdftype.name is None: yaml_tags = [] else: raise TypeError("name must be a string, list or None") for yaml_tag in yaml_tags: self._type_by_tag[yaml_tag] = asdftype name, version = split_tag_version(yaml_tag) versions = self._versions_by_type_name.get(name) if versions is None: self._versions_by_type_name[name] = [version] else: idx = bisect.bisect_left(versions, version) if idx == len(versions) or versions[idx] != version: versions.insert(idx, version) if not len(yaml_tags): self._unnamed_types.add(asdftype) def from_custom_type(self, custom_type, version=default_version, _serialization_context=None): """ Given a custom type, return the corresponding `ExtensionType` definition. """ # Basic Python types should not ever have an AsdfType associated with # them. if custom_type in _BASIC_PYTHON_TYPES: return None write_type_index = self._write_type_indices.get(str(version)) if write_type_index is None: write_type_index = _AsdfWriteTypeIndex(version, self) self._write_type_indices[version] = write_type_index return write_type_index.from_custom_type(custom_type, _serialization_context=_serialization_context) def fix_yaml_tag(self, ctx, tag): """ Given a YAML tag, adjust it to the best supported version. If there is no exact match, this finds the newest version understood that is still less than the version in file. Or, the earliest understood version if none are less than the version in the file. """ if tag in self._type_by_tag: return tag if tag in self._best_matches: best_tag = self._best_matches[tag] ctx._warn_tag_mismatch(tag, best_tag) return best_tag name, version = split_tag_version(tag) versions = self._versions_by_type_name.get(name) if versions is None: return tag # The versions list is kept sorted, so bisect can be used to # quickly find the best option. i = bisect.bisect_left(versions, version) i = max(0, i - 1) best_version = versions[i] best_tag = join_tag_version(name, best_version) ctx._warn_tag_mismatch(tag, best_tag) self._best_matches[tag] = best_tag return best_tag def from_yaml_tag(self, ctx, tag, _serialization_context=None): """ From a given YAML tag string, return the corresponding AsdfType definition. """ tag = self.fix_yaml_tag(ctx, tag) asdftype = self._type_by_tag.get(tag) if asdftype is not None and _serialization_context is not None: _serialization_context._mark_extension_used(self._extension_by_type[asdftype]) return asdftype @lru_cache(5) def has_hook(self, hook_name): """ Returns `True` if the given hook name exists on any of the managed types. 
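# --- Illustrative sketch (not part of the original module) ---
# fix_yaml_tag above returns exact matches immediately; only when the tag's
# version is unknown does it bisect the sorted version list to pick the
# newest understood version below the request, or the earliest known version
# if none are below. Plain strings stand in for AsdfVersion objects here:
import bisect

known = ['1.0.0', '1.1.0', '1.3.0']   # hypothetical sorted version list

def _best_version(requested):
    i = bisect.bisect_left(known, requested)
    return known[max(0, i - 1)]

assert _best_version('1.2.0') == '1.1.0'   # newest version below the request
assert _best_version('0.9.0') == '1.0.0'   # earliest version if none are below
# --- end sketch ---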
""" for cls in self._all_types: if hasattr(cls, hook_name): return True return False def get_hook_for_type(self, hookname, typ, version=default_version): """ Get the hook function for the given type, if it exists, else return None. """ hooks = self._hooks_by_type.setdefault(hookname, {}) hook = hooks.get(typ, None) if hook is not None: return hook tag = self.from_custom_type(typ, version) if tag is not None: hook = getattr(tag, hookname, None) if hook is not None: hooks[typ] = hook return hook hooks[typ] = None return None def get_extensions_used(self, version=default_version): write_type_index = self._write_type_indices.get(str(version)) if write_type_index is None: return [] return list(write_type_index._extensions_used) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/types.py0000644000537500020070000003767400000000000016252 0ustar00wjamiesonSTSCI\scienceimport re import importlib from copy import copy from . import tagged from . import util from .versioning import AsdfVersion, AsdfSpec __all__ = ['format_tag', 'CustomType'] # regex used to parse module name from optional version string MODULE_RE = re.compile(r'([a-zA-Z]+)(-(\d+\.\d+\.\d+))?') def format_tag(organization, standard, version, tag_name): """ Format a YAML tag. """ tag = 'tag:{0}:{1}/{2}'.format(organization, standard, tag_name) if version is None: return tag if isinstance(version, AsdfSpec): version = str(version.spec) return "{0}-{1}".format(tag, version) _all_asdftypes = set() def _from_tree_tagged_missing_requirements(cls, tree, ctx): # A special version of AsdfType.from_tree_tagged for when the # required dependencies for an AsdfType are missing. plural, verb = ('s', 'are') if len(cls.requires) else ('', 'is') message = "{0} package{1} {2} required to instantiate '{3}'".format( util.human_list(cls.requires), plural, verb, tree._tag) # This error will be handled by yamlutil.tagged_tree_to_custom_tree, which # will cause a warning to be issued indicating that the tree failed to be # converted. raise TypeError(message) class ExtensionTypeMeta(type): """ Custom class constructor for tag types. 
""" _import_cache = {} @classmethod def _has_required_modules(cls, requires): for string in requires: has_module = True match = MODULE_RE.match(string) modname, _, version = match.groups() if modname in cls._import_cache: if not cls._import_cache[modname]: return False try: module = importlib.import_module(modname) if version and hasattr(module, '__version__'): if module.__version__ < version: has_module = False except ImportError: has_module = False finally: cls._import_cache[modname] = has_module if not has_module: return False return True @classmethod def _find_in_bases(cls, attrs, bases, name, default=None): if name in attrs: return attrs[name] for base in bases: if hasattr(base, name): return getattr(base, name) return default @property def versioned_siblings(mcls): return getattr(mcls, '__versioned_siblings') or [] def __new__(mcls, name, bases, attrs): requires = mcls._find_in_bases(attrs, bases, 'requires', []) if not mcls._has_required_modules(requires): attrs['from_tree_tagged'] = classmethod( _from_tree_tagged_missing_requirements) attrs['types'] = [] attrs['has_required_modules'] = False else: attrs['has_required_modules'] = True types = mcls._find_in_bases(attrs, bases, 'types', []) new_types = [] for typ in types: if isinstance(typ, str): typ = util.resolve_name(typ) new_types.append(typ) attrs['types'] = new_types cls = super(ExtensionTypeMeta, mcls).__new__(mcls, name, bases, attrs) if hasattr(cls, 'version'): if not isinstance(cls.version, (AsdfVersion, AsdfSpec)): cls.version = AsdfVersion(cls.version) if hasattr(cls, 'name'): if isinstance(cls.name, str): if 'yaml_tag' not in attrs: cls.yaml_tag = cls.make_yaml_tag(cls.name) elif isinstance(cls.name, list): pass elif cls.name is not None: raise TypeError("name must be string or list") if hasattr(cls, 'supported_versions'): if not isinstance(cls.supported_versions, (list, set)): cls.supported_versions = [cls.supported_versions] supported_versions = set() for version in cls.supported_versions: if not isinstance(version, (AsdfVersion, AsdfSpec)): version = AsdfVersion(version) # This should cause an exception for invalid input supported_versions.add(version) # We need to convert back to a list here so that the 'in' operator # uses actual comparison instead of hash equality cls.supported_versions = list(supported_versions) siblings = list() for version in cls.supported_versions: if version != cls.version: new_attrs = copy(attrs) new_attrs['version'] = version new_attrs['supported_versions'] = set() new_attrs['_latest_version'] = cls.version siblings.append( ExtensionTypeMeta. __new__(mcls, name, bases, new_attrs)) setattr(cls, '__versioned_siblings', siblings) return cls class AsdfTypeMeta(ExtensionTypeMeta): """ Keeps track of `AsdfType` subclasses that are created, and stores them in `AsdfTypeIndex`. """ def __new__(mcls, name, bases, attrs): cls = super(AsdfTypeMeta, mcls).__new__(mcls, name, bases, attrs) # Classes using this metaclass get added to the list of built-in # extensions if name != "AsdfType": _all_asdftypes.add(cls) return cls class ExtensionType: """ The base class of all custom types in the tree. Besides the attributes defined below, most subclasses will also override `to_tree` and `from_tree`. """ name = None organization = 'stsci.edu' standard = 'asdf' version = (1, 0, 0) supported_versions = set() types = [] handle_dynamic_subclasses = False validators = {} requires = [] yaml_tag = None @classmethod def names(cls): """ Returns the name(s) represented by this tag type as a list. 
While some tag types represent only a single custom type, others represent multiple types. In the latter case, the `name` attribute of the extension is actually a list, not simply a string. This method normalizes the value of `name` by returning a list in all cases. Returns ------- `list` of names represented by this tag type """ if cls.name is None: return None return cls.name if isinstance(cls.name, list) else [cls.name] @classmethod def make_yaml_tag(cls, name, versioned=True): """ Given the name of a type, returns a string representing its YAML tag. Parameters ---------- name : str The name of the type. In most cases this will correspond to the `name` attribute of the tag type. However, it is passed as a parameter since some tag types represent multiple custom types. versioned : bool If `True`, the tag will be versioned. Otherwise, a YAML tag without a version will be returned. Returns ------- `str` representing the YAML tag """ return format_tag( cls.organization, cls.standard, cls.version if versioned else None, name) @classmethod def tag_base(cls): """ Returns the base of the YAML tag for types represented by this class. This method returns the portion of the tag that represents the standard and the organization of any type represented by this class. Returns ------- `str` representing the base of the YAML tag """ return cls.make_yaml_tag('', versioned=False) @classmethod def to_tree(cls, node, ctx): """ Converts instances of custom types into YAML representations. This method should be overridden by custom extension classes in order to define how custom types are serialized into YAML. The method must return a single Python object corresponding to one of the basic YAML types (dict, list, str, or number). However, the types can be nested and combined in order to represent more complex custom types. This method is called as part of the process of writing an `AsdfFile` object. Whenever a custom type (or a subclass of that type) that is listed in the `types` attribute of this class is encountered, this method will be used to serialize that type. The name `to_tree` refers to the act of converting a custom type into part of a YAML object tree. Parameters ---------- node : `object` Instance of a custom type to be serialized. Will be an instance (or an instance of a subclass) of one of the types listed in the `types` attribute of this class. ctx : `AsdfFile` An instance of the `AsdfFile` object that is being written out. Returns ------- A basic YAML type (`dict`, `list`, `str`, `int`, `float`, or `complex`) representing the properties of the custom type to be serialized. These types can be nested in order to represent more complex custom types. """ return node.__class__.__bases__[0](node) @classmethod def to_tree_tagged(cls, node, ctx): """ Converts instances of custom types into tagged objects. It is more common for custom tag types to override `to_tree` instead of this method. This method should be overridden if it is necessary to modify the YAML tag that will be used to tag this object. Parameters ---------- node : `object` Instance of a custom type to be serialized. Will be an instance (or an instance of a subclass) of one of the types listed in the `types` attribute of this class. ctx : `AsdfFile` An instance of the `AsdfFile` object that is being written out. Returns ------- An instance of `asdf.tagged.Tagged`. 
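# --- Illustrative sketch (not part of the original module) ---
# How the tag helpers above combine for a user-defined type; FractionType and
# its attribute values are invented for illustration:
from asdf.types import CustomType

class FractionType(CustomType):
    name = 'fraction'
    organization = 'example.org'
    standard = 'custom'
    version = '1.0.0'
    types = []

# yaml_tag is generated by the metaclass via make_yaml_tag(name):
assert FractionType.yaml_tag == 'tag:example.org:custom/fraction-1.0.0'
# tag_base() drops both the name and the version:
assert FractionType.tag_base() == 'tag:example.org:custom/'
# --- end sketch ---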
""" obj = cls.to_tree(node, ctx) return tagged.tag_object(cls.yaml_tag, obj, ctx=ctx) @classmethod def from_tree(cls, tree, ctx): """ Converts basic types representing YAML trees into custom types. This method should be overridden by custom extension classes in order to define how custom types are deserialized from the YAML representation back into their original types. Typically the method will return an instance of the original custom type. It is also permitted to return a generator, which yields a partially constructed result, then completes construction once the generator is drained. This is useful when constructing objects that contain reference cycles. This method is called as part of the process of reading an ASDF file in order to construct an `AsdfFile` object. Whenever a YAML subtree is encountered that has a tag that corresponds to the `yaml_tag` property of this class, this method will be used to deserialize that tree back into an instance of the original custom type. Parameters ---------- tree : `object` representing YAML tree An instance of a basic Python type (possibly nested) that corresponds to a YAML subtree. ctx : `AsdfFile` An instance of the `AsdfFile` object that is being constructed. Returns ------- An instance of the custom type represented by this extension class, or a generator that yields that instance. """ return cls(tree) @classmethod def from_tree_tagged(cls, tree, ctx): """ Converts from tagged tree into custom type. It is more common for extension classes to override `from_tree` instead of this method. This method should only be overridden if it is necessary to access the `_tag` property of the `Tagged` object directly. Parameters ---------- tree : `asdf.tagged.Tagged` object representing YAML tree ctx : `AsdfFile` An instance of the `AsdfFile` object that is being constructed. Returns ------- An instance of the custom type represented by this extension class. """ return cls.from_tree(tree.data, ctx) @classmethod def incompatible_version(cls, version): """ Indicates if given version is known to be incompatible with this type. If this tag class explicitly identifies compatible versions then this checks whether a given version is compatible or not (see `supported_versions`). Otherwise, all versions are assumed to be compatible. Child classes can override this method to affect how version compatiblity for this type is determined. Parameters ---------- version : `str` or `~asdf.versioning.AsdfVersion` The version to test for compatibility. """ if cls.supported_versions: if version not in cls.supported_versions: return True return False class AsdfType(ExtensionType, metaclass=AsdfTypeMeta): """ Base class for all built-in ASDF types. Types that inherit this class will be automatically added to the list of built-ins. This should *not* be used for user-defined extensions. """ class CustomType(ExtensionType, metaclass=ExtensionTypeMeta): """ Base class for all user-defined types. """ # These attributes are duplicated here with docstrings since a bug in # sphinx prevents the docstrings of class attributes from being inherited # properly (see https://github.com/sphinx-doc/sphinx/issues/741). The # docstrings are not included anywhere else in the class hierarchy since # this class is the only one exposed in the public API. name = None """ `str` or `list`: The name of the type. """ organization = 'stsci.edu' """ `str`: The organization responsible for the type. """ standard = 'asdf' """ `str`: The standard the type is defined in. 
""" version = (1, 0, 0) """ `str`, `tuple`, `AsdfVersion`, or `AsdfSpec`: The version of the type. """ supported_versions = set() """ `set`: Versions that explicitly compatible with this extension class. If provided, indicates explicit compatibility with the given set of versions. Other versions of the same schema that are not included in this set will not be converted to custom types with this class. """ types = [] """ `list`: List of types that this extension class can convert to/from YAML. Custom Python types that, when found in the tree, will be converted into basic types for YAML output. Can be either strings referring to the types or the types themselves.""" handle_dynamic_subclasses = False """ `bool`: Indicates whether dynamically generated subclasses can be serialized Flag indicating whether this type is capable of serializing subclasses of any of the types listed in ``types`` that are generated dynamically. """ validators = {} """ `dict`: Mapping JSON Schema keywords to validation functions for jsonschema. Useful if the type defines extra types of validation that can be performed. """ requires = [] """ `list`: Python packages that are required to instantiate the object. """ yaml_tag = None """ `str`: The YAML tag to use for the type. If not provided, it will be automatically generated from name, organization, standard and version. """ has_required_modules = True """ `bool`: Indicates whether modules specified by `requires` are available. NOTE: This value is automatically generated. Do not set it in subclasses as it will be overwritten. """ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266559.0 asdf-2.9.2/asdf/util.py0000644000537500020070000003470000000000000016046 0ustar00wjamiesonSTSCI\scienceimport enum import inspect import math import struct import types import importlib.util import re from functools import lru_cache from urllib.request import pathname2url import numpy as np from . import constants # We're importing our own copy of urllib.parse because # we need to patch it to support asdf:// URIs, but it'd # be irresponsible to do this for all users of a # standard library. urllib_parse_spec = importlib.util.find_spec('urllib.parse') patched_urllib_parse = importlib.util.module_from_spec(urllib_parse_spec) urllib_parse_spec.loader.exec_module(patched_urllib_parse) del urllib_parse_spec # urllib.parse needs to know that it should treat asdf:// # URIs like http:// URIs for the purposes of joining # a relative path to a base URI. patched_urllib_parse.uses_relative.append('asdf') patched_urllib_parse.uses_netloc.append('asdf') __all__ = ['human_list', 'get_array_base', 'get_base_uri', 'filepath_to_url', 'iter_subclasses', 'calculate_padding', 'resolve_name', 'NotSet', 'is_primitive', 'uri_match', 'get_class_name'] def human_list(l, separator="and"): """ Formats a list for human readability. Parameters ---------- l : sequence A sequence of strings separator : string, optional The word to use between the last two entries. Default: ``"and"``. Returns ------- formatted_list : string Examples -------- >>> human_list(["vanilla", "strawberry", "chocolate"], "or") 'vanilla, strawberry or chocolate' """ if len(l) == 1: return l[0] else: return ', '.join(l[:-1]) + ' ' + separator + ' ' + l[-1] def get_array_base(arr): """ For a given Numpy array, finds the base array that "owns" the actual data. 
""" base = arr while isinstance(base.base, np.ndarray): base = base.base return base def get_base_uri(uri): """ For a given URI, return the part without any fragment. """ parts = patched_urllib_parse.urlparse(uri) return patched_urllib_parse.urlunparse(list(parts[:5]) + ['']) def filepath_to_url(path): """ For a given local file path, return a file:// url. """ return patched_urllib_parse.urljoin('file:', pathname2url(path)) def iter_subclasses(cls): """ Returns all subclasses of a class. """ for x in cls.__subclasses__(): yield x for y in iter_subclasses(x): yield y def calculate_padding(content_size, pad_blocks, block_size): """ Calculates the amount of extra space to add to a block given the user's request for the amount of extra space. Care is given so that the total of size of the block with padding is evenly divisible by block size. Parameters ---------- content_size : int The size of the actual content pad_blocks : float or bool If `False`, add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. block_size : int The filesystem block size to use. Returns ------- nbytes : int The number of extra bytes to add for padding. """ if not pad_blocks: return 0 if pad_blocks is True: pad_blocks = 1.1 new_size = content_size * pad_blocks new_size = int((math.ceil( float(new_size) / block_size) + 1) * block_size) return max(new_size - content_size, 0) class BinaryStruct: """ A wrapper around the Python stdlib struct module to define a binary struct more like a dictionary than a tuple. """ def __init__(self, descr, endian='>'): """ Parameters ---------- descr : list of tuple Each entry is a pair ``(name, format)``, where ``format`` is one of the format types understood by `struct`. endian : str, optional The endianness of the struct. Must be ``>`` or ``<``. """ self._fmt = [endian] self._offsets = {} self._names = [] i = 0 for name, fmt in descr: self._fmt.append(fmt) self._offsets[name] = (i, (endian + fmt).encode('ascii')) self._names.append(name) i += struct.calcsize(fmt.encode('ascii')) self._fmt = ''.join(self._fmt).encode('ascii') self._size = struct.calcsize(self._fmt) @property def size(self): """ Return the size of the struct. """ return self._size def pack(self, **kwargs): """ Pack the given arguments, which are given as kwargs, and return the binary struct. """ fields = [0] * len(self._names) for key, val in kwargs.items(): if key not in self._offsets: raise KeyError("No header field '{0}'".format(key)) i = self._names.index(key) fields[i] = val return struct.pack(self._fmt, *fields) def unpack(self, buff): """ Unpack the given binary buffer into the fields. The result is a dictionary mapping field names to values. """ args = struct.unpack_from(self._fmt, buff[:self._size]) return dict(zip(self._names, args)) def update(self, fd, **kwargs): """ Update part of the struct in-place. Parameters ---------- fd : generic_io.GenericIO instance A writable, seekable file descriptor, currently seeked to the beginning of the struct. **kwargs : values The values to update on the struct. """ updates = [] for key, val in kwargs.items(): if key not in self._offsets: raise KeyError("No header field '{0}'".format(key)) updates.append((self._offsets[key], val)) updates.sort() start = fd.tell() for ((offset, datatype), val) in updates: fd.seek(start + offset) fd.write(struct.pack(datatype, val)) class HashableDict(dict): """ A simple wrapper around dict to make it hashable. 
This is sure to be slow, but for small dictionaries it shouldn't matter. """ def __hash__(self): return hash(frozenset(self.items())) def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. This ends up working like ``from module import object`` but is easier to deal with than the `__import__` builtin and supports digging into submodules. Parameters ---------- name : `str` A dotted path to a Python object--that is, the name of a function, class, or other object in a module with the full path to that module, including parent modules, separated by dots. Also known as the fully qualified name of the object. Examples -------- >>> resolve_name('asdf.util.resolve_name') Raises ------ `ImportError` If the module or named object is not found. """ # Note: On python 2 these must be str objects and not unicode parts = [str(part) for part in name.split('.')] if len(parts) == 1: # No dots in the name--just a straight up module import cursor = 1 attr_name = str('') # Must not be unicode on Python 2 else: cursor = len(parts) - 1 attr_name = parts[-1] module_name = parts[:cursor] while cursor > 0: try: ret = __import__(str('.'.join(module_name)), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret # Kludge to cover up the fact that BuiltinExtension was moved from extension.py # to extension/_legacy.py. Can be removed once BuiltinExtension is dropped # in asdf 3.0. _CLASS_NAME_OVERRIDES = { "asdf.extension._legacy.BuiltinExtension": "asdf.extension.BuiltinExtension", } def get_class_name(obj, instance=True): """ Given a class or instance of a class, returns a string representing the fully specified path of the class. Parameters ---------- obj : object An instance of any object instance: bool Indicates whether given object is an instance of the class to be named """ typ = type(obj) if instance else obj class_name = "{}.{}".format(typ.__module__, typ.__qualname__) return _CLASS_NAME_OVERRIDES.get(class_name, class_name) def minversion(module, version, inclusive=True, version_path='__version__'): """ Returns `True` if the specified Python module satisfies a minimum version requirement, and `False` if not. By default this uses `pkg_resources.parse_version` to do the version comparison if available. Otherwise it falls back on `packaging.version.Version`. Parameters ---------- module : module or `str` An imported module of which to check the version, or the name of that module (in which case an import of that module is attempted-- if this fails `False` is returned). version : `str` The version as a string that this module must have at a minimum (e.g. ``'0.12'``). inclusive : `bool` The specified version meets the requirement inclusively (i.e. ``>=``) as opposed to strictly greater than (default: `True`). version_path : `str` A dotted attribute path to follow in the module for the version. Defaults to just ``'__version__'``, which should work for most Python modules. """ if isinstance(module, types.ModuleType): module_name = module.__name__ elif isinstance(module, str): module_name = module try: module = resolve_name(module_name) except ImportError: return False else: raise ValueError('module argument must be an actual imported ' 'module, or the import name of the module; ' 'got {0!r}'.format(module)) if '.' 
not in version_path: have_version = getattr(module, version_path) else: have_version = resolve_name('.'.join([module.__name__, version_path])) try: from pkg_resources import parse_version except ImportError: from packaging.version import Version as parse_version if inclusive: return parse_version(have_version) >= parse_version(version) else: return parse_version(have_version) > parse_version(version) class InheritDocstrings(type): """ This metaclass makes methods of a class automatically have their docstrings filled in from the methods they override in the base class. If the class uses multiple inheritance, the docstring will be chosen from the first class in the bases list, in the same way as methods are normally resolved in Python. If this results in selecting the wrong docstring, the docstring will need to be explicitly included on the method. For example:: >>> from asdf.util import InheritDocstrings >>> class A(metaclass=InheritDocstrings): ... def wiggle(self): ... "Wiggle the thingamajig" ... pass >>> class B(A): ... def wiggle(self): ... pass >>> B.wiggle.__doc__ u'Wiggle the thingamajig' """ def __init__(cls, name, bases, dct): def is_public_member(key): return ( (key.startswith('__') and key.endswith('__') and len(key) > 4) or not key.startswith('_')) for key, val in dct.items(): if (inspect.isfunction(val) and is_public_member(key) and val.__doc__ is None): for base in cls.__mro__[1:]: super_method = getattr(base, key, None) if super_method is not None: val.__doc__ = super_method.__doc__ break super(InheritDocstrings, cls).__init__(name, bases, dct) class _NotSetType: def __repr__(self): return "NotSet" """ Special value indicating that a parameter is not set. Distinct from None, which may for example be a value of interest in a search. """ NotSet = _NotSetType() def is_primitive(value): """ Determine if a value is an instance of a "primitive" type. Parameters ---------- value : object the value to test Returns ------- bool True if the value is primitive, False otherwise """ return ( value is None or isinstance(value, bool) or isinstance(value, int) or isinstance(value, float) or isinstance(value, complex) or isinstance(value, str) ) def uri_match(pattern, uri): """ Determine if a URI matches a URI pattern with possible wildcards. The two recognized wildcards: "*": match any character except / "**": match any character Parameters ---------- pattern : str URI pattern. uri : str URI to check against the pattern. Returns ------- bool `True` if URI matches the pattern. """ if not isinstance(uri, str): return False if "*" in pattern: return _compile_uri_match_pattern(pattern).fullmatch(uri) is not None else: return pattern == uri @lru_cache(1024) def _compile_uri_match_pattern(pattern): # Escape the pattern in case it contains regex special characters # ('.' in particular is common in URIs) and then replace the # escaped asterisks with the appropriate regex matchers. pattern = re.escape(pattern) pattern = pattern.replace(r"\*\*", r".*") pattern = pattern.replace(r"\*", r"[^/]*") return re.compile(pattern) def get_file_type(fd): """ Determine the file type of an open GenericFile instance. Parameters ---------- fd : GenericFile Returns ------- FileType """ if fd.peek(5) == constants.ASDF_MAGIC: return FileType.ASDF elif fd.peek(6) == constants.FITS_MAGIC: return FileType.FITS else: return FileType.UNKNOWN class FileType(enum.Enum): """ Enum representing file types recognized by asdf. 
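# --- Illustrative usage (not part of the original module) ---
# The two wildcards accepted by uri_match above: '*' stops at '/', while
# '**' crosses path segments.
from asdf.util import uri_match

assert uri_match('asdf://example.com/schemas/*',
                 'asdf://example.com/schemas/foo-1.0.0')
assert not uri_match('asdf://example.com/*',
                     'asdf://example.com/schemas/foo-1.0.0')
assert uri_match('asdf://example.com/**',
                 'asdf://example.com/schemas/foo-1.0.0')
# --- end sketch ---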
""" ASDF = 1 FITS = 2 UNKNOWN = 3 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644282534.0 asdf-2.9.2/asdf/version.py0000644000537500020070000000002200000000000016544 0ustar00wjamiesonSTSCI\scienceversion = "2.9.2" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643657431.0 asdf-2.9.2/asdf/versioning.py0000644000537500020070000001355600000000000017262 0ustar00wjamiesonSTSCI\science""" This module deals with things that change between different versions of the ASDF spec. """ from functools import total_ordering import yaml if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_loader = yaml.SafeLoader from semantic_version import Version, SimpleSpec from . import generic_io from . import resolver __all__ = ['AsdfVersion', 'AsdfSpec', 'split_tag_version', 'join_tag_version'] def split_tag_version(tag): """ Split a tag into its base and version. """ name, version = tag.rsplit('-', 1) version = AsdfVersion(version) return name, version def join_tag_version(name, version): """ Join the root and version of a tag back together. """ return '{0}-{1}'.format(name, version) _version_map = {} def get_version_map(version): version_map = _version_map.get(version) if version_map is None: version_map_path = resolver.DEFAULT_URL_MAPPING[0][1].replace( '{url_suffix}', 'asdf/version_map-{0}'.format(version)) try: with generic_io.get_file(version_map_path, 'r') as fd: # The following call to yaml.load is safe because we're # using a loader that inherits from pyyaml's SafeLoader. version_map = yaml.load( # nosec fd, Loader=_yaml_base_loader) except Exception: raise ValueError( "Could not load version map for version {0}".format(version)) # Separate the core tags from the rest of the standard for convenience version_map['core'] = {} version_map['standard'] = {} for tag_name, tag_version in version_map['tags'].items(): if tag_name.startswith('tag:stsci.edu:asdf/core'): version_map['core'][tag_name] = tag_version else: version_map['standard'][tag_name] = tag_version _version_map[version] = version_map return version_map @total_ordering class AsdfVersionMixin: """This mix-in is required in order to impose the total ordering that we want for ``AsdfVersion``, rather than accepting the total ordering that is already provided by ``Version`` from ``semantic_version``. Defining these comparisons directly in ``AsdfVersion`` and applying ``total_ordering`` there will not work since ``total_ordering`` only defines comparison operations if they do not exist already and the base class ``Version`` already defines these operations. """ def __eq__(self, other): # Seems like a bit of a hack... if isinstance(other, SimpleSpec): return other == self if isinstance(other, (str, tuple, list)): other = AsdfVersion(other) return Version.__eq__(self, other) def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): if isinstance(other, (str, tuple, list)): other = AsdfVersion(other) return Version.__lt__(self, other) def __hash__(self): # To be honest, I'm not sure why I had to make this explicit return Version.__hash__(self) class AsdfVersion(AsdfVersionMixin, Version): """This class adds features to the existing ``Version`` class from the ``semantic_version`` module. 
Namely, it allows ``Version`` objects to be constructed from tuples and lists as well as strings, and it allows ``Version`` objects to be compared with tuples, lists, and strings, instead of just other ``Version`` objects. If any of these features are added to the ``Version`` class itself (as requested in https://github.com/rbarrois/python-semanticversion/issues/52), then this class will become obsolete. """ def __init__(self, version): # This is a dirty hack and you know it if isinstance(version, AsdfVersion): version = str(version) if isinstance(version, (tuple, list)): version = '.'.join([str(x) for x in version]) super(AsdfVersion, self).__init__(version) class AsdfSpec(SimpleSpec): def __init__(self, *args, **kwargs): super(AsdfSpec, self).__init__(*args, **kwargs) def match(self, version): if isinstance(version, (str, tuple, list)): version = AsdfVersion(version) return super(AsdfSpec, self).match(version) def __iterate_versions(self, versions): for v in versions: if isinstance(v, (str, tuple, list)): v = AsdfVersion(v) yield v def select(self, versions): return super(AsdfSpec, self).select(self.__iterate_versions(versions)) def filter(self, versions): return super(AsdfSpec, self).filter(self.__iterate_versions(versions)) def __eq__(self, other): """Equality between Spec and Version, string, or tuple, means match""" if isinstance(other, SimpleSpec): return super(AsdfSpec, self).__eq__(other) return self.match(other) def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return super(AsdfSpec, self).__hash__() supported_versions = [ AsdfVersion('1.0.0'), AsdfVersion('1.1.0'), AsdfVersion('1.2.0'), AsdfVersion('1.3.0'), AsdfVersion('1.4.0'), AsdfVersion('1.5.0'), AsdfVersion('1.6.0'), ] default_version = AsdfVersion('1.5.0') # This is the ASDF Standard version at which the format of the history # field changed to include extension metadata. NEW_HISTORY_FORMAT_MIN_VERSION = AsdfVersion("1.2.0") # This is the ASDF Standard version at which we begin restricting # mapping keys to string, integer, and boolean only. RESTRICTED_KEYS_MIN_VERSION = AsdfVersion("1.6.0") # This library never removed defaults for ASDF Standard versions # later than 1.5.0, so filling them isn't necessary. FILL_DEFAULTS_MAX_VERSION = AsdfVersion("1.5.0") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/asdf/yamlutil.py0000644000537500020070000003441400000000000016733 0ustar00wjamiesonSTSCI\scienceimport warnings from collections import OrderedDict from types import GeneratorType import numpy as np import yaml from . import schema from . import tagged from . import treeutil from . import util from .constants import YAML_TAG_PREFIX, STSCI_SCHEMA_TAG_BASE from .versioning import split_tag_version from .exceptions import AsdfConversionWarning from .tags.core import AsdfObject __all__ = ['custom_tree_to_tagged_tree', 'tagged_tree_to_custom_tree'] if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_dumper = yaml.CSafeDumper _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_dumper = yaml.SafeDumper _yaml_base_loader = yaml.SafeLoader YAML_OMAP_TAG = YAML_TAG_PREFIX + 'omap' # ---------------------------------------------------------------------- # Custom loader/dumpers class AsdfDumper(_yaml_base_dumper): """ A specialized YAML dumper that understands "tagged basic Python data types" as implemented in the `tagged` module. 
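# --- Illustrative usage (not part of the original module) ---
# For AsdfSpec above, equality means "matches" (see __eq__), so a spec
# compares directly against version strings and tuples:
from asdf.versioning import AsdfSpec

spec = AsdfSpec('>=1.2.0')
assert spec == '1.3.0'
assert spec == (1, 2, 0)
assert not (spec == '1.1.0')
# --- end sketch ---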
""" def __init__(self, *args, **kwargs): kwargs['default_flow_style'] = None super().__init__(*args, **kwargs) def represent_data(self, data): node = super(AsdfDumper, self).represent_data(data) tag_name = getattr(data, '_tag', None) if tag_name is not None: node.tag = tag_name return node _flow_style_map = { 'flow': True, 'block': False } def represent_sequence(dumper, sequence): flow_style = _flow_style_map.get(sequence.flow_style, None) sequence = sequence.data return super(AsdfDumper, dumper).represent_sequence( None, sequence, flow_style) def represent_mapping(dumper, mapping): flow_style = _flow_style_map.get(mapping.flow_style, None) node = super(AsdfDumper, dumper).represent_mapping( None, mapping.data, flow_style) if mapping.property_order: values = node.value new_mapping = {} for key, val in values: new_mapping[key.value] = (key, val) new_values = [] for key in mapping.property_order: if key in mapping: new_values.append(new_mapping[key]) property_order = set(mapping.property_order) for key, val in values: if key.value not in property_order: new_values.append((key, val)) node.value = new_values return node _style_map = { 'inline': '"', 'folded': '>', 'literal': '|' } def represent_scalar(dumper, value): style = _style_map.get(value.style, None) return super(AsdfDumper, dumper).represent_scalar( None, value.data, style) def represent_ordered_mapping(dumper, tag, data): # TODO: Again, adjust for preferred flow style, and other stylistic details # NOTE: For block style this uses the compact omap notation, but for flow style # it does not. # TODO: Need to see if I can figure out a mechanism so that classes that # use this representer can specify which values should use flow style values = [] node = yaml.SequenceNode(tag, values, flow_style=dumper.default_flow_style) if dumper.alias_key is not None: dumper.represented_objects[dumper.alias_key] = node for key, value in data.items(): key_item = dumper.represent_data(key) value_item = dumper.represent_data(value) node_item = yaml.MappingNode(YAML_OMAP_TAG, [(key_item, value_item)], flow_style=False) values.append(node_item) return node def represent_ordereddict(dumper, data): return represent_ordered_mapping(dumper, YAML_OMAP_TAG, data) AsdfDumper.add_representer(tagged.TaggedList, represent_sequence) AsdfDumper.add_representer(tagged.TaggedDict, represent_mapping) AsdfDumper.add_representer(tagged.TaggedString, represent_scalar) AsdfDumper.add_representer(OrderedDict, represent_ordereddict) # ---------------------------------------------------------------------- # Handle numpy scalars for scalar_type in util.iter_subclasses(np.floating): AsdfDumper.add_representer(scalar_type, AsdfDumper.represent_float) for scalar_type in util.iter_subclasses(np.integer): AsdfDumper.add_representer(scalar_type, AsdfDumper.represent_int) def represent_numpy_str(dumper, data): # The CSafeDumper implementation will raise an error if it # doesn't recognize data as a string. The Python SafeDumper # has no problem with np.str_. return dumper.represent_str(str(data)) AsdfDumper.add_representer(np.str_, represent_numpy_str) AsdfDumper.add_representer(np.bytes_, AsdfDumper.represent_binary) class AsdfLoader(_yaml_base_loader): """ A specialized YAML loader that can construct "tagged basic Python data types" as implemented in the `tagged` module. 
""" def construct_undefined(self, node): if isinstance(node, yaml.MappingNode): return self._construct_tagged_mapping(node) elif isinstance(node, yaml.SequenceNode): return self._construct_tagged_sequence(node) elif isinstance(node, yaml.ScalarNode): return self._construct_tagged_scalar(node) else: return super().construct_undefined(node) def _construct_tagged_mapping(self, node): data = tagged.tag_object(node.tag, {}) yield data data.update(self.construct_mapping(node)) def _construct_tagged_sequence(self, node): data = tagged.tag_object(node.tag, []) yield data data.extend(self.construct_sequence(node)) def _construct_tagged_scalar(self, node): return tagged.tag_object(node.tag, self.construct_scalar(node)) # Custom omap deserializer that builds an OrderedDict instead # of a list of tuples. Code is mostly identical to pyyaml's SafeConstructor. def construct_yaml_omap(self, node): omap = OrderedDict() yield omap if not isinstance(node, yaml.SequenceNode): raise yaml.ConstructorError("while constructing an ordered map", node.start_mark, "expected a sequence, but found %s" % node.id, node.start_mark) for subnode in node.value: if not isinstance(subnode, yaml.MappingNode): raise yaml.ConstructorError("while constructing an ordered map", node.start_mark, "expected a mapping of length 1, but found %s" % subnode.id, subnode.start_mark) if len(subnode.value) != 1: raise yaml.ConstructorError("while constructing an ordered map", node.start_mark, "expected a single mapping item, but found %d items" % len(subnode.value), subnode.start_mark) key_node, value_node = subnode.value[0] key = self.construct_object(key_node) value = self.construct_object(value_node) omap[key] = value # pyyaml will invoke the constructor associated with None when a node's # tag is not explicitly handled by another constructor. AsdfLoader.add_constructor(None, AsdfLoader.construct_undefined) AsdfLoader.add_constructor(YAML_TAG_PREFIX + "omap", AsdfLoader.construct_yaml_omap) def custom_tree_to_tagged_tree(tree, ctx, _serialization_context=None): """ Convert a tree, possibly containing custom data types that aren't directly representable in YAML, to a tree of basic data types, annotated with tags. 
""" if _serialization_context is None: _serialization_context = ctx._create_serialization_context() extension_manager = _serialization_context.extension_manager def _convert_obj(obj): converter = extension_manager.get_converter_for_type(type(obj)) tag = converter.select_tag(obj, _serialization_context) node = converter.to_yaml_tree(obj, tag, _serialization_context) if isinstance(node, GeneratorType): generator = node node = next(generator) else: generator = None if isinstance(node, dict): tagged_node = tagged.TaggedDict(node, tag) elif isinstance(node, list): tagged_node = tagged.TaggedList(node, tag) elif isinstance(node, str): tagged_node = tagged.TaggedString(node) tagged_node._tag = tag else: raise TypeError( "Converter returned illegal node type: {}".format(util.get_class_name(node)) ) _serialization_context._mark_extension_used(converter.extension) yield tagged_node if generator is not None: yield from generator def _walker(obj): if extension_manager.handles_type(type(obj)): return _convert_obj(obj) else: tag = ctx.type_index.from_custom_type( type(obj), ctx.version_string, _serialization_context=_serialization_context ) if tag is not None: return tag.to_tree_tagged(obj, ctx) return obj return treeutil.walk_and_modify( tree, _walker, ignore_implicit_conversion=ctx._ignore_implicit_conversion, # Walk the tree in preorder, so that extensions can return # container nodes with unserialized children. postorder=False, _context=ctx._tree_modification_context, ) def tagged_tree_to_custom_tree(tree, ctx, force_raw_types=False, _serialization_context=None): """ Convert a tree containing only basic data types, annotated with tags, to a tree containing custom data types. """ if _serialization_context is None: _serialization_context = ctx._create_serialization_context() extension_manager = _serialization_context.extension_manager def _walker(node): if force_raw_types: return node tag = getattr(node, '_tag', None) if tag is None: return node if extension_manager.handles_tag(tag): converter = extension_manager.get_converter_for_tag(tag) obj = converter.from_yaml_tree(node.data, tag, _serialization_context) _serialization_context._mark_extension_used(converter.extension) return obj tag_type = ctx.type_index.from_yaml_tag(ctx, tag, _serialization_context=_serialization_context) # This means the tag did not correspond to any type in our type index. if tag_type is None: if not ctx._ignore_unrecognized_tag: warnings.warn("{} is not recognized, converting to raw Python " "data structure".format(tag), AsdfConversionWarning) return node tag_name, tag_version = split_tag_version(tag) # This means that there is an explicit description of versions that are # compatible with the associated tag class implementation, but the # version we found does not fit that description. if tag_type.incompatible_version(tag_version): warnings.warn("Version {} of {} is not compatible with any " "existing tag implementations".format( tag_version, tag_name), AsdfConversionWarning) return node # If a tag class does not explicitly list compatible versions, then all # versions of the corresponding schema are assumed to be compatible. # Therefore we need to check to make sure whether the conversion is # actually successful, and just return a raw Python data type if it is # not. try: return tag_type.from_tree_tagged(node, ctx) except TypeError as err: warnings.warn("Failed to convert {} to custom type (detail: {}). 
" "Using raw Python data structure instead".format(tag, err), AsdfConversionWarning) return node return treeutil.walk_and_modify( tree, _walker, ignore_implicit_conversion=ctx._ignore_implicit_conversion, # Walk the tree in postorder, so that extensions receive # container nodes with children already deserialized. postorder=True, _context=ctx._tree_modification_context, ) def load_tree(stream): """ Load YAML, returning a tree of objects. Parameters ---------- stream : readable file-like object Stream containing the raw YAML content. """ # The following call to yaml.load is safe because we're # using a loader that inherits from pyyaml's SafeLoader. return yaml.load(stream, Loader=AsdfLoader) # nosec def dump_tree(tree, fd, ctx, tree_finalizer=None, _serialization_context=None): """ Dump a tree of objects, possibly containing custom types, to YAML. Parameters ---------- tree : object Tree of objects, possibly containing custom data types. fd : asdf.generic_io.GenericFile A file object to dump the serialized YAML to. ctx : Context The writing context. tree_finalizer : callable, optional Callback that receives the tagged tree before it is validated and defaults are removed. `asdf.AsdfFile` uses this to update extension metadata on the tagged tree before it is fully serialized to YAML. """ # The _serialization_context parameter allows AsdfFile to track # what extensions were used when converting the tree's custom # types. In 3.0, it will be passed as the `ctx` instead of the # AsdfFile itself. if type(tree) is not AsdfObject: raise TypeError("Root node of ASDF tree must be of type AsdfObject") tags = {'!': STSCI_SCHEMA_TAG_BASE + '/'} tree = custom_tree_to_tagged_tree(tree, ctx, _serialization_context=_serialization_context) if tree_finalizer is not None: tree_finalizer(tree) schema.validate(tree, ctx) yaml_version = tuple( int(x) for x in ctx.version_map['YAML_VERSION'].split('.')) # add yaml %TAG definitions from extensions if _serialization_context: for ext in _serialization_context._extensions_used: for key, val in ext.yaml_tag_handles.items(): if key not in tags: tags[key] = val yaml.dump_all( [tree], stream=fd, Dumper=AsdfDumper, explicit_start=True, explicit_end=True, version=yaml_version, allow_unicode=True, encoding='utf-8', tags=tags) ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1644282536.785295 asdf-2.9.2/asdf-standard/0000755000537500020070000000000000000000000016311 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8145576 asdf-2.9.2/asdf-standard/reference_files/0000755000537500020070000000000000000000000021431 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8225584 asdf-2.9.2/asdf-standard/reference_files/1.0.0/0000755000537500020070000000000000000000000022065 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/ascii.asdf0000644000537500020070000000070400000000000024015 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: [ascii, 5] byteorder: big shape: [2] ... 
ÓBLK0 ` o본R¦Ú¥+*¥¤ascii#ASDF BLOCK INDEX %YAML 1.1 --- [350] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/ascii.yaml0000644000537500020070000000052100000000000024037 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: ['', ascii] datatype: [ascii, 5] shape: [2] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/basic.asdf0000644000537500020070000000077000000000000024011 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [8] ... ÓBLK0@@@5YL®_±ãêAœ&¼Lûî#ASDF BLOCK INDEX %YAML 1.1 --- [348] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/basic.yaml0000644000537500020070000000053100000000000024031 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/complex.asdf0000644000537500020070000001315100000000000024374 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypec16: !core/ndarray-1.0.0 source: 1 datatype: complex128 byteorder: big shape: [100] datatype>c8: !core/ndarray-1.0.0 source: 0 datatype: complex64 byteorder: big shape: [100] ... 
ÓBLK0   ï&‹^Íl»e¡ûn™¦å†„ÀÀÿÀ€ÿÀÿ€ÿÿÿÿÿ43€€€ÀÀÿÀ€ÿÀÿ€€ÿÿÿÿÿ43€€ÀÀÀÀÀ€Àÿ€ÀÿÿÿÀÿÿÀ4À3€À€€€ÀÀÿÀ€ÿÀÿ€€ÿÿÿ€ÿÿ€4€3€€€ÿ€ÿ€ÀÀÿÀ€ÿÀÿ€ÿ€ÿÿÿÿ€ÿÿÿ€4ÿ€3€ÿ€€ÿÿÿÿÿÿÀÀÿÀ€ÿÀÿ€ÿÿÿÿÿÿÿÿÿÿÿÿÿÿ4ÿÿÿ3€ÿÿÿ€ÿÿÿÿÀÀÿÀ€ÿÀÿ€ÿÿÿÿÿÿÿÿÿÿÿ4ÿÿ3€ÿÿ€44ÀÀÿÀ€ÿÀÿ€4ÿÿÿ4ÿÿ4443€4€3€3€ÀÀÿÀ€ÿÀÿ€3€ÿÿÿ3€ÿÿ3€43€3€3€€€€ÀÀÿÀ€ÿÀÿ€€ÿÿÿ€ÿÿ€4€3€€€ÓBLK0@@@ÍÁnÂÓA[eû¬ŠWvøøÿøðÿøÿðÿïÿÿÿÿÿÿïÿÿÿÿÿÿ<°< €øøÿøðÿøÿð€ÿïÿÿÿÿÿÿïÿÿÿÿÿÿ<°< øøøøøðøÿðøÿïÿÿÿÿÿÿøïÿÿÿÿÿÿø<°ø< øððøøÿøðÿøÿððÿïÿÿÿÿÿÿðïÿÿÿÿÿÿð<°ð< ðÿðÿðøøÿøðÿøÿðÿðÿïÿÿÿÿÿÿÿðïÿÿÿÿÿÿÿð<°ÿð< ÿðÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿøøÿøðÿøÿðÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿ<°ÿïÿÿÿÿÿÿ< ÿïÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿøøÿøðÿøÿðïÿÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿ<°ïÿÿÿÿÿÿ< ïÿÿÿÿÿÿ<°<°øøÿøðÿøÿð<°ÿïÿÿÿÿÿÿ<°ïÿÿÿÿÿÿ<°<°<°< <°< < øøÿøðÿøÿð< ÿïÿÿÿÿÿÿ< ïÿÿÿÿÿÿ< <°< < < øøÿøðÿøÿðÿïÿÿÿÿÿÿïÿÿÿÿÿÿ<°< ÓBLK0   úMzj4ßo®A{»ÎÊ`«ÀÀÀÿ€Àÿ€ÿÿÿÿÿÿ4€3€€ÀÀÀÿ€Àÿ€ÿ€ÿÿÿÿÿ4€3€ÀÀÀÀÀ€À€ÿÀÿÿÿÀÿÿÀ4À€3À€€€ÀÀÀÿ€Àÿ€ÿ€ÿÿÿ€ÿÿ€4€€3€€€ÿ€ÿÀÀÀÿ€Àÿ€ÿ€ÿÿÿÿ€ÿÿÿ€ÿ4€ÿ€3€ÿ€ÿÿÿÿÿÿÀÀÀÿ€Àÿ€ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ4ÿÿÿ€3ÿÿÿ€ÿÿÿÿÀÀÀÿ€Àÿ€ÿÿÿÿÿÿÿÿÿÿÿÿ4ÿÿ€3ÿÿ€44ÀÀÀÿ€Àÿ€ÿ4ÿÿÿ4ÿÿ444€34€€3€3ÀÀÀÿ€Àÿ€ÿ€3ÿÿÿ€3ÿÿ€34€3€3€3€€€ÀÀÀÿ€Àÿ€ÿ€ÿÿÿ€ÿÿ€4€€3€€ÓBLK0@@@e1õö$¦èÇœ9¸0ˆm3øøøÿðøÿðÿÿÿÿÿÿÿïÿÿÿÿÿÿÿï°< <€øøøÿðøÿðÿ€ÿÿÿÿÿÿïÿÿÿÿÿÿÿï°< <øøøøøðøðÿøÿÿÿÿÿÿïÿøÿÿÿÿÿÿïø°<ø <øððøøøÿðøÿðÿðÿÿÿÿÿÿïÿðÿÿÿÿÿÿïð°<ð <ððÿðÿøøøÿðøÿðÿðÿÿÿÿÿÿÿïÿðÿÿÿÿÿÿÿïðÿ°<ðÿ <ðÿÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿøøøÿðøÿðÿÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿïÿ°<ÿÿÿÿÿÿïÿ <ÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿïøøøÿðøÿðÿÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿÿïÿÿÿÿÿÿïÿÿÿÿÿÿï°<ÿÿÿÿÿÿï <ÿÿÿÿÿÿï°<°<øøøÿðøÿðÿ°<ÿÿÿÿÿÿïÿ°<ÿÿÿÿÿÿï°<°<°< <°< < <øøøÿðøÿðÿ <ÿÿÿÿÿÿïÿ <ÿÿÿÿÿÿï <°< < < <øøøÿðøÿðÿÿÿÿÿÿÿïÿÿÿÿÿÿÿï°< <#ASDF BLOCK INDEX %YAML 1.1 --- [665, 1519, 3173, 4027] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/complex.yaml0000644000537500020070000004412100000000000024422 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypec16: !core/ndarray-1.0.0 data: [!core/complex-1.0.0 0j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 -1.79769313486e+308j, !core/complex-1.0.0 1.79769313486e+308j, !core/complex-1.0.0 2.22044604925e-16j, !core/complex-1.0.0 1.11022302463e-16j, !core/complex-1.0.0 2.22507385851e-308j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (-0+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-0-1.79769313486e+308j), !core/complex-1.0.0 1.79769313486e+308j, !core/complex-1.0.0 2.22044604925e-16j, !core/complex-1.0.0 1.11022302463e-16j, !core/complex-1.0.0 2.22507385851e-308j, !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (nan-1.79769313486e+308j), !core/complex-1.0.0 (nan+1.79769313486e+308j), !core/complex-1.0.0 (nan+2.22044604925e-16j), !core/complex-1.0.0 (nan+1.11022302463e-16j), !core/complex-1.0.0 (nan+2.22507385851e-308j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (inf-1.79769313486e+308j), !core/complex-1.0.0 (inf+1.79769313486e+308j), !core/complex-1.0.0 (inf+2.22044604925e-16j), !core/complex-1.0.0 (inf+1.11022302463e-16j), !core/complex-1.0.0 (inf+2.22507385851e-308j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-inf-1.79769313486e+308j), !core/complex-1.0.0 (-inf+1.79769313486e+308j), !core/complex-1.0.0 (-inf+2.22044604925e-16j), !core/complex-1.0.0 (-inf+1.11022302463e-16j), !core/complex-1.0.0 (-inf+2.22507385851e-308j), !core/complex-1.0.0 (-1.79769313486e+308+0j), !core/complex-1.0.0 (-1.79769313486e+308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-1.79769313486e+308-1.79769313486e+308j), !core/complex-1.0.0 (-1.79769313486e+308+1.79769313486e+308j), !core/complex-1.0.0 (-1.79769313486e+308+2.22044604925e-16j), !core/complex-1.0.0 (-1.79769313486e+308+1.11022302463e-16j), !core/complex-1.0.0 (-1.79769313486e+308+2.22507385851e-308j), !core/complex-1.0.0 (1.79769313486e+308+0j), !core/complex-1.0.0 (1.79769313486e+308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.79769313486e+308-1.79769313486e+308j), !core/complex-1.0.0 (1.79769313486e+308+1.79769313486e+308j), !core/complex-1.0.0 (1.79769313486e+308+2.22044604925e-16j), !core/complex-1.0.0 (1.79769313486e+308+1.11022302463e-16j), !core/complex-1.0.0 (1.79769313486e+308+2.22507385851e-308j), !core/complex-1.0.0 (2.22044604925e-16+0j), !core/complex-1.0.0 (2.22044604925e-16+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (2.22044604925e-16-1.79769313486e+308j), !core/complex-1.0.0 (2.22044604925e-16+1.79769313486e+308j), !core/complex-1.0.0 (2.22044604925e-16+2.22044604925e-16j), !core/complex-1.0.0 (2.22044604925e-16+1.11022302463e-16j), 
!core/complex-1.0.0 (2.22044604925e-16+2.22507385851e-308j), !core/complex-1.0.0 (1.11022302463e-16+0j), !core/complex-1.0.0 (1.11022302463e-16+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.11022302463e-16-1.79769313486e+308j), !core/complex-1.0.0 (1.11022302463e-16+1.79769313486e+308j), !core/complex-1.0.0 (1.11022302463e-16+2.22044604925e-16j), !core/complex-1.0.0 (1.11022302463e-16+1.11022302463e-16j), !core/complex-1.0.0 (1.11022302463e-16+2.22507385851e-308j), !core/complex-1.0.0 (2.22507385851e-308+0j), !core/complex-1.0.0 (2.22507385851e-308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (2.22507385851e-308-1.79769313486e+308j), !core/complex-1.0.0 (2.22507385851e-308+1.79769313486e+308j), !core/complex-1.0.0 (2.22507385851e-308+2.22044604925e-16j), !core/complex-1.0.0 (2.22507385851e-308+1.11022302463e-16j), !core/complex-1.0.0 (2.22507385851e-308+2.22507385851e-308j)] datatype: complex128 shape: [100] datatype>c8: !core/ndarray-1.0.0 data: [!core/complex-1.0.0 0j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 -3.40282346639e+38j, !core/complex-1.0.0 3.40282346639e+38j, !core/complex-1.0.0 1.19209289551e-07j, !core/complex-1.0.0 5.96046447754e-08j, !core/complex-1.0.0 1.17549435082e-38j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (-0+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-0-3.40282346639e+38j), !core/complex-1.0.0 3.40282346639e+38j, !core/complex-1.0.0 1.19209289551e-07j, !core/complex-1.0.0 5.96046447754e-08j, !core/complex-1.0.0 1.17549435082e-38j, !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (nan-3.40282346639e+38j), !core/complex-1.0.0 (nan+3.40282346639e+38j), !core/complex-1.0.0 (nan+1.19209289551e-07j), !core/complex-1.0.0 (nan+5.96046447754e-08j), !core/complex-1.0.0 (nan+1.17549435082e-38j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (inf-3.40282346639e+38j), !core/complex-1.0.0 (inf+3.40282346639e+38j), !core/complex-1.0.0 (inf+1.19209289551e-07j), !core/complex-1.0.0 (inf+5.96046447754e-08j), !core/complex-1.0.0 (inf+1.17549435082e-38j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-inf-3.40282346639e+38j), !core/complex-1.0.0 (-inf+3.40282346639e+38j), !core/complex-1.0.0 (-inf+1.19209289551e-07j), !core/complex-1.0.0 (-inf+5.96046447754e-08j), !core/complex-1.0.0 (-inf+1.17549435082e-38j), !core/complex-1.0.0 (-3.40282346639e+38+0j), !core/complex-1.0.0 (-3.40282346639e+38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-3.40282346639e+38-3.40282346639e+38j), !core/complex-1.0.0 (-3.40282346639e+38+3.40282346639e+38j), !core/complex-1.0.0 (-3.40282346639e+38+1.19209289551e-07j), !core/complex-1.0.0 (-3.40282346639e+38+5.96046447754e-08j), !core/complex-1.0.0 (-3.40282346639e+38+1.17549435082e-38j), !core/complex-1.0.0 (3.40282346639e+38+0j), !core/complex-1.0.0 
(3.40282346639e+38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (3.40282346639e+38-3.40282346639e+38j), !core/complex-1.0.0 (3.40282346639e+38+3.40282346639e+38j), !core/complex-1.0.0 (3.40282346639e+38+1.19209289551e-07j), !core/complex-1.0.0 (3.40282346639e+38+5.96046447754e-08j), !core/complex-1.0.0 (3.40282346639e+38+1.17549435082e-38j), !core/complex-1.0.0 (1.19209289551e-07+0j), !core/complex-1.0.0 (1.19209289551e-07+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.19209289551e-07-3.40282346639e+38j), !core/complex-1.0.0 (1.19209289551e-07+3.40282346639e+38j), !core/complex-1.0.0 (1.19209289551e-07+1.19209289551e-07j), !core/complex-1.0.0 (1.19209289551e-07+5.96046447754e-08j), !core/complex-1.0.0 (1.19209289551e-07+1.17549435082e-38j), !core/complex-1.0.0 (5.96046447754e-08+0j), !core/complex-1.0.0 (5.96046447754e-08+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (5.96046447754e-08-3.40282346639e+38j), !core/complex-1.0.0 (5.96046447754e-08+3.40282346639e+38j), !core/complex-1.0.0 (5.96046447754e-08+1.19209289551e-07j), !core/complex-1.0.0 (5.96046447754e-08+5.96046447754e-08j), !core/complex-1.0.0 (5.96046447754e-08+1.17549435082e-38j), !core/complex-1.0.0 (1.17549435082e-38+0j), !core/complex-1.0.0 (1.17549435082e-38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.17549435082e-38-3.40282346639e+38j), !core/complex-1.0.0 (1.17549435082e-38+3.40282346639e+38j), !core/complex-1.0.0 (1.17549435082e-38+1.19209289551e-07j), !core/complex-1.0.0 (1.17549435082e-38+5.96046447754e-08j), !core/complex-1.0.0 (1.17549435082e-38+1.17549435082e-38j)] datatype: complex64 shape: [100] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/compressed.asdf0000644000537500020070000000200500000000000025067 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} bzp2: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [128] zlib: !core/ndarray-1.0.0 source: 1 datatype: int64 byteorder: little shape: [128] ... [two raw binary ASDF blocks omitted: bzp2- and zlib-compressed block data] #ASDF BLOCK INDEX %YAML 1.1 --- [441, 721] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/compressed.yaml0000644000537500020070000000273300000000000025122 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG !
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} bzp2: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127] datatype: int64 shape: [128] zlib: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127] datatype: int64 shape: [128] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/exploded.asdf0000644000537500020070000000055000000000000024530 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: exploded0000.asdf datatype: int64 byteorder: little shape: [8] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/exploded.yaml0000644000537500020070000000053100000000000024554 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/exploded0000.asdf0000644000537500020070000000063700000000000025036 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} ... [one raw binary ASDF block omitted] #ASDF BLOCK INDEX %YAML 1.1 --- [259] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/float.asdf0000644000537500020070000000220600000000000024031 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG !
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypef4: !core/ndarray-1.0.0 source: 1 datatype: float32 byteorder: big shape: [10] datatype>f8: !core/ndarray-1.0.0 source: 3 datatype: float64 byteorder: big shape: [10] ... [four raw binary ASDF blocks omitted] #ASDF BLOCK INDEX %YAML 1.1 --- [649, 743, 837, 971] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/float.yaml0000644000537500020070000000223500000000000024060 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypef4: !core/ndarray-1.0.0 data: [0.0, -0.0, .nan, .inf, -.inf, -3.4028234663852886e+38, 3.4028234663852886e+38, 1.1920928955078125e-07, 5.960464477539063e-08, 1.1754943508222875e-38] datatype: float32 shape: [10] datatype>f8: !core/ndarray-1.0.0 data: [0.0, -0.0, .nan, .inf, -.inf, -1.7976931348623157e+308, 1.7976931348623157e+308, 2.220446049250313e-16, 1.1102230246251565e-16, 2.2250738585072014e-308] datatype: float64 shape: [10] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/int.asdf0000644000537500020070000000424600000000000023524 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypei1: !core/ndarray-1.0.0 source: 1 datatype: int8 byteorder: big shape: [3] datatype>i2: !core/ndarray-1.0.0 source: 0 datatype: int16 byteorder: big shape: [3] datatype>i4: !core/ndarray-1.0.0 source: 2 datatype: int32 byteorder: big shape: [3] datatype>u1: !core/ndarray-1.0.0 source: 8 datatype: uint8 byteorder: big shape: [2] datatype>u2: !core/ndarray-1.0.0 source: 7 datatype: uint16 byteorder: big shape: [2] datatype>u4: !core/ndarray-1.0.0 source: 6 datatype: uint32 byteorder: big shape: [2] ... [twelve raw binary ASDF blocks omitted] #ASDF BLOCK INDEX %YAML 1.1 --- [1391, 1451, 1508, 1574, 1640, 1697, 1757, 1819, 1877, 1933, 1989, 2047] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/int.yaml0000644000537500020070000000244700000000000023552 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG !
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypei1: !core/ndarray-1.0.0 data: [127, -128, 0] datatype: int8 shape: [3] datatype>i2: !core/ndarray-1.0.0 data: [32767, -32768, 0] datatype: int16 shape: [3] datatype>i4: !core/ndarray-1.0.0 data: [2147483647, -2147483648, 0] datatype: int32 shape: [3] datatype>u1: !core/ndarray-1.0.0 data: [255, 0] datatype: uint8 shape: [2] datatype>u2: !core/ndarray-1.0.0 data: [65535, 0] datatype: uint16 shape: [2] datatype>u4: !core/ndarray-1.0.0 data: [4294967295, 0] datatype: uint32 shape: [2] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/shared.asdf0000644000537500020070000000115700000000000024176 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [8] subset: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [4] offset: 8 strides: [16] ... [one raw binary ASDF block omitted] #ASDF BLOCK INDEX %YAML 1.1 --- [467] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/shared.yaml0000644000537500020070000000065100000000000024221 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] subset: !core/ndarray-1.0.0 data: [1, 3, 5, 7] datatype: int64 shape: [4] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/stream.asdf0000644000537500020070000000163300000000000024222 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} my_stream: !core/ndarray-1.0.0 source: -1 datatype: float64 byteorder: little shape: ['*', 8] ... [one raw binary streamed block omitted]
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/stream.yaml0000644000537500020070000000126200000000000024245 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG !
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} my_stream: !core/ndarray-1.0.0 data:
- [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
- [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
- [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0]
- [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0]
- [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0]
- [5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0]
- [6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0]
- [7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0]
datatype: float64 shape: [8, 8] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/unicode_bmp.asdf0000644000537500020070000000117500000000000025214 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 source: 1 datatype: [ucs4, 2] byteorder: big shape: [2] ... [two raw binary ASDF blocks omitted] #ASDF BLOCK INDEX %YAML 1.1 --- [454, 524] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/unicode_bmp.yaml0000644000537500020070000000065300000000000025241 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 data: ['', Æʩ] datatype: [ucs4, 2] shape: [2] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/unicode_spp.asdf0000644000537500020070000000117500000000000025240 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 source: 1 datatype: [ucs4, 2] byteorder: big shape: [2] ... [two raw binary ASDF blocks omitted] #ASDF BLOCK INDEX %YAML 1.1 --- [454, 524] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/1.0.0/unicode_spp.yaml0000644000537500020070000000067300000000000025267 0ustar00wjamiesonSTSCI\science#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 data: ['', "\U00010020"] datatype: [ucs4, 2] shape: [2] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/README.rst0000644000537500020070000000133000000000000023115 0ustar00wjamiesonSTSCI\scienceThis directory contains reference ASDF files. ASDF parser implementations are encouraged to use these files as part of their test suite. There is a directory here for each version of the ASDF standard.
They contain pairs of files: one ``.asdf`` file and one ``.yaml`` file. To use the reference file suite, load the ``.asdf`` file and perform the following transformations:

- Convert all ``core/ndarray`` tags to in-line YAML data.
- Load and store inline all ``JSON Pointer`` references.
- Dereference all YAML aliases to anchors.

Compare the result to the matching ``.yaml`` file. For compliance, the files do not need to be byte-for-byte identical, but should represent the same values at the YAML level.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8232296 asdf-2.9.2/asdf-standard/reference_files/generate/0000755000537500020070000000000000000000000023223 5ustar00wjamiesonSTSCI\science
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/generate/README.rst0000644000537500020070000000036500000000000024716 0ustar00wjamiesonSTSCI\scienceTo regenerate the reference files:

    ./generate.py $version

where $version is the version of the ASDF standard to generate. The resulting reference files should be inspected for correctness by hand and then committed to the git repository.
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/reference_files/generate/generate0000755000537500020070000001043700000000000024750 0ustar00wjamiesonSTSCI\science#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import os

try:
    import asdf
except ImportError:
    raise ImportError(
        "asdf must be installed to regenerate the reference files.")

import numpy as np


def ref_basic(fd):
    tree = {
        'data': np.arange(8)
    }
    asdf.AsdfFile(tree).write_to(fd)


def ref_int(fd):
    tree = {}
    for size in (1, 2, 4):
        bits = size * 8
        for endian in ['>', '<']:
            values = [(1 << (bits - 1)) - 1, -(1 << (bits - 1)), 0]
            datatype = '%si%d' % (endian, size)
            arr = np.array(values, datatype)
            tree['datatype' + datatype] = arr

            values = [(1 << bits) - 1, 0]
            datatype = '%su%d' % (endian, size)
            arr = np.array(values, datatype)
            tree['datatype' + datatype] = arr
    asdf.AsdfFile(tree).write_to(fd)


def ref_float(fd):
    tree = {}
    for size in (4, 8):
        for endian in ['>', '<']:
            datatype = '%sf%d' % (endian, size)
            finfo = np.finfo(np.dtype(datatype))
            values = [0.0, -0.0, np.nan, np.inf, -np.inf,
                      finfo.min, finfo.max, finfo.eps, finfo.epsneg,
                      finfo.tiny]
            arr = np.array(values, datatype)
            tree['datatype' + datatype] = arr
    asdf.AsdfFile(tree).write_to(fd)


def ref_complex(fd):
    tree = {}
    for size in (4, 8):
        for endian in ['>', '<']:
            datatype = '%sf%d' % (endian, size)
            finfo = np.finfo(np.dtype(datatype))
            values = [0.0, -0.0, np.nan, np.inf, -np.inf,
                      finfo.min, finfo.max, finfo.eps, finfo.epsneg,
                      finfo.tiny]
            complex_values = []
            for x in values:
                for y in values:
                    complex_values.append(x + 1j * y)
            datatype = '%sc%d' % (endian, size * 2)
            arr = np.array(complex_values, datatype)
            tree['datatype' + datatype] = arr
    asdf.AsdfFile(tree).write_to(fd)


def ref_ascii(fd):
    arr = np.array([b'', b'ascii'], dtype='S')
    tree = {'data': arr}
    asdf.AsdfFile(tree).write_to(fd)
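# Illustrative aside, not part of the upstream script: the README above
# says that a reference .asdf file, once its ndarrays are inlined and its
# references and YAML aliases are resolved, should match the neighbouring
# .yaml file at the YAML level (not byte-for-byte). A minimal sketch of
# that check, using the same asdf 2.x API as generate() below; the name
# _verify_reference_file is hypothetical:
def _verify_reference_file(filename):
    with asdf.open(filename + ".asdf") as af:
        af.resolve_and_inline()
        af.write_to(filename + ".check.yaml")
    # Compare filename + ".check.yaml" against filename + ".yaml" with any
    # YAML-level diff; equality is judged on parsed values, not raw bytes.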
def ref_unicode_bmp(fd):
    tree = {}
    for endian in ['>', '<']:
        arr = np.array(['', 'Æʩ'], dtype=endian + 'U')
        tree['datatype' + endian + 'U'] = arr
    asdf.AsdfFile(tree).write_to(fd)


def ref_unicode_spp(fd):
    tree = {}
    for endian in ['>', '<']:
        arr = np.array(['', '\U00010020'], dtype=endian + 'U')
        tree['datatype' + endian + 'U'] = arr
    asdf.AsdfFile(tree).write_to(fd)


def ref_shared(fd):
    data = np.arange(8)
    tree = {
        'data': data,
        'subset': data[1::2]
    }
    asdf.AsdfFile(tree).write_to(fd)


def ref_stream(fd):
    tree = {
        # Each "row" of data will have 8 entries.
        'my_stream': asdf.Stream([8], np.float64)
    }

    ff = asdf.AsdfFile(tree)
    with open(fd, 'wb') as fd:
        ff.write_to(fd)
        # Write 8 rows of data, one row at a time. The stream expects the
        # raw binary bytes, not an array, so we use ``tobytes()``.
        for i in range(8):
            fd.write(np.array([i] * 8, np.float64).tobytes())


def ref_exploded(fd):
    tree = {
        'data': np.arange(8)
    }
    asdf.AsdfFile(tree).write_to(fd, all_array_storage='external')


def ref_compressed(fd):
    tree = {
        'zlib': np.arange(128),
        'bzp2': np.arange(128)
    }
    ff = asdf.AsdfFile(tree)
    ff.set_array_compression(tree['zlib'], 'zlib')
    ff.set_array_compression(tree['bzp2'], 'bzp2')
    ff.write_to(fd)


def generate(version):
    outdir = os.path.join(os.path.dirname(__file__), '..', version)

    for name, func in globals().items():
        if not name.startswith("ref_"):
            continue
        name = name[4:]
        filename = os.path.join(outdir, name)

        func(filename + ".asdf")

        # Bind the open file to a fresh name ("af") rather than reusing
        # "asdf", which would shadow the module and break the loop.
        with asdf.open(filename + ".asdf") as af:
            af.resolve_and_inline()
            af.write_to(filename + ".yaml")


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        "generate", description="Regenerate the ASDF reference files")
    parser.add_argument(
        "version", type=str, nargs=1, help="The ASDF version")
    args = parser.parse_args()

    generate(args.version[0])
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.7848191 asdf-2.9.2/asdf-standard/resources/0000755000537500020070000000000000000000000020323 5ustar00wjamiesonSTSCI\science
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.7849174 asdf-2.9.2/asdf-standard/resources/asdf-format.org/0000755000537500020070000000000000000000000023314 5ustar00wjamiesonSTSCI\science
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.7851484 asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/0000755000537500020070000000000000000000000024244 5ustar00wjamiesonSTSCI\science
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8255148 asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/0000755000537500020070000000000000000000000026235 5ustar00wjamiesonSTSCI\science
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.0.0.yaml0000644000537500020070000001755100000000000030514 0ustar00wjamiesonSTSCI\scienceid: asdf://asdf-format.org/core/manifests/core-1.0.0 extension_uri: asdf://asdf-format.org/core/extensions/core-1.0.0 title: Core extension 1.0.0 description: Tags for ASDF core objects. asdf_standard_requirement: 1.0.0 tags:
- tag_uri: tag:stsci.edu:asdf/core/asdf-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.0.0 title: Top-level schema for every ASDF file. description: This schema contains the top-level attributes for every ASDF file.
- tag_uri: tag:stsci.edu:asdf/core/column-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0 title: A column in a table. description: |- Each column contains a name and an array of data, and an optional description and unit.
- tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0 title: Complex number value. description: |- Represents a complex number matching the following EBNF grammar ``` dot = "."
plus-or-minus = "+" | "-" digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" sign = "" | plus-or-minus suffix = "J" | "j" | "I" | "i" inf = "inf" | "INF" nan = "nan" | "NAN" number = digits | dot digits | digits dot digits sci-suffix = "e" | "E" scientific = number sci-suffix sign digits real = sign number | sign scientific imag = number suffix | scientific suffix complex = real | sign imag | real plus-or-minus imag ``` Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. For historical reasons, it is necessary to accept as valid complex numbers that are surrounded by parenthesis. - tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0 title: Specify that a value is a constant. description: Used as a utility to indicate that value is a literal constant. - tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0 title: An entry in the file history. description: |- A record of an operation that has been performed upon a file. - tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0 title: An *n*-dimensional array. description: |- There are two ways to store the data in an ndarray. - Inline in the tree: This is recommended only for small arrays. In this case, the entire ``ndarray`` tag may be a nested list, in which case the type of the array is inferred from the content. (See the rules for type inference in the ``inline-data`` definition below.) The inline data may also be given in the ``data`` property, in which case it is possible to explicitly specify the ``datatype`` and other properties. - External to the tree: The data comes from a [block](ref:block) within the same ASDF file or an external ASDF file referenced by a URI. - tag_uri: tag:stsci.edu:asdf/core/software-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0 title: Describes a software package. description: General-purpose description of a software package. - tag_uri: tag:stsci.edu:asdf/core/table-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0 title: A table. description: |- A table is represented as a list of columns, where each entry is a [column](ref:core/column-1.0.0) object, containing the data and some additional information. The data itself may be stored inline as text, or in binary in either row- or column-major order by use of the `strides` property on the individual column arrays. Each column in the table must have the same first (slowest moving) dimension. - tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0 title: A FITS file inside of an ASDF file. description: |- This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF. For example, applying an offset and scale to the data using the `BZERO` and `BSCALE` keywords. In these cases, it will not be possible to store the data in the native format from FITS and also be accessible in its proper form in the ASDF file. Only image and binary table extensions are supported. - tag_uri: tag:stsci.edu:asdf/time/time-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/time/time-1.0.0 title: Represents an instance in time. description: |- A "time" is a single instant in time. 
It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used.
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0 title: Define a new physical unit. description: |- Defines a new unit. It can be used to either: - Define a new base unit. - Create a new unit name that is equivalent to a given unit. The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0 title: Physical unit. description: |- This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax.
- tag_uri: tag:stsci.edu:asdf/wcs/celestial_frame-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.0.0 title: Represents a celestial frame. description: Represents a celestial frame.
- tag_uri: tag:stsci.edu:asdf/wcs/composite_frame-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/composite_frame-1.0.0 title: Represents a set of frames. description: Represents a set of frames.
- tag_uri: tag:stsci.edu:asdf/wcs/spectral_frame-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.0.0 title: Represents a spectral frame. description: Represents a spectral frame.
- tag_uri: tag:stsci.edu:asdf/wcs/step-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/step-1.0.0 title: Describes a single step of a WCS transform pipeline. description: Describes a single step of a WCS transform pipeline.
- tag_uri: tag:stsci.edu:asdf/wcs/wcs-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/wcs-1.0.0 title: A system for describing generalized world coordinate transformations. description: ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module.
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.1.0.yaml0000644000537500020070000002100200000000000030502 0ustar00wjamiesonSTSCI\scienceid: asdf://asdf-format.org/core/manifests/core-1.1.0 extension_uri: asdf://asdf-format.org/core/extensions/core-1.1.0 title: Core extension 1.1.0 description: Tags for ASDF core objects.
asdf_standard_requirement: 1.1.0 tags: - tag_uri: tag:stsci.edu:asdf/core/asdf-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.0.0 title: Top-level schema for every ASDF file. description: This schema contains the top-level attributes for every ASDF file. - tag_uri: tag:stsci.edu:asdf/core/column-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0 title: A column in a table. description: |- Each column contains a name and an array of data, and an optional description and unit. - tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0 title: Complex number value. description: |- Represents a complex number matching the following EBNF grammar ``` dot = "." plus-or-minus = "+" | "-" digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" sign = "" | plus-or-minus suffix = "J" | "j" | "I" | "i" inf = "inf" | "INF" nan = "nan" | "NAN" number = digits | dot digits | digits dot digits sci-suffix = "e" | "E" scientific = number sci-suffix sign digits real = sign number | sign scientific imag = number suffix | scientific suffix complex = real | sign imag | real plus-or-minus imag ``` Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. For historical reasons, it is necessary to accept as valid complex numbers that are surrounded by parenthesis. - tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0 title: Specify that a value is a constant. description: Used as a utility to indicate that value is a literal constant. - tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0 title: An entry in the file history. description: |- A record of an operation that has been performed upon a file. - tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0 title: An *n*-dimensional array. description: |- There are two ways to store the data in an ndarray. - Inline in the tree: This is recommended only for small arrays. In this case, the entire ``ndarray`` tag may be a nested list, in which case the type of the array is inferred from the content. (See the rules for type inference in the ``inline-data`` definition below.) The inline data may also be given in the ``data`` property, in which case it is possible to explicitly specify the ``datatype`` and other properties. - External to the tree: The data comes from a [block](ref:block) within the same ASDF file or an external ASDF file referenced by a URI. - tag_uri: tag:stsci.edu:asdf/core/software-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0 title: Describes a software package. description: General-purpose description of a software package. - tag_uri: tag:stsci.edu:asdf/core/table-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0 title: A table. description: |- A table is represented as a list of columns, where each entry is a [column](ref:core/column-1.0.0) object, containing the data and some additional information. The data itself may be stored inline as text, or in binary in either row- or column-major order by use of the `strides` property on the individual column arrays. Each column in the table must have the same first (slowest moving) dimension. - tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0 title: A FITS file inside of an ASDF file. 
description: |- This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF. For example, applying an offset and scale to the data using the `BZERO` and `BSCALE` keywords. In these cases, it will not be possible to store the data in the native format from FITS and also be accessible in its proper form in the ASDF file. Only image and binary table extensions are supported.
- tag_uri: tag:stsci.edu:asdf/time/time-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/time/time-1.1.0 title: Represents an instance in time. description: |- A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used.
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0 title: Define a new physical unit. description: |- Defines a new unit. It can be used to either: - Define a new base unit. - Create a new unit name that is equivalent to a given unit. The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/quantity-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/unit/quantity-1.1.0 title: Represents a Quantity object from astropy description: |- A Quantity object represents a value that has some unit associated with the number.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0 title: Physical unit. description: |- This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax.
- tag_uri: tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0 title: Represents a celestial frame. description: Represents a celestial frame.
- tag_uri: tag:stsci.edu:asdf/wcs/composite_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0 title: Represents a set of frames. description: Represents a set of frames.
- tag_uri: tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0 title: Represents an ICRS coordinate object from astropy description: This object represents the right ascension (RA) and declination of an ICRS coordinate or frame.
The astropy ICRS class contains additional fields that may be useful to add here in the future. - tag_uri: tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0 title: Represents a spectral frame. description: Represents a spectral frame. - tag_uri: tag:stsci.edu:asdf/wcs/step-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/step-1.1.0 title: Describes a single step of a WCS transform pipeline. description: Describes a single step of a WCS transform pipeline. - tag_uri: tag:stsci.edu:asdf/wcs/wcs-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/wcs-1.0.0 title: A system for describing generalized world coordinate transformations. description: ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.2.0.yaml0000644000537500020070000002147400000000000030515 0ustar00wjamiesonSTSCI\scienceid: asdf://asdf-format.org/core/manifests/core-1.2.0 extension_uri: asdf://asdf-format.org/core/extensions/core-1.2.0 title: Core extension 1.2.0 description: Tags for ASDF core objects. asdf_standard_requirement: 1.2.0 tags: - tag_uri: tag:stsci.edu:asdf/core/asdf-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.1.0 title: Top-level schema for every ASDF file. description: This schema contains the top-level attributes for every ASDF file. - tag_uri: tag:stsci.edu:asdf/core/column-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0 title: A column in a table. description: |- Each column contains a name and an array of data, and an optional description and unit. - tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0 title: Complex number value. description: |- Represents a complex number matching the following EBNF grammar ``` dot = "." plus-or-minus = "+" | "-" digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" sign = "" | plus-or-minus suffix = "J" | "j" | "I" | "i" inf = "inf" | "INF" nan = "nan" | "NAN" number = digits | dot digits | digits dot digits sci-suffix = "e" | "E" scientific = number sci-suffix sign digits real = sign number | sign scientific imag = number suffix | scientific suffix complex = real | sign imag | real plus-or-minus imag ``` Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. For historical reasons, it is necessary to accept as valid complex numbers that are surrounded by parenthesis. - tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0 title: Specify that a value is a constant. description: Used as a utility to indicate that value is a literal constant. - tag_uri: tag:stsci.edu:asdf/core/extension_metadata-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0 title: Metadata about specific ASDF extensions that were used to create this file. description: Metadata about specific ASDF extensions that were used to create this file. - tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0 title: An entry in the file history. description: |- A record of an operation that has been performed upon a file. 
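(Aside: the complex-number grammar quoted above lines up closely with Python's own complex literals, so it can be exercised with a few lines of standard-library Python. The sketch below is illustrative only; `parse_asdf_complex` is a hypothetical helper, not part of the asdf package.)

```python
def parse_asdf_complex(text):
    """Parse an ASDF complex-number string (hypothetical helper)."""
    text = text.strip()
    # For historical reasons the value may be wrapped in parentheses;
    # Python's complex() tolerates these too, but we strip them so the
    # suffix check below sees the final character.
    if text.startswith("(") and text.endswith(")"):
        text = text[1:-1]
    # The grammar allows J, j, I or i as the imaginary suffix; complex()
    # only understands j/J, so rewrite a trailing I/i. Only the final
    # character can be the suffix, so "inf" and "nan" are untouched.
    if text and text[-1] in "iI":
        text = text[:-1] + "j"
    return complex(text)

print(parse_asdf_complex("(nan-infj)"))          # (nan-infj)
print(parse_asdf_complex("2.22044604925e-16i"))  # 2.22044604925e-16j
```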
- tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0 title: An *n*-dimensional array. description: |- There are two ways to store the data in an ndarray. - Inline in the tree: This is recommended only for small arrays. In this case, the entire ``ndarray`` tag may be a nested list, in which case the type of the array is inferred from the content. (See the rules for type inference in the ``inline-data`` definition below.) The inline data may also be given in the ``data`` property, in which case it is possible to explicitly specify the ``datatype`` and other properties. - External to the tree: The data comes from a [block](ref:block) within the same ASDF file or an external ASDF file referenced by a URI. - tag_uri: tag:stsci.edu:asdf/core/software-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0 title: Describes a software package. description: General-purpose description of a software package. - tag_uri: tag:stsci.edu:asdf/core/table-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0 title: A table. description: |- A table is represented as a list of columns, where each entry is a [column](ref:core/column-1.0.0) object, containing the data and some additional information. The data itself may be stored inline as text, or in binary in either row- or column-major order by use of the `strides` property on the individual column arrays. Each column in the table must have the same first (slowest moving) dimension. - tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0 title: A FITS file inside of an ASDF file. description: |- This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF. For example, applying an offset and scale to the data using the `BZERO` and `BSCALE` keywords. In these cases, it will not be possible to store the data in the native format from FITS and also be accessible in its proper form in the ASDF file. Only image and binary table extensions are supported. - tag_uri: tag:stsci.edu:asdf/time/time-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/time/time-1.1.0 title: Represents an instance in time. description: |- A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used. 
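(Aside: the two ndarray storage modes described above, inline YAML versus a binary block, can be seen from Python. The sketch below assumes the asdf 2.x API, specifically `AsdfFile.set_array_storage`; the output file names are arbitrary.)

```python
import asdf
import numpy as np

arr = np.arange(8)
af = asdf.AsdfFile({"data": arr})

# Default: the array is written as a raw binary block after the YAML tree
# and referenced from it via "source"/"byteorder", as in the .asdf
# reference files earlier in this archive.
af.write_to("block.asdf")

# Inline: the same array is embedded in the tree as a nested YAML list,
# as in the matching .yaml reference files.
af.set_array_storage(arr, "inline")
af.write_to("inline.asdf")
```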
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0 title: Define a new physical unit. description: |- Defines a new unit. It can be used to either: - Define a new base unit. - Create a new unit name that is equivalent to a given unit. The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/quantity-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/unit/quantity-1.1.0 title: Represents a Quantity object from astropy description: |- A Quantity object represents a value that has some unit associated with the number.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0 title: Physical unit. description: |- This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax.
- tag_uri: tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0 title: Represents a celestial frame. description: Represents a celestial frame.
- tag_uri: tag:stsci.edu:asdf/wcs/composite_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0 title: Represents a set of frames. description: Represents a set of frames.
- tag_uri: tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0 title: Represents an ICRS coordinate object from astropy description: This object represents the right ascension (RA) and declination of an ICRS coordinate or frame. The astropy ICRS class contains additional fields that may be useful to add here in the future.
- tag_uri: tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0 title: Represents a spectral frame. description: Represents a spectral frame.
- tag_uri: tag:stsci.edu:asdf/wcs/step-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/step-1.1.0 title: Describes a single step of a WCS transform pipeline. description: Describes a single step of a WCS transform pipeline.
- tag_uri: tag:stsci.edu:asdf/wcs/wcs-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/wcs-1.1.0 title: A system for describing generalized world coordinate transformations. description: ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module.
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.3.0.yaml0000644000537500020070000002270100000000000030510 0ustar00wjamiesonSTSCI\scienceid: asdf://asdf-format.org/core/manifests/core-1.3.0 extension_uri: asdf://asdf-format.org/core/extensions/core-1.3.0 title: Core extension 1.3.0 description: Tags for ASDF core objects. asdf_standard_requirement: 1.3.0 tags:
- tag_uri: tag:stsci.edu:asdf/core/asdf-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.1.0 title: Top-level schema for every ASDF file. description: This schema contains the top-level attributes for every ASDF file.
- tag_uri: tag:stsci.edu:asdf/core/column-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0 title: A column in a table. description: |- Each column contains a name and an array of data, and an optional description and unit.
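(Aside: manifests such as this one are registered as asdf resources. The sketch below assumes asdf 2.8 or later, where `asdf.get_config().resource_manager` behaves as a read-only mapping from resource URI to raw YAML bytes; earlier versions expose resources differently.)

```python
import asdf
import yaml

# Look this manifest up through asdf's resource machinery and list the
# tag-to-schema associations it declares.
uri = "asdf://asdf-format.org/core/manifests/core-1.3.0"
manifest = yaml.safe_load(asdf.get_config().resource_manager[uri])

print(manifest["extension_uri"])
for tag in manifest["tags"]:
    print(tag["tag_uri"], "->", tag["schema_uri"])
```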
- tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0 title: Complex number value. description: |- Represents a complex number matching the following EBNF grammar ``` dot = "." plus-or-minus = "+" | "-" digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" sign = "" | plus-or-minus suffix = "J" | "j" | "I" | "i" inf = "inf" | "INF" nan = "nan" | "NAN" number = digits | dot digits | digits dot digits sci-suffix = "e" | "E" scientific = number sci-suffix sign digits real = sign number | sign scientific imag = number suffix | scientific suffix complex = real | sign imag | real plus-or-minus imag ``` Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. For historical reasons, it is necessary to accept as valid complex numbers that are surrounded by parenthesis. - tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0 title: Specify that a value is a constant. description: Used as a utility to indicate that value is a literal constant. - tag_uri: tag:stsci.edu:asdf/core/extension_metadata-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0 title: Metadata about specific ASDF extensions that were used to create this file. description: Metadata about specific ASDF extensions that were used to create this file. - tag_uri: tag:stsci.edu:asdf/core/externalarray-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/externalarray-1.0.0 title: Point to an array-like object in an external file. description: |- Allow referencing of array-like objects in external files. These files can be any type of file and in any absolute or relative location to the asdf file. Loading of these files into arrays is not handled by asdf. - tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0 title: An entry in the file history. description: |- A record of an operation that has been performed upon a file. - tag_uri: tag:stsci.edu:asdf/core/integer-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/integer-1.0.0 title: Arbitrary precision integer value. description: Represents an arbitrarily large integer value. - tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0 title: An *n*-dimensional array. description: |- There are two ways to store the data in an ndarray. - Inline in the tree: This is recommended only for small arrays. In this case, the entire ``ndarray`` tag may be a nested list, in which case the type of the array is inferred from the content. (See the rules for type inference in the ``inline-data`` definition below.) The inline data may also be given in the ``data`` property, in which case it is possible to explicitly specify the ``datatype`` and other properties. - External to the tree: The data comes from a [block](ref:block) within the same ASDF file or an external ASDF file referenced by a URI. - tag_uri: tag:stsci.edu:asdf/core/software-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0 title: Describes a software package. description: General-purpose description of a software package. - tag_uri: tag:stsci.edu:asdf/core/table-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0 title: A table. 
description: |- A table is represented as a list of columns, where each entry is a [column](ref:core/column-1.0.0) object, containing the data and some additional information. The data itself may be stored inline as text, or in binary in either row- or column-major order by use of the `strides` property on the individual column arrays. Each column in the table must have the same first (slowest moving) dimension.
- tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0 title: A FITS file inside of an ASDF file. description: |- This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF. For example, applying an offset and scale to the data using the `BZERO` and `BSCALE` keywords. In these cases, it will not be possible to store the data in the native format from FITS and also be accessible in its proper form in the ASDF file. Only image and binary table extensions are supported.
- tag_uri: tag:stsci.edu:asdf/time/time-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/time/time-1.1.0 title: Represents an instance in time. description: |- A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used.
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0 title: Define a new physical unit. description: |- Defines a new unit. It can be used to either: - Define a new base unit. - Create a new unit name that is equivalent to a given unit. The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/quantity-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/unit/quantity-1.1.0 title: Represents a Quantity object from astropy description: |- A Quantity object represents a value that has some unit associated with the number.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0 schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0 title: Physical unit. description: |- This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax.
- tag_uri: tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0 schema_uri: http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0 title: Represents a celestial frame.
- tag_uri: tag:stsci.edu:asdf/wcs/composite_frame-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0
  title: Represents a set of frames.
  description: Represents a set of frames.
- tag_uri: tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0
  title: Represents an ICRS coordinate object from astropy
  description: This object represents the right ascension (RA) and
    declination of an ICRS coordinate or frame. The astropy ICRS class
    contains additional fields that may be useful to add here in the
    future.
- tag_uri: tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0
  title: Represents a spectral frame.
  description: Represents a spectral frame.
- tag_uri: tag:stsci.edu:asdf/wcs/step-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/step-1.1.0
  title: Describes a single step of a WCS transform pipeline.
  description: Describes a single step of a WCS transform pipeline.
- tag_uri: tag:stsci.edu:asdf/wcs/wcs-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/wcs-1.1.0
  title: A system for describing generalized world coordinate transformations.
  description: ASDF WCS is a way of specifying transformations (usually
    from detector space to world coordinate space and back) by using the
    transformations in the `transform-schema` module.

# asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.4.0.yaml
id: asdf://asdf-format.org/core/manifests/core-1.4.0
extension_uri: asdf://asdf-format.org/core/extensions/core-1.4.0
title: Core extension 1.4.0
description: Tags for ASDF core objects.
asdf_standard_requirement: 1.4.0
tags:
- tag_uri: tag:stsci.edu:asdf/core/asdf-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.1.0
  title: Top-level schema for every ASDF file.
  description: This schema contains the top-level attributes for every ASDF file.
- tag_uri: tag:stsci.edu:asdf/core/column-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0
  title: A column in a table.
  description: |-
    Each column contains a name and an array of data, and an optional
    description and unit.
- tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0
  title: Complex number value.
  description: |-
    Represents a complex number matching the following EBNF grammar

    ```
    dot           = "."
    plus-or-minus = "+" | "-"
    digit         = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
    sign          = "" | plus-or-minus
    suffix        = "J" | "j" | "I" | "i"
    inf           = "inf" | "INF"
    nan           = "nan" | "NAN"
    number        = digits | dot digits | digits dot digits
    sci-suffix    = "e" | "E"
    scientific    = number sci-suffix sign digits
    real          = sign number | sign scientific
    imag          = number suffix | scientific suffix
    complex       = real | sign imag | real plus-or-minus imag
    ```

    Though `J`, `j`, `I` and `i` must be supported on reading, it is
    recommended to use `i` when writing.

    For historical reasons, complex numbers surrounded by parentheses must
    also be accepted as valid.
- tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0
  title: Specify that a value is a constant.
  description: Used as a utility to indicate that a value is a literal constant.
- tag_uri: tag:stsci.edu:asdf/core/extension_metadata-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0
  title: Metadata about specific ASDF extensions that were used to create this file.
  description: Metadata about specific ASDF extensions that were used to create this file.
- tag_uri: tag:stsci.edu:asdf/core/externalarray-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/externalarray-1.0.0
  title: Point to an array-like object in an external file.
  description: |-
    Allow referencing of array-like objects in external files. These files
    may be of any type, at any absolute or relative location with respect
    to the asdf file. Loading of these files into arrays is not handled by
    asdf.
- tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0
  title: An entry in the file history.
  description: |-
    A record of an operation that has been performed upon a file.
- tag_uri: tag:stsci.edu:asdf/core/integer-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/integer-1.0.0
  title: Arbitrary precision integer value.
  description: Represents an arbitrarily large integer value.
- tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0
  title: An *n*-dimensional array.
  description: |-
    There are two ways to store the data in an ndarray.

    - Inline in the tree: This is recommended only for small arrays. In
      this case, the entire ``ndarray`` tag may be a nested list, in which
      case the type of the array is inferred from the content. (See the
      rules for type inference in the ``inline-data`` definition below.)
      The inline data may also be given in the ``data`` property, in which
      case it is possible to explicitly specify the ``datatype`` and other
      properties.

    - External to the tree: The data comes from a [block](ref:block)
      within the same ASDF file or an external ASDF file referenced by a
      URI.
- tag_uri: tag:stsci.edu:asdf/core/software-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0
  title: Describes a software package.
  description: General-purpose description of a software package.
- tag_uri: tag:stsci.edu:asdf/core/subclass_metadata-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/subclass_metadata-1.0.0
  title: Metadata on a serialized subclass of an ASDF-enabled type.
  description: |-
    Identifies the specific subclass that was serialized, to enable ASDF
    readers to correctly deserialize the object.
- tag_uri: tag:stsci.edu:asdf/core/table-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0
  title: A table.
  description: |-
    A table is represented as a list of columns, where each entry is a
    [column](ref:core/column-1.0.0) object, containing the data and some
    additional information.

    The data itself may be stored inline as text, or in binary in either
    row- or column-major order by use of the `strides` property on the
    individual column arrays. Each column in the table must have the same
    first (slowest moving) dimension.
- tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0
  title: A FITS file inside of an ASDF file.
  description: |-
    This schema is useful for distributing ASDF files that can
    automatically be converted to FITS files by specifying the exact
    content of the resulting FITS file.

    Not all kinds of data in FITS are directly representable in ASDF. One
    example is data to which an offset and scale have been applied using
    the `BZERO` and `BSCALE` keywords. In these cases, it will not be
    possible to store the data in the native format from FITS and also
    have it accessible in its proper form in the ASDF file.

    Only image and binary table extensions are supported.
- tag_uri: tag:stsci.edu:asdf/time/time-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/time/time-1.1.0
  title: Represents an instance in time.
  description: |-
    A "time" is a single instant in time. It may explicitly specify the
    way time is represented (the "format") and the "scale" which specifies
    the offset and scaling relation of the unit of time.

    Specific emphasis is placed on supporting time scales (e.g. UTC, TAI,
    UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are
    used in astronomy and required to calculate, e.g., sidereal times and
    barycentric corrections.

    Times may be represented as one of the following:

    - an object, with explicit `value`, and optional `format`, `scale` and
      `location`.

    - a string, in which case the format is guessed from among the
      unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale
      is hardcoded to `UTC`.

    In either case, a single time tag may be used to represent an
    n-dimensional array of times, using either an `ndarray` tag or inline
    as (possibly nested) YAML lists. If YAML lists, the same format must
    be used for all time values.

    The precision of the numeric formats should only be assumed to be as
    good as an IEEE-754 double precision (float64) value. If higher
    precision is required, the `iso` or `yday` format should be used.
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0
  title: Define a new physical unit.
  description: |-
    Defines a new unit. It can be used to either:

    - Define a new base unit.

    - Create a new unit name that is equivalent to a given unit.

    The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/quantity-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/quantity-1.1.0
  title: Represents a Quantity object from astropy
  description: |-
    A Quantity object represents a value that has some unit associated
    with the number.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0
  title: Physical unit.
  description: |-
    This represents a physical unit, in
    [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html).
    Where units are not explicitly tagged, they are assumed to be in
    VOUnit syntax.
- tag_uri: tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0
  title: Represents a celestial frame.
  description: Represents a celestial frame.
- tag_uri: tag:stsci.edu:asdf/wcs/composite_frame-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0
  title: Represents a set of frames.
  description: Represents a set of frames.
- tag_uri: tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0
  title: Represents an ICRS coordinate object from astropy
  description: This object represents the right ascension (RA) and
    declination of an ICRS coordinate or frame. The astropy ICRS class
    contains additional fields that may be useful to add here in the
    future.
- tag_uri: tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0
  title: Represents a spectral frame.
  description: Represents a spectral frame.
- tag_uri: tag:stsci.edu:asdf/wcs/step-1.2.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/step-1.2.0
  title: Describes a single step of a WCS transform pipeline.
  description: Describes a single step of a WCS transform pipeline.
- tag_uri: tag:stsci.edu:asdf/wcs/wcs-1.2.0
  schema_uri: http://stsci.edu/schemas/asdf/wcs/wcs-1.2.0
  title: A system for describing generalized world coordinate transformations.
  description: ASDF WCS is a way of specifying transformations (usually
    from detector space to world coordinate space and back) by using the
    transformations in the `transform-schema` module.
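# Each tag_uri/schema_uri pair in the manifest above can be resolved from
# Python; an illustrative sketch (assumes the asdf 2.x `asdf.schema`
# module):
#
#     import asdf.schema
#
#     # Load the schema that validates objects tagged core/complex-1.0.0
#     schema = asdf.schema.load_schema(
#         "http://stsci.edu/schemas/asdf/core/complex-1.0.0")
#     print(schema["title"])   # "Complex number value."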
# asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.5.0.yaml
id: asdf://asdf-format.org/core/manifests/core-1.5.0
extension_uri: asdf://asdf-format.org/core/extensions/core-1.5.0
title: Core extension 1.5.0
description: Tags for ASDF core objects.
asdf_standard_requirement: 1.5.0
tags:
- tag_uri: tag:stsci.edu:asdf/core/asdf-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.1.0
  title: Top-level schema for every ASDF file.
  description: This schema contains the top-level attributes for every ASDF file.
- tag_uri: tag:stsci.edu:asdf/core/column-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0
  title: A column in a table.
  description: |-
    Each column contains a name and an array of data, and an optional
    description and unit.
- tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0
  title: Complex number value.
  description: |-
    Represents a complex number matching the following EBNF grammar

    ```
    dot           = "."
    plus-or-minus = "+" | "-"
    digit         = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
    sign          = "" | plus-or-minus
    suffix        = "J" | "j" | "I" | "i"
    inf           = "inf" | "INF"
    nan           = "nan" | "NAN"
    number        = digits | dot digits | digits dot digits
    sci-suffix    = "e" | "E"
    scientific    = number sci-suffix sign digits
    real          = sign number | sign scientific
    imag          = number suffix | scientific suffix
    complex       = real | sign imag | real plus-or-minus imag
    ```

    Though `J`, `j`, `I` and `i` must be supported on reading, it is
    recommended to use `i` when writing.

    For historical reasons, complex numbers surrounded by parentheses must
    also be accepted as valid.
- tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0
  title: Specify that a value is a constant.
  description: Used as a utility to indicate that a value is a literal constant.
- tag_uri: tag:stsci.edu:asdf/core/extension_metadata-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0
  title: Metadata about specific ASDF extensions that were used to create this file.
  description: Metadata about specific ASDF extensions that were used to create this file.
- tag_uri: tag:stsci.edu:asdf/core/externalarray-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/externalarray-1.0.0
  title: Point to an array-like object in an external file.
  description: |-
    Allow referencing of array-like objects in external files. These files
    may be of any type, at any absolute or relative location with respect
    to the asdf file. Loading of these files into arrays is not handled by
    asdf.
- tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0
  title: An entry in the file history.
  description: |-
    A record of an operation that has been performed upon a file.
- tag_uri: tag:stsci.edu:asdf/core/integer-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/integer-1.0.0
  title: Arbitrary precision integer value.
  description: Represents an arbitrarily large integer value.
- tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0
  title: An *n*-dimensional array.
  description: |-
    There are two ways to store the data in an ndarray.

    - Inline in the tree: This is recommended only for small arrays. In
      this case, the entire ``ndarray`` tag may be a nested list, in which
      case the type of the array is inferred from the content. (See the
      rules for type inference in the ``inline-data`` definition below.)
      The inline data may also be given in the ``data`` property, in which
      case it is possible to explicitly specify the ``datatype`` and other
      properties.

    - External to the tree: The data comes from a [block](ref:block)
      within the same ASDF file or an external ASDF file referenced by a
      URI.
- tag_uri: tag:stsci.edu:asdf/core/software-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0
  title: Describes a software package.
  description: General-purpose description of a software package.
- tag_uri: tag:stsci.edu:asdf/core/subclass_metadata-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/subclass_metadata-1.0.0
  title: Metadata on a serialized subclass of an ASDF-enabled type.
  description: |-
    Identifies the specific subclass that was serialized, to enable ASDF
    readers to correctly deserialize the object.
- tag_uri: tag:stsci.edu:asdf/core/table-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0
  title: A table.
  description: |-
    A table is represented as a list of columns, where each entry is a
    [column](ref:core/column-1.0.0) object, containing the data and some
    additional information.

    The data itself may be stored inline as text, or in binary in either
    row- or column-major order by use of the `strides` property on the
    individual column arrays. Each column in the table must have the same
    first (slowest moving) dimension.
- tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0
  title: A FITS file inside of an ASDF file.
  description: |-
    This schema is useful for distributing ASDF files that can
    automatically be converted to FITS files by specifying the exact
    content of the resulting FITS file.

    Not all kinds of data in FITS are directly representable in ASDF. One
    example is data to which an offset and scale have been applied using
    the `BZERO` and `BSCALE` keywords. In these cases, it will not be
    possible to store the data in the native format from FITS and also
    have it accessible in its proper form in the ASDF file.

    Only image and binary table extensions are supported.
- tag_uri: tag:stsci.edu:asdf/time/time-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/time/time-1.1.0
  title: Represents an instance in time.
  description: |-
    A "time" is a single instant in time. It may explicitly specify the
    way time is represented (the "format") and the "scale" which specifies
    the offset and scaling relation of the unit of time.

    Specific emphasis is placed on supporting time scales (e.g. UTC, TAI,
    UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are
    used in astronomy and required to calculate, e.g., sidereal times and
    barycentric corrections.

    Times may be represented as one of the following:

    - an object, with explicit `value`, and optional `format`, `scale` and
      `location`.

    - a string, in which case the format is guessed from among the
      unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale
      is hardcoded to `UTC`.

    In either case, a single time tag may be used to represent an
    n-dimensional array of times, using either an `ndarray` tag or inline
    as (possibly nested) YAML lists. If YAML lists, the same format must
    be used for all time values.

    The precision of the numeric formats should only be assumed to be as
    good as an IEEE-754 double precision (float64) value. If higher
    precision is required, the `iso` or `yday` format should be used.
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0
  title: Define a new physical unit.
  description: |-
    Defines a new unit. It can be used to either:

    - Define a new base unit.

    - Create a new unit name that is equivalent to a given unit.

    The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/quantity-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/quantity-1.1.0
  title: Represents a Quantity object from astropy
  description: |-
    A Quantity object represents a value that has some unit associated
    with the number.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0
  title: Physical unit.
  description: |-
    This represents a physical unit, in
    [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html).
    Where units are not explicitly tagged, they are assumed to be in
    VOUnit syntax.
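# An illustrative instance for the quantity and unit tags above (hedged:
# the exact property layout is defined by quantity-1.1.0, which is not
# reproduced in this archive):
#
#     !unit/quantity-1.1.0
#       value: 3.0
#       unit: !unit/unit-1.0.0 km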
# asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/manifests/core-1.6.0.yaml
id: asdf://asdf-format.org/core/manifests/core-1.6.0
extension_uri: asdf://asdf-format.org/core/extensions/core-1.6.0
title: Core extension 1.6.0
description: Tags for ASDF core objects.
asdf_standard_requirement: 1.6.0
tags:
- tag_uri: tag:stsci.edu:asdf/core/asdf-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/core/asdf-1.1.0
  title: Top-level schema for every ASDF file.
  description: This schema contains the top-level attributes for every ASDF file.
- tag_uri: tag:stsci.edu:asdf/core/column-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/column-1.0.0
  title: A column in a table.
  description: |-
    Each column contains a name and an array of data, and an optional
    description and unit.
- tag_uri: tag:stsci.edu:asdf/core/complex-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/complex-1.0.0
  title: Complex number value.
  description: |-
    Represents a complex number matching the following EBNF grammar

    ```
    dot           = "."
    plus-or-minus = "+" | "-"
    digit         = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
    sign          = "" | plus-or-minus
    suffix        = "J" | "j" | "I" | "i"
    inf           = "inf" | "INF"
    nan           = "nan" | "NAN"
    number        = digits | dot digits | digits dot digits
    sci-suffix    = "e" | "E"
    scientific    = number sci-suffix sign digits
    real          = sign number | sign scientific
    imag          = number suffix | scientific suffix
    complex       = real | sign imag | real plus-or-minus imag
    ```

    Though `J`, `j`, `I` and `i` must be supported on reading, it is
    recommended to use `i` when writing.

    For historical reasons, complex numbers surrounded by parentheses must
    also be accepted as valid.
- tag_uri: tag:stsci.edu:asdf/core/constant-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/constant-1.0.0
  title: Specify that a value is a constant.
  description: Used as a utility to indicate that a value is a literal constant.
- tag_uri: tag:stsci.edu:asdf/core/extension_metadata-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0
  title: Metadata about specific ASDF extensions that were used to create this file.
  description: Metadata about specific ASDF extensions that were used to create this file.
- tag_uri: tag:stsci.edu:asdf/core/externalarray-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/externalarray-1.0.0
  title: Point to an array-like object in an external file.
  description: |-
    Allow referencing of array-like objects in external files. These files
    may be of any type, at any absolute or relative location with respect
    to the asdf file. Loading of these files into arrays is not handled by
    asdf.
- tag_uri: tag:stsci.edu:asdf/core/history_entry-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/history_entry-1.0.0
  title: An entry in the file history.
  description: |-
    A record of an operation that has been performed upon a file.
- tag_uri: tag:stsci.edu:asdf/core/integer-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/integer-1.0.0
  title: Arbitrary precision integer value.
  description: Represents an arbitrarily large integer value.
- tag_uri: tag:stsci.edu:asdf/core/ndarray-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/ndarray-1.0.0
  title: An *n*-dimensional array.
  description: |-
    There are two ways to store the data in an ndarray.

    - Inline in the tree: This is recommended only for small arrays. In
      this case, the entire ``ndarray`` tag may be a nested list, in which
      case the type of the array is inferred from the content. (See the
      rules for type inference in the ``inline-data`` definition below.)
      The inline data may also be given in the ``data`` property, in which
      case it is possible to explicitly specify the ``datatype`` and other
      properties.

    - External to the tree: The data comes from a [block](ref:block)
      within the same ASDF file or an external ASDF file referenced by a
      URI.
- tag_uri: tag:stsci.edu:asdf/core/software-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/software-1.0.0
  title: Describes a software package.
  description: General-purpose description of a software package.
- tag_uri: tag:stsci.edu:asdf/core/subclass_metadata-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/subclass_metadata-1.0.0
  title: Metadata on a serialized subclass of an ASDF-enabled type.
  description: |-
    Identifies the specific subclass that was serialized, to enable ASDF
    readers to correctly deserialize the object.
- tag_uri: tag:stsci.edu:asdf/core/table-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/core/table-1.0.0
  title: A table.
  description: |-
    A table is represented as a list of columns, where each entry is a
    [column](ref:core/column-1.0.0) object, containing the data and some
    additional information.

    The data itself may be stored inline as text, or in binary in either
    row- or column-major order by use of the `strides` property on the
    individual column arrays. Each column in the table must have the same
    first (slowest moving) dimension.
- tag_uri: tag:stsci.edu:asdf/fits/fits-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/fits/fits-1.0.0
  title: A FITS file inside of an ASDF file.
  description: |-
    This schema is useful for distributing ASDF files that can
    automatically be converted to FITS files by specifying the exact
    content of the resulting FITS file.

    Not all kinds of data in FITS are directly representable in ASDF. One
    example is data to which an offset and scale have been applied using
    the `BZERO` and `BSCALE` keywords. In these cases, it will not be
    possible to store the data in the native format from FITS and also
    have it accessible in its proper form in the ASDF file.

    Only image and binary table extensions are supported.
- tag_uri: tag:stsci.edu:asdf/time/time-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/time/time-1.1.0
  title: Represents an instance in time.
  description: |-
    A "time" is a single instant in time. It may explicitly specify the
    way time is represented (the "format") and the "scale" which specifies
    the offset and scaling relation of the unit of time.

    Specific emphasis is placed on supporting time scales (e.g. UTC, TAI,
    UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are
    used in astronomy and required to calculate, e.g., sidereal times and
    barycentric corrections.

    Times may be represented as one of the following:

    - an object, with explicit `value`, and optional `format`, `scale` and
      `location`.

    - a string, in which case the format is guessed from among the
      unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale
      is hardcoded to `UTC`.

    In either case, a single time tag may be used to represent an
    n-dimensional array of times, using either an `ndarray` tag or inline
    as (possibly nested) YAML lists. If YAML lists, the same format must
    be used for all time values.

    The precision of the numeric formats should only be assumed to be as
    good as an IEEE-754 double precision (float64) value. If higher
    precision is required, the `iso` or `yday` format should be used.
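# Illustrative instances for the time-1.1.0 tag above, mirroring the
# examples in time-1.0.0.yaml later in this archive (the 1.1.0 forms are
# assumed to be analogous):
#
#     !time/time-1.1.0 "2000-12-31T13:05:27.737"   # ISO string, scale UTC
#     !time/time-1.1.0 {value: 2000.0, format: jyear, scale: tdb}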
- tag_uri: tag:stsci.edu:asdf/unit/defunit-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/defunit-1.0.0
  title: Define a new physical unit.
  description: |-
    Defines a new unit. It can be used to either:

    - Define a new base unit.

    - Create a new unit name that is equivalent to a given unit.

    The new unit must be defined before any unit tags that use it.
- tag_uri: tag:stsci.edu:asdf/unit/quantity-1.1.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/quantity-1.1.0
  title: Represents a Quantity object from astropy
  description: |-
    A Quantity object represents a value that has some unit associated
    with the number.
- tag_uri: tag:stsci.edu:asdf/unit/unit-1.0.0
  schema_uri: http://stsci.edu/schemas/asdf/unit/unit-1.0.0
  title: Physical unit.
  description: |-
    This represents a physical unit, in
    [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html).
    Where units are not explicitly tagged, they are assumed to be in
    VOUnit syntax.

# asdf-2.9.2/asdf-standard/resources/asdf-format.org/core/schemas/extension_manifest-1.0.0.yaml
%YAML 1.1
---
$schema: http://stsci.edu/schemas/yaml-schema/draft-01
id: asdf://asdf-format.org/core/schemas/extension_manifest-1.0.0

title: ASDF extension manifest
description: >
  Manifest of additional tags and other features associated with an
  extension to the ASDF Standard. This schema is provisional and not yet
  included in any ASDF Standard version.

definitions:
  version:
    description: >
      A string property whose value matches a 1-3 part version number
      (pre-release version not permitted).
    type: string
    pattern: '^(0|[1-9]\d*)(\.(0|[1-9]\d*)){0,2}$'

type: object
properties:
  id:
    description: >
      URI of the extension manifest resource.
    type: string
  extension_uri:
    description: >
      The extension's identifying URI.
    type: string
  title:
    description: >
      Short description of the extension.
    type: string
  description:
    description: >
      Long description of the extension.
    type: string
  asdf_standard_requirement:
    description: >
      ASDF Standard version requirement.
    anyOf:
      - description: >
          Require exact version.
        $ref: '#/definitions/version'
      - type: object
        properties:
          gt:
            description: >
              Require versions greater than.
            $ref: '#/definitions/version'
          gte:
            description: >
              Require versions greater than or equal.
            $ref: '#/definitions/version'
          lt:
            description: >
              Require versions less than.
            $ref: '#/definitions/version'
          lte:
            description: >
              Require versions less than or equal.
            $ref: '#/definitions/version'
        additionalProperties: false
  tags:
    description: >
      List of additional tags supported by this extension.
    type: array
    items:
      anyOf:
        - description: >
            The tag's identifying URI.
          type: string
        - description: >
            Tag definition object.
          type: object
          properties:
            tag_uri:
              description: >
                The tag's identifying URI.
              type: string
            schema_uri:
              description: >
                URI of schema used to validate objects with this tag.
              anyOf:
                - type: string
                - type: array
                  items:
                    type: string
            title:
              description: >
                Short description of the tag.
              type: string
            description:
              description: >
                Long description of the tag.
              type: string
          required: [tag_uri]
          additionalProperties: false
required: [id, extension_uri]
additionalProperties: false
...
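# A minimal manifest instance accepted by the schema above (all URIs are
# illustrative placeholders):
#
#     id: asdf://example.org/manifests/example-1.0.0
#     extension_uri: asdf://example.org/extensions/example-1.0.0
#     title: Example extension
#     asdf_standard_requirement:
#       gte: 1.5.0
#       lt: 1.7.0
#     tags:
#       - tag_uri: asdf://example.org/tags/thing-1.0.0
#         schema_uri: asdf://example.org/schemas/thing-1.0.0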
# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/asdf-schema-1.0.0.yaml
%YAML 1.1
---
$schema: "http://json-schema.org/draft-04/schema"
id: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0"
title: ASDF Schema
description: |
  Extending YAML Schema and JSON Schema to add support for some
  ASDF-specific checks, related to [ndarrays](ref:core/ndarray-1.0.0).

allOf:
  - $ref: "http://stsci.edu/schemas/yaml-schema/draft-01"
  - type: object
    properties:
      max_ndim:
        description: |
          Specifies that the corresponding **ndarray** has at most the
          given number of dimensions. If the array has fewer dimensions,
          it should be logically treated as if it were "broadcast" to the
          expected dimensions by adding 1's to the front of the shape
          list.
        type: integer
        minimum: 0
      ndim:
        description: |
          Specifies that the matching **ndarray** has exactly the given
          number of dimensions.
        type: integer
        minimum: 0
      datatype:
        description: |
          Specifies the datatype of the **ndarray**.

          By default, an array is considered "matching" if the array can
          be cast to the given datatype without data loss. For exact
          datatype matching, set `exact_datatype` to `true`.
        allOf:
          - $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0#/definitions/datatype"
      exact_datatype:
        description: |
          If `true`, the datatype must match exactly.
        type: boolean
        default: false

      # Redefine JSON schema validators in terms of this document so that
      # we can check nested objects:
      additionalItems:
        anyOf:
          - type: boolean
          - $ref: "#"
      items:
        anyOf:
          - $ref: "#"
          - $ref: "#/definitions/schemaArray"
      additionalProperties:
        anyOf:
          - type: boolean
          - $ref: "#"
      definitions:
        type: object
        additionalProperties:
          $ref: "#"
      properties:
        type: object
        additionalProperties:
          $ref: "#"
      patternProperties:
        type: object
        additionalProperties:
          $ref: "#"
      dependencies:
        type: object
        additionalProperties:
          anyOf:
            - $ref: "#"
            - $ref: "http://json-schema.org/draft-04/schema#definitions/stringArray"
      allOf:
        $ref: "#/definitions/schemaArray"
      anyOf:
        $ref: "#/definitions/schemaArray"
      oneOf:
        $ref: "#/definitions/schemaArray"
      not:
        $ref: "#"

definitions:
  schemaArray:
    type: array
    minItems: 1
    items:
      $ref: "#"
...
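# A sketch of a custom schema built on the validators above (the id is an
# illustrative placeholder; validates a 2-D float32 image stored as an
# ndarray):
#
#     %YAML 1.1
#     ---
#     $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0"
#     id: "asdf://example.org/schemas/image-1.0.0"
#     type: object
#     properties:
#       data:
#         $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0"
#         ndim: 2
#         datatype: float32
#         exact_datatype: true
#     ...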
$ref: "software-1.0.0" history: description: | A log of transformations that have happened to the file. May include such things as data collection, data calibration pipelines, data analysis etc. anyOf: # This is to support backwards compatibility with older history formats - type: array items: - $ref: "history_entry-1.0.0" # This is the new, richer history implementation that includes # extension metadata. - $ref: "#/definitions/history-1.1.0" additionalProperties: true # Make sure that these two metadata fields are always at the top of the file propertyOrder: [asdf_library, history] # This contains the definition of the new history format, which includes # metadata about the extensions used to create the file. definitions: history-1.1.0: type: object properties: extensions: type: array items: - $ref: "extension_metadata-1.0.0" entries: type: array items: - $ref: "history_entry-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/column-1.0.0.yaml0000644000537500020070000000202300000000000026312 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/column-1.0.0" tag: "tag:stsci.edu:asdf/core/column-1.0.0" title: > A column in a table. description: | Each column contains a name and an array of data, and an optional description and unit. type: object properties: name: description: | The name of the column. Each name in a [table](http://stsci.edu/schemas/asdf/core/table-1.0.0) must be unique. type: string pattern: "[A-Za-z_][A-Za-z0-9_]*" data: description: | The array data for the column. allOf: - $ref: ndarray-1.0.0 description: description: | An optional description of the column. type: string default: '' unit: description: An optional unit for the column. allOf: - $ref: ../unit/unit-1.0.0 meta: description: Additional free-form metadata about the column. type: object default: {} required: [name, data] additionalProperties: false ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/complex-1.0.0.yaml0000644000537500020070000000547100000000000026476 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/complex-1.0.0" title: Complex number value. description: | Represents a complex number matching the following EBNF grammar ``` dot = "." plus-or-minus = "+" | "-" digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" sign = "" | plus-or-minus suffix = "J" | "j" | "I" | "i" inf = "inf" | "INF" nan = "nan" | "NAN" number = digits | dot digits | digits dot digits sci-suffix = "e" | "E" scientific = number sci-suffix sign digits real = sign number | sign scientific imag = number suffix | scientific suffix complex = real | sign imag | real plus-or-minus imag ``` Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. For historical reasons, it is necessary to accept as valid complex numbers that are surrounded by parenthesis. 
# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/column-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/column-1.0.0"
tag: "tag:stsci.edu:asdf/core/column-1.0.0"
title: >
  A column in a table.
description: |
  Each column contains a name and an array of data, and an optional
  description and unit.
type: object
properties:
  name:
    description: |
      The name of the column. Each name in a
      [table](http://stsci.edu/schemas/asdf/core/table-1.0.0) must be
      unique.
    type: string
    pattern: "[A-Za-z_][A-Za-z0-9_]*"
  data:
    description: |
      The array data for the column.
    allOf:
      - $ref: ndarray-1.0.0
  description:
    description: |
      An optional description of the column.
    type: string
    default: ''
  unit:
    description: An optional unit for the column.
    allOf:
      - $ref: ../unit/unit-1.0.0
  meta:
    description: Additional free-form metadata about the column.
    type: object
    default: {}
required: [name, data]
additionalProperties: false
...

# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/complex-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/complex-1.0.0"
title: Complex number value.
description: |
  Represents a complex number matching the following EBNF grammar

  ```
  dot           = "."
  plus-or-minus = "+" | "-"
  digit         = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
  sign          = "" | plus-or-minus
  suffix        = "J" | "j" | "I" | "i"
  inf           = "inf" | "INF"
  nan           = "nan" | "NAN"
  number        = digits | dot digits | digits dot digits
  sci-suffix    = "e" | "E"
  scientific    = number sci-suffix sign digits
  real          = sign number | sign scientific
  imag          = number suffix | scientific suffix
  complex       = real | sign imag | real plus-or-minus imag
  ```

  Though `J`, `j`, `I` and `i` must be supported on reading, it is
  recommended to use `i` when writing.

  For historical reasons, complex numbers surrounded by parentheses must
  also be accepted as valid.
examples:
  - - 1 real, -1 imaginary
    - "!core/complex-1.0.0 1-1j"
  - - 0 real, 1 imaginary
    - "!core/complex-1.0.0 1J"
  - - -1 real, 0 imaginary
    - "!core/complex-1.0.0 -1"
tag: "tag:stsci.edu:asdf/core/complex-1.0.0"
type: string
# This regex was automatically generated from a description of a grammar
pattern: "^(((((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+)))|([+-]?(((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ])))|((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+))[+-](((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ])))))|(\\((((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+)))|([+-]?(((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ])))|((([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN)))))|([+-]?(([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+))[+-](((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[iIjJ])|((([0-9]+)|(\\.[0-9]+)|([0-9]+\\.[0-9]+)|(((inf)|(INF)))|(((nan)|(NAN))))[eE][+-]?[0-9]+[iIjJ]))))\\)))$"
...
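# The grammar corresponds closely to Python's own complex literals; a
# minimal round-trip sketch (assumes the Python asdf package):
#
#     import asdf
#
#     af = asdf.AsdfFile({"z": 1 - 1j})
#     af.write_to("complex.asdf")   # written with the complex-1.0.0 tag
#
#     with asdf.open("complex.asdf") as af2:
#         assert af2["z"] == 1 - 1j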
# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/constant-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/constant-1.0.0"
tag: "tag:stsci.edu:asdf/core/constant-1.0.0"
title: Specify that a value is a constant.
description: |
  Used as a utility to indicate that a value is a literal constant.
...

# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/extension_metadata-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/extension_metadata-1.0.0"
title: |
  Metadata about specific ASDF extensions that were used to create this file.
description: |
  Metadata about specific ASDF extensions that were used to create this file.
tag: "tag:stsci.edu:asdf/core/extension_metadata-1.0.0"
type: object
properties:
  extension_class:
    description: |
      The fully-specified name of the extension class.
    type: string
  package:
    description: |
      The name and version of the package that contains the extension.
    $ref: "software-1.0.0"
required: [extension_class]
...

# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/externalarray-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/externalarray-1.0.0"
tag: "tag:stsci.edu:asdf/core/externalarray-1.0.0"
title: Point to an array-like object in an external file.
description: |
  Allow referencing of array-like objects in external files. These files
  may be of any type, at any absolute or relative location with respect
  to the asdf file. Loading of these files into arrays is not handled by
  asdf.
examples:
  - - Example external reference
    - |
      !core/externalarray-1.0.0
        datatype: int16
        fileuri: aia.lev1_euv_12s.2017-09-06T120001Z.94.image_lev1.fits
        shape: [4096, 4096]
        target: 1
type: object
properties:
  fileuri:
    type: string
  target:
    anyOf:
      - type: integer
      - type: string
  datatype:
    type: string
  shape:
    type: array
    items:
      anyOf:
        - type: integer
          minimum: 0
required: [fileuri, target, datatype, shape]
additionalProperties: true
...
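# A sketch of creating such a reference from Python (asdf 2.x exposes
# asdf.ExternalArrayReference; treat the exact argument order as
# illustrative):
#
#     import asdf
#
#     ref = asdf.ExternalArrayReference(
#         "image.fits",      # fileuri
#         1,                 # target, e.g. an HDU index
#         "int16",           # datatype
#         (4096, 4096))      # shape
#     asdf.AsdfFile({"image": ref}).write_to("pointer.asdf")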
# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/history_entry-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/history_entry-1.0.0"
title: |
  An entry in the file history.
description: |
  A record of an operation that has been performed upon a file.
tag: "tag:stsci.edu:asdf/core/history_entry-1.0.0"
type: object
properties:
  description:
    description: |
      A description of the transformation performed.
    type: string
  time:
    description: |
      A timestamp for the operation, in UTC.
    type: string
    format: date-time
  software:
    description: |
      One or more descriptions of the software that performed the
      operation.
    anyOf:
      - $ref: "software-1.0.0"
      - type: array
        items:
          $ref: "software-1.0.0"
required: [description]
additionalProperties: true
...

# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/integer-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/integer-1.0.0"
title: Arbitrary precision integer value.
description: |
  Represents an arbitrarily large integer value.
examples:
  - - An integer value that is stored using an internal array
    - |
      !core/integer-1.0.0
        sign: +
        string: '1193942770599561143856918438330'
        words: !core/ndarray-1.0.0
          source: 0
          datatype: uint32
          byteorder: little
          shape: [4]
  - - The same integer value is stored using an inline array
    - |
      !core/integer-1.0.0
        sign: +
        string: '1193942770599561143856918438330'
        words: !core/ndarray-1.0.0
          data: [1103110586, 1590521629, 299257845, 15]
          datatype: uint32
          shape: [4]
tag: "tag:stsci.edu:asdf/core/integer-1.0.0"
type: object
properties:
  words:
    $ref: "ndarray-1.0.0"
    description: |
      An array of unsigned 32-bit words representing the integer value,
      stored as little endian (i.e. the first word of the array represents
      the least significant bits of the integer value).
  sign:
    type: string
    pattern: "^[+-]$"
    description: |
      String indicating whether the integer value is positive or negative.
  string:
    type: string
    description: |
      Optional string representation of the integer value. This field is
      only intended to improve readability for humans, and therefore no
      assumptions about format should be made by ASDF readers.
required: [words, sign]
...
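# The words/sign representation is a little-endian base-2**32 expansion of
# the integer; a minimal numpy sketch (an illustrative helper, not the
# library's implementation):
#
#     import numpy as np
#
#     def to_words(value: int):
#         sign = "-" if value < 0 else "+"
#         value = abs(value)
#         words = []
#         while value:
#             words.append(value & 0xFFFFFFFF)  # least significant first
#             value >>= 32
#         return sign, np.array(words or [0], dtype=np.uint32)
#
#     sign, words = to_words(1193942770599561143856918438330)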
# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0"
tag: "tag:stsci.edu:asdf/core/ndarray-1.0.0"
title: >
  An *n*-dimensional array.
description: |
  There are two ways to store the data in an ndarray.

  - Inline in the tree: This is recommended only for small arrays. In
    this case, the entire ``ndarray`` tag may be a nested list, in which
    case the type of the array is inferred from the content. (See the
    rules for type inference in the ``inline-data`` definition below.)
    The inline data may also be given in the ``data`` property, in which
    case it is possible to explicitly specify the ``datatype`` and other
    properties.

  - External to the tree: The data comes from a [block](ref:block) within
    the same ASDF file or an external ASDF file referenced by a URI.
examples:
  - - An inline array, with implicit data type
    - |
      !core/ndarray-1.0.0
        [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
  - - An inline array, with an explicit data type
    - |
      !core/ndarray-1.0.0
        datatype: float64
        data: [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
  - - An inline structured array, where the types of each column are
      automatically detected
    - |
      !core/ndarray-1.0.0
        [[M110, 110, 205, And],
         [ M31,  31, 224, And],
         [ M32,  32, 221, And],
         [M103, 103, 581, Cas]]
  - - An inline structured array, where the types of each column are
      explicitly specified
    - |
      !core/ndarray-1.0.0
        datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]]
        data: [[M110, 110, 205, And],
               [ M31,  31, 224, And],
               [ M32,  32, 221, And],
               [M103, 103, 581, Cas]]
  - - A double-precision array, in contiguous memory in a block within the
      same file
    - |
      !core/ndarray-1.0.0
        source: 0
        shape: [1024, 1024]
        datatype: float64
        byteorder: little
  - - A view of a tile in that image
    - |
      !core/ndarray-1.0.0
        source: 0
        shape: [256, 256]
        datatype: float64
        byteorder: little
        strides: [8192, 8]
        offset: 2099200
  - - A structured datatype, with nested columns for a coordinate in
      (*ra*, *dec*), and a 3x3 convolution kernel
    - |
      !core/ndarray-1.0.0
        source: 0
        shape: [64]
        datatype:
          - name: coordinate
            datatype:
              - name: ra
                datatype: float64
              - name: dec
                datatype: float64
          - name: kernel
            datatype: float32
            shape: [3, 3]
        byteorder: little
  - - An array in Fortran order
    - |
      !core/ndarray-1.0.0
        source: 0
        shape: [1024, 1024]
        datatype: float64
        byteorder: little
        strides: [8, 8192]
  - - An array where values of -999 are treated as missing
    - |
      !core/ndarray-1.0.0
        source: 0
        shape: [256, 256]
        datatype: float64
        byteorder: little
        mask: -999
  - - An array where another array is used as a mask
    - |
      !core/ndarray-1.0.0
        source: 0
        shape: [256, 256]
        datatype: float64
        byteorder: little
        mask: !core/ndarray-1.0.0
          source: 1
          shape: [256, 256]
          datatype: bool8
          byteorder: little
  - - An array where the data is stored in the first block in another ASDF
      file.
    - |
      !core/ndarray-1.0.0
        source: external.asdf
        shape: [256, 256]
        datatype: float64
        byteorder: little
definitions:
  scalar-datatype:
    description: |
      Describes the type of a single element.

      There is a set of numeric types, each with a single identifier:

      - `int8`, `int16`, `int32`, `int64`: Signed integer types, with the
        given bit size.

      - `uint8`, `uint16`, `uint32`, `uint64`: Unsigned integer types,
        with the given bit size.

      - `float32`: Single-precision floating-point type or "binary32", as
        defined in IEEE 754.

      - `float64`: Double-precision floating-point type or "binary64", as
        defined in IEEE 754.

      - `complex64`: Complex number where the real and imaginary parts
        are each single-precision floating-point ("binary32") numbers, as
        defined in IEEE 754.

      - `complex128`: Complex number where the real and imaginary parts
        are each double-precision floating-point ("binary64") numbers, as
        defined in IEEE 754.

      There are two distinct fixed-length string types, which must be
      indicated with a 2-element array where the first element is an
      identifier for the string type, and the second is a length:

      - `ascii`: A string containing ASCII text (all codepoints < 128),
        where each character is 1 byte.

      - `ucs4`: A string containing unicode text in the UCS-4 encoding,
        where each character is always 4 bytes long. Here the number of
        bytes used is 4 times the given length.
    anyOf:
      - type: string
        enum: [int8, uint8, int16, uint16, int32, uint32, int64, uint64,
               float32, float64, complex64, complex128, bool8]
      - type: array
        items:
          - type: string
            enum: [ascii, ucs4]
          - type: integer
            minimum: 0
        minLength: 2
        maxLength: 2
  datatype:
    description: |
      The data format of the array elements. May be a single scalar
      datatype, or may be a nested list of datatypes. When a list, each
      field may have a name.
    anyOf:
      - $ref: "#/definitions/scalar-datatype"
      - type: array
        items:
          anyOf:
            - $ref: "#/definitions/scalar-datatype"
            - type: object
              properties:
                name:
                  type: string
                  pattern: "[A-Za-z_][A-Za-z0-9_]*"
                  description: The name of the field
                datatype:
                  $ref: "#/definitions/datatype"
                byteorder:
                  type: string
                  enum: [big, little]
                  description: |
                    The byteorder for the field. If not provided, the
                    byteorder of the datatype as a whole will be used.
                shape:
                  type: array
                  items:
                    type: integer
                    minimum: 0
              required: [datatype]
  inline-data:
    description: |
      Inline data is stored in YAML format directly in the tree, rather
      than referencing a binary block. It is made out of nested lists.

      If the datatype of the array is not specified, it is inferred from
      the array contents. Type inference is supported only for homogeneous
      arrays, not tables.

      - If any of the elements in the array are YAML strings, the
        `datatype` of the entire array is `ucs4`, with the width of the
        largest string in the column, otherwise...

      - If any of the elements in the array are complex numbers, the
        `datatype` of the entire column is `complex128`, otherwise...

      - If any of the types in the column are numbers with a decimal
        point, the `datatype` of the entire column is `float64`,
        otherwise...

      - If any of the types in the column are integers, the `datatype` of
        the entire column is `int64`, otherwise...

      - The `datatype` of the entire column is `bool8`.

      Masked values may be included in the array using `null`. If an
      explicit mask array is also provided, it takes precedence.
    type: array
    items:
      anyOf:
        - type: number
        - type: string
        - type: "null"
        - $ref: "complex-1.0.0"
        - $ref: "#/definitions/inline-data"
        - type: boolean
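# Illustrative applications of the type-inference rules above (expected
# inferred datatypes, following the rule order):
#
#     !core/ndarray-1.0.0 [[1, 2], [3, 4]]       # int64
#     !core/ndarray-1.0.0 [[1.5, 2], [3, 4]]     # float64
#     !core/ndarray-1.0.0 [a, bc]                # [ucs4, 2]
#     !core/ndarray-1.0.0 [true, null, false]    # bool8, with null masked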
anyOf:
  - $ref: "#/definitions/inline-data"
  - type: object
    properties:
      source:
        description: |
          The source of the data.

          - If an integer: If positive, the zero-based index of the block
            within the same file. If negative, the index from the last
            block within the same file. For example, a source of `-1`
            corresponds to the last block in the same file.

          - If a string, a URI to an external ASDF file containing the
            block data. Relative URIs and ``file:`` and ``http:``
            protocols must be supported. Other protocols may be supported
            by specific library implementations. The ability to reference
            block data in an external ASDF file is intentionally limited
            to the first block in the external ASDF file, and is intended
            only to support the needs of [exploded](ref:exploded). For
            the more general case of referencing data in an external ASDF
            file, use tree [references](ref:references).
        anyOf:
          - type: integer
          - type: string
            format: uri
      data:
        description: |
          The data for the array inline. If `datatype` and/or `shape` are
          also provided, they must match the data here and can be used as
          a consistency check. `strides`, `offset` and `byteorder` are
          meaningless when `data` is provided.
        $ref: "#/definitions/inline-data"
      shape:
        description: |
          The shape of the array. The first entry may be the string `*`,
          indicating that the length of the first index of the array will
          be automatically determined from the size of the block. This is
          used for streaming support.
        type: array
        items:
          anyOf:
            - type: integer
              minimum: 0
            - enum: ['*']
      datatype:
        description: |
          The data format of the array elements.
        $ref: "#/definitions/datatype"
      byteorder:
        description: >
          The byte order (big- or little-endian) of the array data.
        type: string
        enum: [big, little]
      offset:
        description: >
          The offset, in bytes, within the data for the start of this
          view.
        type: integer
        minimum: 0
        default: 0
      strides:
        description: >
          The number of bytes to skip in each dimension. If not provided,
          the array is assumed to be contiguous and in C order. If
          provided, must be the same length as the shape property.
        type: array
        items:
          anyOf:
            - type: integer
              minimum: 1
            - type: integer
              maximum: -1
      mask:
        description: >
          Describes how missing values in the array are stored. If a
          scalar number, that number is used to represent missing values.
          If an ndarray, the given array provides a mask, where non-zero
          values represent missing values in this array. The mask array
          must be broadcastable to the dimensions of this array.
        anyOf:
          - type: number
          - $ref: "complex-1.0.0"
          - allOf:
              - $ref: "ndarray-1.0.0"
              - datatype: bool8
    dependencies:
      source: [shape, datatype, byteorder]
propertyOrder: [source, data, mask, datatype, byteorder, shape, offset, strides]
...

# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/software-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/software-1.0.0"
title: |
  Describes a software package.
description: |
  General-purpose description of a software package.
tag: "tag:stsci.edu:asdf/core/software-1.0.0"
type: object
properties:
  name:
    description: |
      The name of the application or library.
    type: string
  author:
    description: |
      The author (or institution) that produced the software package.
    type: string
  homepage:
    description: |
      A URI to the homepage of the software.
    type: string
    format: uri
  version:
    description: |
      The version of the software used. It is recommended, but not
      required, that this follows the
      [Semantic Versioning Specification](http://semver.org/spec/v2.0.0.html).
    type: string
required: [name, version]
additionalProperties: true
...
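# An illustrative instance (this is the shape of the asdf_library entry
# that the asdf writer itself records; the values shown are examples
# only):
#
#     !core/software-1.0.0
#       name: asdf
#       version: 2.9.2
#       homepage: http://github.com/asdf-format/asdf
#       author: The ASDF Developers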
# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/subclass_metadata-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/subclass_metadata-1.0.0"
title: |
  Metadata on a serialized subclass of an ASDF-enabled type.
description: |
  Identifies the specific subclass that was serialized, to enable ASDF
  readers to correctly deserialize the object.
tag: "tag:stsci.edu:asdf/core/subclass_metadata-1.0.0"
type: object
properties:
  name:
    description: |
      The name of the subclass that represents this object when
      deserialized.
    type: string
required: [name]
...

# asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/core/table-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/core/table-1.0.0"
tag: "tag:stsci.edu:asdf/core/table-1.0.0"
title: >
  A table.
description: |
  A table is represented as a list of columns, where each entry is a
  [column](ref:core/column-1.0.0) object, containing the data and some
  additional information.

  The data itself may be stored inline as text, or in binary in either
  row- or column-major order by use of the `strides` property on the
  individual column arrays. Each column in the table must have the same
  first (slowest moving) dimension.
examples:
  - - A table stored in column-major order, with each column in a separate
      block
    - |
      !core/table-1.0.0
        columns:
        - !core/column-1.0.0
          data: !core/ndarray-1.0.0
            source: 0
            datatype: float64
            byteorder: little
            shape: [3]
          description: RA
          meta: {foo: bar}
          name: a
          unit: !unit/unit-1.0.0 deg
        - !core/column-1.0.0
          data: !core/ndarray-1.0.0
            source: 1
            datatype: float64
            byteorder: little
            shape: [3]
          description: DEC
          name: b
        - !core/column-1.0.0
          data: !core/ndarray-1.0.0
            source: 2
            datatype: [ascii, 1]
            byteorder: big
            shape: [3]
          description: The target name
          name: c
  - - A table stored in row-major order, all stored in the same block
    - |
      !core/table-1.0.0
        columns:
        - !core/column-1.0.0
          data: !core/ndarray-1.0.0
            source: 0
            datatype: float64
            byteorder: little
            shape: [3]
            strides: [13]
          description: RA
          meta: {foo: bar}
          name: a
          unit: !unit/unit-1.0.0 deg
        - !core/column-1.0.0
          data: !core/ndarray-1.0.0
            source: 0
            datatype: float64
            byteorder: little
            shape: [3]
            offset: 4
            strides: [13]
          description: DEC
          name: b
        - !core/column-1.0.0
          data: !core/ndarray-1.0.0
            source: 0
            datatype: [ascii, 1]
            byteorder: big
            shape: [3]
            offset: 12
            strides: [13]
          description: The target name
          name: c
type: object
properties:
  columns:
    description: |
      A list of columns in the table.
    type: array
    items:
      $ref: column-1.0.0
  meta:
    description: |
      Additional free-form metadata about the table.
    type: object
    default: {}
additionalProperties: false
required: [columns]
...
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8329866 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/fits/0000755000537500020070000000000000000000000023517 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/fits/fits-1.0.0.yaml0000644000537500020070000000754300000000000026013 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/fits/fits-1.0.0" title: > A FITS file inside of an ASDF file. description: | This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF; for example, data to which an offset and scale have been applied using the `BZERO` and `BSCALE` keywords. In such cases, it is not possible to store the data in its native FITS format and also have it accessible in its proper form in the ASDF file. Only image and binary table extensions are supported. examples: - - A simple FITS file with a primary header and two extensions - | !fits/fits-1.0.0 - header: - [SIMPLE, true, conforms to FITS standard] - [BITPIX, 8, array data type] - [NAXIS, 0, number of array dimensions] - [EXTEND, true] - [] - ['', Top Level MIRI Metadata] - [] - [DATE, '2013-08-30T10:49:55.070373', The date this file was created (UTC)] - [FILENAME, MiriDarkReferenceModel_test.fits, The name of the file] - [TELESCOP, JWST, The telescope used to acquire the data] - [] - ['', Information about the observation] - [] - [DATE-OBS, '2013-08-30T10:49:55.000000', The date the observation was made (UTC)] - data: !core/ndarray-1.0.0 datatype: float32 shape: [2, 3, 3, 4] source: 0 byteorder: big header: - [XTENSION, IMAGE, Image extension] - [BITPIX, -32, array data type] - [NAXIS, 4, number of array dimensions] - [NAXIS1, 4] - [NAXIS2, 3] - [NAXIS3, 3] - [NAXIS4, 2] - [PCOUNT, 0, number of parameters] - [GCOUNT, 1, number of groups] - [EXTNAME, SCI, extension name] - [BUNIT, DN, Units of the data array] - data: !core/ndarray-1.0.0 datatype: float32 shape: [2, 3, 3, 4] source: 1 byteorder: big header: - [XTENSION, IMAGE, Image extension] - [BITPIX, -32, array data type] - [NAXIS, 4, number of array dimensions] - [NAXIS1, 4] - [NAXIS2, 3] - [NAXIS3, 3] - [NAXIS4, 2] - [PCOUNT, 0, number of parameters] - [GCOUNT, 1, number of groups] - [EXTNAME, ERR, extension name] - [BUNIT, DN, Units of the error array] tag: "tag:stsci.edu:asdf/fits/fits-1.0.0" type: array items: description: > Each item represents a single header/data unit (HDU). type: object properties: header: description: > A list of the keyword/value/comment triples from the header, in the order they appear in the FITS file. type: array items: type: array minItems: 0 maxItems: 3 items: - description: "The keyword." type: string maxLength: 8 pattern: "[A-Z0-9]*" - description: "The value." anyOf: - type: string maxLength: 60 - type: number - type: boolean - description: "The comment." type: string maxLength: 60 data: description: "The data part of the HDU." anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../core/table-1.0.0" - type: "null" default: null required: [header] additionalProperties: false ... 
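Support for writing `astropy.io.fits.HDUList` objects under this `fits-1.0.0` tag has historically been provided by Astropy's ASDF extension (and later by the asdf-astropy package) rather than by the asdf library itself. Assuming such an extension is installed, a sketch might look like:

```python
import asdf
import numpy as np
from astropy.io import fits

# A small HDUList: primary header plus one image extension, analogous
# to the example above (minus the second extension).
sci = np.zeros((2, 3, 3, 4), dtype=np.float32)
hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(data=sci, name="SCI")])

# With a fits-capable extension installed, the HDUList serializes
# under the fits/fits-1.0.0 tag shown above.
af = asdf.AsdfFile({"fits": hdul})
af.write_to("embedded_fits.asdf")
```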
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.8335536 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/time/0000755000537500020070000000000000000000000023510 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/time/time-1.0.0.yaml0000644000537500020070000002015700000000000025771 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/time/time-1.0.0" tag: "tag:stsci.edu:asdf/time/time-1.0.0" title: Represents an instance in time. description: | A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used. examples: - - Example ISO time - | !time/time-1.0.0 "2000-12-31T13:05:27.737" - - Example year, day-of-year and time format time - | !time/time-1.0.0 "2001:003:04:05:06.789" - - Example Besselian Epoch time - | !time/time-1.0.0 B2000.0 - - Example Besselian Epoch time, equivalent to above - | !time/time-1.0.0 value: 2000.0 format: byear - - Example list of times - | !time/time-1.0.0 ["2000-12-31T13:05:27.737", "2000-12-31T13:06:38.444"] - - Example of an array of times - | !time/time-1.0.0 value: !core/ndarray-1.0.0 data: [2000, 2001] datatype: float64 format: jyear - - Example with a location - | !time/time-1.0.0 value: 2000.0 format: jyear scale: tdb location: x: 6378100 y: 0 z: 0 definitions: iso_time: type: string pattern: "[0-9]{4}-(0[1-9])|(1[0-2])-(0[1-9])|([1-2][0-9])|(3[0-1])[T ]([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" byear: type: string pattern: "B[0-9]+(.[0-9]+)?" jyear: type: string pattern: "J[0-9]+(.[0-9]+)?" yday: type: string pattern: "[0-9]{4}:(00[1-9])|(0[1-9][0-9])|([1-2][0-9][0-9])|(3[0-5][0-9])|(36[0-5]):([0-1][0-9])|([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" string_formats: anyOf: - $ref: "#/definitions/iso_time" - $ref: "#/definitions/byear" - $ref: "#/definitions/jyear" - $ref: "#/definitions/yday" array_of_strings: type: array items: anyOf: - $ref: "#/definitions/array_of_strings" - $ref: "#/definitions/string_formats" anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0#/anyOf/1" - type: object properties: value: description: | The value(s) of the time. 
anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0" - type: number format: description: | The format of the time. If not provided, the format should be guessed from the string, from among the following unambiguous options: `iso`, `byear`, `jyear` and `yday`. The supported formats are: - `iso`: ISO 8601 compliant date-time format `YYYY-MM-DDTHH:MM:SS.sss...`. For example, `2000-01-01 00:00:00.000` is midnight on January 1, 2000. The `T` separating the date from the time section is optional. - `yday`: Year, day-of-year and time as `YYYY:DOY:HH:MM:SS.sss...`. The day-of-year (DOY) goes from 001 to 365 (366 in leap years). For example, `2000:001:00:00:00.000` is midnight on January 1, 2000. - `byear`: Besselian Epoch year, e.g. `B1950.0`. The `B` is optional if the `byear` format is explicitly specified. - `jyear`: Julian Epoch year, e.g. `J2000.0`. The `J` is optional if the `jyear` format is explicitly specified. - `decimalyear`: Time as a decimal year, with integer values corresponding to midnight of the first day of each year. For example, 2000.5 corresponds to the ISO time `2000-07-02 00:00:00`. - `jd`: Julian Date time format. This represents the number of days since the beginning of the Julian Period. For example, 2451544.5 in `jd` is midnight on January 1, 2000. - `mjd`: Modified Julian Date time format. This represents the number of days since midnight on November 17, 1858. For example, 51544.0 in MJD is midnight on January 1, 2000. - `gps`: GPS time: seconds from 1980-01-06 00:00:00 UTC. For example, 630720013.0 is midnight on January 1, 2000. - `unix`: Unix time: seconds from 1970-01-01 00:00:00 UTC. For example, 946684800.0 in Unix time is midnight on January 1, 2000. [TODO: Astropy's definition of UNIX time doesn't match POSIX's here. What should we do for the purposes of ASDF?] enum: - iso - yday - byear - jyear - decimalyear - jd - mjd - gps - unix - cxcsec scale: description: | The time scale (or time standard) is a specification for measuring time: either the rate at which time passes; or points in time; or both. See also [3] and [4]. These scales are defined in detail in [SOFA Time Scale and Calendar Tools](http://www.iausofa.org/sofa_ts_c.pdf). The supported time scales are: - `utc`: Coordinated Universal Time (UTC). This is the default time scale, except for the `gps` and `unix` formats. - `tai`: International Atomic Time (TAI). - `tcb`: Barycentric Coordinate Time (TCB). - `tcg`: Geocentric Coordinate Time (TCG). - `tdb`: Barycentric Dynamical Time (TDB). - `tt`: Terrestrial Time (TT). - `ut1`: Universal Time (UT1). enum: - utc - tai - tcb - tcg - tdb - tt - ut1 location: description: | Specifies the observer location for scales that are sensitive to observer location, currently only `tdb`. May be specified either with geocentric coordinates (X, Y, Z) with an optional unit or geodetic coordinates: - `long`: longitude in degrees - `lat`: latitude in degrees - `h`: optional height anyOf: - type: object properties: x: type: number y: type: number z: type: number unit: allOf: - $ref: "../unit/unit-1.0.0" - default: m required: [x, y, z] - type: object properties: long: type: number minimum: -180 maximum: 180 lat: type: number minimum: -90 maximum: 90 h: type: number default: 0 unit: allOf: - $ref: "../unit/unit-1.0.0" - default: m required: [long, lat] required: [value] ... 
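Tag support for `time-1.0.0` comes from astropy (originally via `astropy.io.misc.asdf`, later asdf-astropy), not from the core asdf library. Assuming one of those is installed, the two representations described above map onto `astropy.time.Time` like so:

```python
import asdf
from astropy.time import Time

# An ISO-format UTC time: can serialize to the plain string form of
# the tag, since `iso` is one of the unambiguous string formats.
t_iso = Time("2000-12-31T13:05:27.737", scale="utc")

# A Julian-epoch year on the TDB scale: requires the object form,
# with explicit `value`, `format` and `scale` properties.
t_jyear = Time(2000.0, format="jyear", scale="tdb")

af = asdf.AsdfFile({"iso": t_iso, "jyear": t_jyear})
af.write_to("times.asdf")  # requires a time-capable ASDF extension
```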
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/time/time-1.1.0.yaml0000644000537500020070000001740200000000000025771 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/time/time-1.1.0" tag: "tag:stsci.edu:asdf/time/time-1.1.0" title: Represents an instance in time. description: | A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used. examples: - - Example ISO time - | !time/time-1.1.0 "2000-12-31T13:05:27.737" - - Example year, day-of-year and time format time - | !time/time-1.1.0 "2001:003:04:05:06.789" - - Example Besselian Epoch time - | !time/time-1.1.0 B2000.0 - - Example Besselian Epoch time, equivalent to above - | !time/time-1.1.0 value: 2000.0 format: byear - - Example list of times - | !time/time-1.1.0 ["2000-12-31T13:05:27.737", "2000-12-31T13:06:38.444"] - - Example of an array of times - | !time/time-1.1.0 value: !core/ndarray-1.0.0 data: [2000, 2001] datatype: float64 format: jyear - - Example with a location - | !time/time-1.1.0 value: 2000.0 format: jyear scale: tdb location: x: !unit/quantity-1.1.0 value: 6378100 unit: !unit/unit-1.0.0 m y: !unit/quantity-1.1.0 value: 0 unit: !unit/unit-1.0.0 m z: !unit/quantity-1.1.0 value: 0 unit: !unit/unit-1.0.0 m definitions: iso_time: type: string pattern: "[0-9]{4}-(0[1-9])|(1[0-2])-(0[1-9])|([1-2][0-9])|(3[0-1])[T ]([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" byear: type: string pattern: "B[0-9]+(.[0-9]+)?" jyear: type: string pattern: "J[0-9]+(.[0-9]+)?" yday: type: string pattern: "[0-9]{4}:(00[1-9])|(0[1-9][0-9])|([1-2][0-9][0-9])|(3[0-5][0-9])|(36[0-5]):([0-1][0-9])|([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" string_formats: anyOf: - $ref: "#/definitions/iso_time" - $ref: "#/definitions/byear" - $ref: "#/definitions/jyear" - $ref: "#/definitions/yday" array_of_strings: type: array items: anyOf: - $ref: "#/definitions/array_of_strings" - $ref: "#/definitions/string_formats" anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0#/anyOf/1" - type: object properties: value: description: | The value(s) of the time. anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0" - type: number format: description: | The format of the time. 
If not provided, the format should be guessed from the string, from among the following unambiguous options: `iso`, `byear`, `jyear` and `yday`. The supported formats are: - `iso`: ISO 8601 compliant date-time format `YYYY-MM-DDTHH:MM:SS.sss...`. For example, `2000-01-01 00:00:00.000` is midnight on January 1, 2000. The `T` separating the date from the time section is optional. - `yday`: Year, day-of-year and time as `YYYY:DOY:HH:MM:SS.sss...`. The day-of-year (DOY) goes from 001 to 365 (366 in leap years). For example, `2000:001:00:00:00.000` is midnight on January 1, 2000. - `byear`: Besselian Epoch year, e.g. `B1950.0`. The `B` is optional if the `byear` format is explicitly specified. - `jyear`: Julian Epoch year, e.g. `J2000.0`. The `J` is optional if the `jyear` format is explicitly specified. - `decimalyear`: Time as a decimal year, with integer values corresponding to midnight of the first day of each year. For example, 2000.5 corresponds to the ISO time `2000-07-02 00:00:00`. - `jd`: Julian Date time format. This represents the number of days since the beginning of the Julian Period. For example, 2451544.5 in `jd` is midnight on January 1, 2000. - `mjd`: Modified Julian Date time format. This represents the number of days since midnight on November 17, 1858. For example, 51544.0 in MJD is midnight on January 1, 2000. - `gps`: GPS time: seconds from 1980-01-06 00:00:00 UTC. For example, 630720013.0 is midnight on January 1, 2000. - `unix`: Unix time: seconds from 1970-01-01 00:00:00 UTC. For example, 946684800.0 in Unix time is midnight on January 1, 2000. [TODO: Astropy's definition of UNIX time doesn't match POSIX's here. What should we do for the purposes of ASDF?] enum: - iso - yday - byear - jyear - decimalyear - jd - mjd - gps - unix - cxcsec scale: description: | The time scale (or time standard) is a specification for measuring time: either the rate at which time passes; or points in time; or both. See also [3] and [4]. These scales are defined in detail in [SOFA Time Scale and Calendar Tools](http://www.iausofa.org/sofa_ts_c.pdf). The supported time scales are: - `utc`: Coordinated Universal Time (UTC). This is the default time scale, except for the `gps` and `unix` formats. - `tai`: International Atomic Time (TAI). - `tcb`: Barycentric Coordinate Time (TCB). - `tcg`: Geocentric Coordinate Time (TCG). - `tdb`: Barycentric Dynamical Time (TDB). - `tt`: Terrestrial Time (TT). - `ut1`: Universal Time (UT1). enum: - utc - tai - tcb - tcg - tdb - tt - ut1 location: description: | Specifies the observer location for scales that are sensitive to observer location, currently only `tdb`. May be specified either with geocentric coordinates (X, Y, Z) with an optional unit or geodetic coordinates: - `long`: longitude in degrees - `lat`: latitude in degrees - `h`: optional height type: object properties: x: $ref: "../unit/quantity-1.1.0" y: $ref: "../unit/quantity-1.1.0" z: $ref: "../unit/quantity-1.1.0" required: [x, y, z] required: [value] ... 
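The main change in `time-1.1.0` is that the `location` components are `quantity-1.1.0` objects rather than bare numbers. In astropy terms that corresponds to constructing the `Time` with a geocentric `EarthLocation`, as in this sketch:

```python
import astropy.units as u
from astropy.coordinates import EarthLocation
from astropy.time import Time

# Geocentric (x, y, z) location, matching the quantity-valued
# `location` object in the time-1.1.0 example above.
loc = EarthLocation.from_geocentric(6378100 * u.m, 0 * u.m, 0 * u.m)

# TDB is the one supported scale that is sensitive to observer
# location, so it is the natural pairing here.
t = Time(2000.0, format="jyear", scale="tdb", location=loc)
```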
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9131486 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/0000755000537500020070000000000000000000000024565 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.0.0.yaml0000644000537500020070000000141400000000000026633 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.0.0" tag: "tag:stsci.edu:asdf/transform/add-1.0.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.0.0 forward: - !transform/shift-1.0.0 offset: 2.0 - !transform/shift-1.0.0 offset: 3.0 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.1.0.yaml0000644000537500020070000000141400000000000026634 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.1.0" tag: "tag:stsci.edu:asdf/transform/add-1.1.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.1.0 forward: - !transform/shift-1.1.0 offset: 2.0 - !transform/shift-1.1.0 offset: 3.0 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.2.0.yaml0000644000537500020070000000141400000000000026635 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.2.0" tag: "tag:stsci.edu:asdf/transform/add-1.2.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.2.0 forward: - !transform/shift-1.2.0 offset: 2.0 - !transform/shift-1.2.0 offset: 3.0 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.0.0.yaml0000644000537500020070000000233000000000000027331 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.0.0" tag: "tag:stsci.edu:asdf/transform/affine-1.0.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. 
allOf: - $ref: "transform-1.0.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.1.0.yaml0000644000537500020070000000233000000000000027332 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.1.0" tag: "tag:stsci.edu:asdf/transform/affine-1.1.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.2.0.yaml0000644000537500020070000000245600000000000027344 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.2.0" tag: "tag:stsci.edu:asdf/transform/affine-1.2.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.3.0.yaml0000644000537500020070000000245600000000000027345 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.3.0" tag: "tag:stsci.edu:asdf/transform/affine-1.3.0" title: > An affine transform. 
description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.0.0.yaml0000644000537500020070000000121700000000000027050 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.0.0" tag: "tag:stsci.edu:asdf/transform/airy-1.0.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.0.0" - type: object properties: theta_b: type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.1.0.yaml0000644000537500020070000000116100000000000027047 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.1.0" tag: "tag:stsci.edu:asdf/transform/airy-1.1.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.1.0" - type: object properties: theta_b: type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.2.0.yaml0000644000537500020070000000125700000000000027056 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.2.0" tag: "tag:stsci.edu:asdf/transform/airy-1.2.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.2.0" - type: object properties: theta_b: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 ... 
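These projection schemas mirror the projection classes in `astropy.modeling.projections`, which carry the same parameters; for the Airy projection the single parameter is `theta_b`, defaulting to 90 degrees as above. A brief sketch, assuming astropy is installed:

```python
from astropy.modeling.projections import Pix2Sky_Airy, Sky2Pix_Airy

# theta_b defaults to 90 degrees, matching the schema default above.
pix2sky = Pix2Sky_Airy(theta_b=90.0)

# Pixel-plane coordinates (degrees) -> native spherical coordinates,
# and back through the required inverse.
phi, theta = pix2sky(0.5, 0.5)
x, y = Sky2Pix_Airy(theta_b=90.0)(phi, theta)
```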
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/airy_disk2d-1.0.0.yaml0000644000537500020070000000254400000000000030314 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy_disk2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/airy_disk2d-1.0.0" title: > Two dimensional Airy disk model. description: > Two dimensional Airy disk model. examples: - - $f(r)=43.8[\frac{2J_1(\frac{\pi\sqrt{(x-0.5)^2+(y-1.5)^2}}{10.2/R_z})}{\frac{\pi\sqrt{(x-0.5)^2+(y-1.5)^2}}{10.2/R_z}}]^2$, where $J_1$ is the first order Bessel function and $R_z=1.2196698912665045$ - | !transform/airy_disk2d-1.0.0 {amplitude: 43.8, radius: 10.2, x_0: 0.5, y_0: 1.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude of the Airy function. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the maximum of the Airy function. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the maximum of the Airy function. radius: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: The radius of the Airy disk (radius of the first zero). required: ['amplitude', 'x_0', 'y_0', 'radius'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/blackbody-1.0.0.yaml0000644000537500020070000000164700000000000030045 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/blackbody-1.0.0" tag: "tag:stsci.edu:asdf/transform/blackbody-1.0.0" title: > Blackbody model. description: | Blackbody model using the Planck function. $$B_{\nu}(T) = A \frac{2 h \nu^{3} / c^{2}}{\exp(h \nu / k T) - 1}$$ examples: - - $$B_{\nu}(T) = 10.0 \frac{2 h \nu^{3} / c^{2}}{\exp(h \nu / (k \cdot 6000)) - 1}$$ - | !transform/blackbody-1.0.0 scale: 10.0 temperature: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 K, value: 6000.0} allOf: - $ref: "transform-1.2.0" - type: object properties: scale: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Scale factor. temperature: $ref: "../unit/quantity-1.1.0" description: Blackbody temperature. required: ['scale', 'temperature'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.0.0.yaml0000644000537500020070000000230700000000000031365 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.0.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.0.0" - type: object properties: theta1: type: number description: | Bonne conformal latitude, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.1.0.yaml0000644000537500020070000000230700000000000031366 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.1.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.1.0" - type: object properties: theta1: type: number description: | Bonne conformal latitude, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.2.0.yaml0000644000537500020070000000240500000000000031366 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.2.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
allOf: - $ref: "pseudoconic-1.1.0" - type: object properties: theta1: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Bonne conformal latitude, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.3.0.yaml0000644000537500020070000000240500000000000031367 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.3.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.3.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.2.0" - type: object properties: theta1: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Bonne conformal latitude, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/box1d-1.0.0.yaml0000644000537500020070000000175200000000000027125 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/box1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/box1d-1.0.0" title: > One dimensional box model. description: > One dimensional box. examples: - - A 1D box of width 4.0, amplitude 10.0, centered at x=1.5. - | !transform/box1d-1.0.0 amplitude: 10.0 bounding_box: [-0.5, 3.5] width: 4.0 x_0: 1.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Position of the center of the box model. width: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Width of box. required: ['amplitude', 'x_0', 'width'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/box2d-1.0.0.yaml0000644000537500020070000000261500000000000027125 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/box2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/box2d-1.0.0" title: > Two dimensional box model. description: > Two dimensional box. examples: - - A 2D box with (x, y) dimensions (4.0, 2.0), centered at (0.5, 1.5) with amplitude 10.0. - | !transform/box2d-1.0.0 amplitude: 10.0 bounding_box: - [0.5, 2.5] - [-1.5, 2.5] x_0: 0.5 x_width: 4.0 y_0: 1.5 y_width: 2.0 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude. 
x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the center of the box model. x_width: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x width of box. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the center of the box model. y_width: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y width of box. required: ['amplitude', 'x_0', 'x_width', 'y_0', 'y_width'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/broken_power_law1d-1.0.0.yaml0000644000537500020070000000237500000000000031676 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/broken_power_law1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/broken_power_law1d-1.0.0" title: > One dimensional power law model with a break. description: > One dimensional power law model with a break. examples: - - $f(x) = A (10.0 / 5.0) ^ {-2.0}$ for x < 5.0 and $f(x) = A (10.0 / 5.0) ^ {-3.0}$ for x > 5.0 - | !transform/broken_power_law1d-1.0.0 {alpha_1: 2.0, alpha_2: 3.0, amplitude: 10.0, x_break: 5.0} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Model amplitude at the break point. x_break: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Break point. alpha_1: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index for x < x_break. alpha_2: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index for x > x_break. required: ['amplitude', 'x_break', 'alpha_1', 'alpha_2'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.0.0.yaml0000644000537500020070000000073200000000000033057 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.0.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.1.0.yaml0000644000537500020070000000073200000000000033060 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.1.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" ... 
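The `broken_power_law1d-1.0.0` example above corresponds directly to astropy's `BrokenPowerLaw1D` model; evaluating it shows how `alpha_1` governs the region below the break and `alpha_2` the region above it. A sketch, assuming astropy:

```python
from astropy.modeling.models import BrokenPowerLaw1D

# Same parameters as the schema example: the amplitude is the model
# value at the break point.
m = BrokenPowerLaw1D(amplitude=10.0, x_break=5.0, alpha_1=2.0, alpha_2=3.0)

print(m(5.0))   # 10.0 at x_break
print(m(10.0))  # 10.0 * (10.0 / 5.0) ** -3.0 = 1.25
```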
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.2.0.yaml0000644000537500020070000000073200000000000033061 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.2.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.2.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.0.0.yaml0000644000537500020070000000215100000000000027547 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.0.0" tag: "tag:stsci.edu:asdf/transform/compose-1.0.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. examples: - - A series of transforms - | !transform/compose-1.0.0 forward: - !transform/shift-1.0.0 offset: 2.0 - !transform/shift-1.0.0 offset: 3.0 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.1.0.yaml0000644000537500020070000000215100000000000027550 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.1.0" tag: "tag:stsci.edu:asdf/transform/compose-1.1.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. examples: - - A series of transforms - | !transform/compose-1.1.0 forward: - !transform/shift-1.1.0 offset: 2.0 - !transform/shift-1.1.0 offset: 3.0 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... 
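In astropy's modeling framework, serial composition is spelled with the `|` operator, and (given a transform-capable ASDF extension such as asdf-astropy) the compound model round-trips through the compose tag shown above. A sketch:

```python
import asdf
from astropy.modeling.models import Shift

# `|` feeds the output of each model into the next, which is exactly
# the structure the compose tag records.
model = Shift(2.0) | Shift(3.0)
print(model(1.0))  # 6.0

af = asdf.AsdfFile({"model": model})
af.write_to("compose.asdf")  # requires a transform-capable extension
```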
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.2.0.yaml0000644000537500020070000000215100000000000027551 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.2.0" tag: "tag:stsci.edu:asdf/transform/compose-1.2.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. examples: - - A series of transforms - | !transform/compose-1.2.0 forward: - !transform/shift-1.2.0 offset: 2.0 - !transform/shift-1.2.0 offset: 3.0 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.0.0.yaml0000644000537500020070000000365400000000000030377 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.0.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.0.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.0.0 forward: - !transform/shift-1.0.0 offset: 2.0 - !transform/shift-1.0.0 offset: 3.0 - !transform/shift-1.0.0 offset: 5.0 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... 
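The parallel analogue in astropy is the `&` operator: inputs are split across the submodels in order and the outputs concatenated, just as in the three-shift example above. A sketch, assuming astropy:

```python
from astropy.modeling.models import Shift

# `&` joins models in parallel: three inputs in, three outputs out.
model = Shift(2.0) & Shift(3.0) & Shift(5.0)

print(model(1.0, 1.0, 1.0))  # (3.0, 4.0, 6.0)
```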
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.1.0.yaml0000644000537500020070000000365400000000000030400 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.1.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.1.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.1.0 forward: - !transform/shift-1.1.0 offset: 2.0 - !transform/shift-1.1.0 offset: 3.0 - !transform/shift-1.1.0 offset: 5.0 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.2.0.yaml0000644000537500020070000000365400000000000030401 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.2.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.2.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. 
examples: - - The example in the description - | !transform/concatenate-1.2.0 forward: - !transform/shift-1.2.0 offset: 2.0 - !transform/shift-1.2.0 offset: 3.0 - !transform/shift-1.2.0 offset: 5.0 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.0.0.yaml0000644000537500020070000000245700000000000027206 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.0.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.1.0.yaml0000644000537500020070000000245700000000000027207 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.1.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 ... 
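astropy's conic projection classes take the same `sigma` and `delta` parameters as the base schema above, so the relation to the standard parallels $\theta_1$ and $\theta_2$ can be written out directly. A sketch, assuming astropy:

```python
from astropy.modeling.projections import Pix2Sky_ConicEqualArea

# sigma = (theta_1 + theta_2) / 2, delta = (theta_1 - theta_2) / 2,
# both in degrees, per the conic base schema above.
theta_1, theta_2 = 30.0, 60.0
proj = Pix2Sky_ConicEqualArea(sigma=(theta_1 + theta_2) / 2,
                              delta=(theta_1 - theta_2) / 2)

phi, theta = proj(0.5, 0.5)  # pixel plane -> native sphere
```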
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.2.0.yaml0000644000537500020070000000265300000000000027206 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.2.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.3.0.yaml0000644000537500020070000000265300000000000027207 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.3.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.0.0.yaml0000644000537500020070000000164200000000000031360 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.0.0" title: | Alber's conic equal area projection. 
description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.1.0.yaml0000644000537500020070000000164200000000000031361 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.1.0" title: | Alber's conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.2.0.yaml0000644000537500020070000000160400000000000031360 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.2.0" title: | Alber's conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.3.0.yaml0000644000537500020070000000160400000000000031361 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.3.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.3.0" title: | Alber's conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. 
See [conic](ref:transform/conic-1.3.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.3.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.0.0.yaml0000644000537500020070000000137700000000000031620 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.0.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.1.0.yaml0000644000537500020070000000137700000000000031621 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.1.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.2.0.yaml0000644000537500020070000000134100000000000031611 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.2.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" ... 
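The conic family above is driven entirely by the two schema properties sigma = (theta_1 + theta_2)/2 and delta = (theta_1 - theta_2)/2. As a minimal sketch of how a serialized conic projection behaves (assuming astropy is installed; astropy.modeling's conic projection classes mirror these schema parameters, while the tag-to-class wiring itself is supplied by an ASDF extension such as asdf-astropy):

    import numpy as np
    from astropy.modeling.models import Pix2Sky_ConicEqualArea

    # sigma = (theta_1 + theta_2) / 2 and delta = (theta_1 - theta_2) / 2,
    # both in degrees, exactly as in the conic schemas above.
    pix2sky = Pix2Sky_ConicEqualArea(sigma=45.0, delta=10.0)

    x, y = 1.0, 2.0
    phi, theta = pix2sky(x, y)   # pixel plane -> native spherical coordinates

    # Invertibility: the schemas require an inverse; astropy provides one.
    sky2pix = pix2sky.inverse
    assert np.allclose(sky2pix(phi, theta), (x, y))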
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.3.0.yaml0000644000537500020070000000134100000000000031612 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.3.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.3.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.3.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.3.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.0.0.yaml0000644000537500020070000000217600000000000032001 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.0.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.1.0.yaml0000644000537500020070000000217600000000000032002 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.1.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. 
The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.2.0.yaml0000644000537500020070000000214000000000000031772 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.2.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.3.0.yaml0000644000537500020070000000214000000000000031773 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.3.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.3.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.3.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.3.0" ... 
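The orthomorphic (COO) "constant of the cone" C above depends only on the two standard parallels. A self-contained numeric sketch of that formula (illustrative only, not part of any schema; theta_1 and theta_2 are the standard parallels in degrees and must be distinct):

    import numpy as np

    def coo_constant_of_cone(theta_1, theta_2):
        # C = ln(cos(t2)/cos(t1)) / ln(tan((90 - t2)/2) / tan((90 - t1)/2))
        t1, t2 = np.radians(theta_1), np.radians(theta_2)
        num = np.log(np.cos(t2) / np.cos(t1))
        den = np.log(np.tan((np.pi / 2 - t2) / 2) / np.tan((np.pi / 2 - t1) / 2))
        return num / den

    # For standard parallels at 20 and 60 degrees, C falls between
    # sin(20 deg) and sin(60 deg), as expected for a conic projection.
    print(coo_constant_of_cone(20.0, 60.0))  # ~0.657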
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.0.0.yaml0000644000537500020070000000142100000000000031605 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.0.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.1.0.yaml0000644000537500020070000000142100000000000031606 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.1.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.2.0.yaml0000644000537500020070000000136300000000000031614 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.2.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.2.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.3.0.yaml0000644000537500020070000000136300000000000031615 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.3.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.3.0" title: | Colles' conic perspective projection. 
description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:transform/conic-1.3.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.3.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.0.0.yaml0000644000537500020070000000105000000000000027730 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.0.0" tag: "tag:stsci.edu:asdf/transform/constant-1.0.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.0.0" - type: object properties: value: type: number required: [value] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.1.0.yaml0000644000537500020070000000105000000000000027731 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.1.0" tag: "tag:stsci.edu:asdf/transform/constant-1.1.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.1.0" - type: object properties: value: type: number required: [value] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.2.0.yaml0000644000537500020070000000114600000000000027740 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.2.0" tag: "tag:stsci.edu:asdf/transform/constant-1.2.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.1.0" - type: object properties: value: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number required: [value] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.3.0.yaml0000644000537500020070000000114600000000000027741 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.3.0" tag: "tag:stsci.edu:asdf/transform/constant-1.3.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. 
allOf: - $ref: "transform-1.2.0" - type: object properties: value: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number required: [value] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.4.0.yaml0000644000537500020070000000115200000000000027737 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.4.0" tag: "tag:stsci.edu:asdf/transform/constant-1.4.0" title: > A Constant transform. description: | Invertibility: A transform which takes one or two inputs based on dimensionality and returns a constant value. It has no analytical inverse. allOf: - $ref: "transform-1.2.0" - type: object properties: value: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number dimensions: type: integer required: [value, dimensions] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.0.0.yaml0000644000537500020070000000063100000000000030400 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.0.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.1.0.yaml0000644000537500020070000000063100000000000030401 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.1.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.2.0.yaml0000644000537500020070000000063100000000000030402 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.2.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.0.0.yaml0000644000537500020070000000162700000000000032565 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.0.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.0.0" - type: object properties: lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.1.0.yaml0000644000537500020070000000162700000000000032566 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.1.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.2.0.yaml0000644000537500020070000000172500000000000032566 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.2.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: lambda: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.3.0.yaml0000644000537500020070000000172500000000000032567 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.3.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.3.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.2.0" - type: object properties: lambda: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.0.0.yaml0000644000537500020070000000225100000000000033011 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.0.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.0.0" - type: object properties: mu: type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.1.0.yaml0000644000537500020070000000225100000000000033012 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.1.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: mu: type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... 
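Since the CEA equations above are simple enough to check by hand, here is a small self-contained sketch (illustrative, not part of any schema) confirming that the pixel-to-sky and sky-to-pixel forms invert each other; lam stands in for the schema's `lambda` property:

    import numpy as np

    def cea_pix2sky(x, y, lam=1.0):
        # phi = x; theta = arcsin((pi / 180) * lambda * y)
        return x, np.degrees(np.arcsin(np.radians(lam * y)))

    def cea_sky2pix(phi, theta, lam=1.0):
        # x = phi; y = (180 / pi) * sin(theta) / lambda
        return phi, np.degrees(np.sin(np.radians(theta))) / lam

    phi, theta = cea_pix2sky(30.0, 40.0)
    assert np.allclose(cea_sky2pix(phi, theta), (30.0, 40.0))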
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.2.0.yaml0000644000537500020070000000244500000000000033020 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.2.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.3.0.yaml0000644000537500020070000000244500000000000033021 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.3.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.3.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.2.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/disk2d-1.0.0.yaml0000644000537500020070000000236300000000000027267 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/disk2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/disk2d-1.0.0" title: > Two dimensional disk model. description: > Two dimensional radially symmetric disk. examples: - - A 2D disk centered at (x, y) = (0.5, 1.5), of radius 5.0 and amplitude 10.0. 
- | !transform/disk2d-1.0.0 R_0: 5.0 amplitude: 10.0 bounding_box: - [-3.5, 6.5] - [-4.5, 5.5] x_0: 0.5 y_0: 1.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Value of the disk function. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the center of the disk. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the center of the disk. R_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Radius of the disk. required: ['amplitude', 'x_0', 'y_0', 'R_0'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.0.0.yaml0000644000537500020070000000155000000000000027350 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.0.0" tag: "tag:stsci.edu:asdf/transform/divide-1.0.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. - | !transform/divide-1.0.0 forward: - !transform/shift-1.0.0 offset: 2.0 - !transform/shift-1.0.0 offset: 2.0 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.1.0.yaml0000644000537500020070000000155000000000000027351 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.1.0" tag: "tag:stsci.edu:asdf/transform/divide-1.1.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. - | !transform/divide-1.1.0 forward: - !transform/shift-1.1.0 offset: 2.0 - !transform/shift-1.1.0 offset: 2.0 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.2.0.yaml0000644000537500020070000000155000000000000027352 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.2.0" tag: "tag:stsci.edu:asdf/transform/divide-1.2.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. 
- | !transform/divide-1.2.0 forward: - !transform/shift-1.2.0 offset: 2.0 - !transform/shift-1.2.0 offset: 2.0 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/domain-1.0.0.yaml0000644000537500020070000000203600000000000027353 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/domain-1.0.0" tag: "tag:stsci.edu:asdf/transform/domain-1.0.0" title: > Defines the domain of an input axis. (deprecated since 1.1.0) description: > Describes the range of acceptable input values to a particular axis of a transform. examples: - - The domain `[0, 1)`. - | !transform/domain-1.0.0 lower: 0 upper: 1 includes_lower: true properties: lower: description: > The lower value of the domain. If not provided, the domain has no lower limit. type: number default: -.inf upper: description: > The upper value of the domain. If not provided, the domain has no upper limit. type: number default: .inf includes_lower: description: If `true`, the domain includes `lower`. type: boolean default: false includes_upper: description: If `true`, the domain includes `upper`. type: boolean default: false ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/drude1d-1.0.0.yaml0000644000537500020070000000205700000000000027437 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/drude1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/drude1d-1.0.0" title: > One dimensional Drude model. description: > Drude model based on the behavior of electrons in materials (esp. metals). examples: - - $$f(x) = 10.0 \frac{(2.5/0.5)^2}{((x/0.5 - 0.5/x)^2 + (2.5/0.5)^2)}$$ - | !transform/drude1d-1.0.0 amplitude: 10.0 bounding_box: [-124.5, 125.5] fwhm: 2.5 x_0: 0.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Peak value. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the peak. fwhm: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Full width at half maximum. required: ['amplitude', 'x_0', 'fwhm'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/ellipse2d-1.0.0.yaml0000644000537500020070000000347500000000000027777 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/ellipse2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/ellipse2d-1.0.0" title: > Two dimensional ellipse model. description: > Two dimensional ellipse. examples: - - A 2D ellipse centered at (x, y) = (0.5, 1.5), with a semimajor axis of 2.0, semiminor axis of 4.0, oriented at 0.2 radians counterclockwise from the positive x-axis. 
- | !transform/ellipse2d-1.0.0 a: 2.0 amplitude: 10.0 b: 4.0 bounding_box: - [-2.4403509950278934, 5.440350995027893] - [-1.6150966966034175, 2.6150966966034175] theta: 0.2 x_0: 0.5 y_0: 1.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Value of the ellipse. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the center of the ellipse. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the center of the ellipse. a: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: The length of the semimajor axis. b: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: The length of the semiminor axis. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: The rotation angle in radians of the semimajor axis. The rotation angle increases counterclockwise from the positive x axis. required: ['amplitude', 'x_0', 'y_0', 'a', 'b', 'theta'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/exponential1d-1.0.0.yaml0000644000537500020070000000145600000000000030664 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/exponential1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/exponential1d-1.0.0" title: > One dimensional exponential model. description: > One dimensional exponential model. examples: - - $$f(x) = 10.0e^{x/2.5}$$ - | !transform/exponential1d-1.0.0 {amplitude: 10.0, tau: 2.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude or scaling factor. tau: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Denominator in exponent. required: ['amplitude', 'tau'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/exponential_cutoff_power_law1d-1.0.0.yaml0000644000537500020070000000232100000000000034301 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/exponential_cutoff_power_law1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/exponential_cutoff_power_law1d-1.0.0" title: > One dimensional power law model with an exponential cutoff. description: > One dimensional power law model with an exponential cutoff. examples: - - $$f(x) = 10.0 (x / 5.0) ^ {-2.0} \exp (-x / 7.0)$$ - | !transform/exponential_cutoff_power_law1d-1.0.0 {alpha: 2.0, amplitude: 10.0, x_0: 5.0, x_cutoff: 7.0} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Model amplitude. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Reference point. alpha: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index. x_cutoff: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Cutoff point. required: ['amplitude', 'x_0', 'alpha', 'x_cutoff'] ... 
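The worked example in the exponential_cutoff_power_law1d schema can be evaluated directly. A short numeric sketch (illustrative only; the function below just restates the example's formula with its parameter values as defaults):

    import numpy as np

    def exp_cutoff_power_law(x, amplitude=10.0, x_0=5.0, alpha=2.0, x_cutoff=7.0):
        # f(x) = amplitude * (x / x_0)^(-alpha) * exp(-x / x_cutoff)
        return amplitude * (x / x_0) ** -alpha * np.exp(-x / x_cutoff)

    x = np.array([1.0, 5.0, 10.0])
    print(exp_cutoff_power_law(x))  # f(5.0) == 10.0 * exp(-5 / 7) ~ 4.895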
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/fix_inputs-1.1.0.yaml0000644000537500020070000000341700000000000030301 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/fix_inputs-1.1.0" tag: "tag:stsci.edu:asdf/transform/fix_inputs-1.1.0" title: > Set selected input arguments of a model to a constant. description: | This operation takes as the right hand side a dict equivalent that consists of key:value pairs where the key identifies the input argument to be set, either by position number (0 based) or name, and the value is the floating point value that should be assigned to that input. The result is a compound model with n fewer input arguments where n is the number of input values to be set (i.e., the number of keys in the dict). examples: - - Fix the 0-th coordinate. - | !transform/fix_inputs-1.1.0 forward: - !transform/compose-1.1.0 forward: - !transform/gnomonic-1.1.0 {direction: pix2sky} - !transform/rotate2d-1.2.0 {angle: 23.0} - keys: [0] values: [2] - - Fix the "x" coordinate. - | !transform/fix_inputs-1.1.0 forward: - !transform/compose-1.1.0 forward: - !transform/gnomonic-1.1.0 {direction: pix2sky} - !transform/rotate2d-1.2.0 {angle: 23.0} - keys: [x] values: [2] allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: - $ref: "transform-1.1.0" - type: object properties: keys: type: array items: type: [string, integer] values: type: array items: - type: number minItems: 2 maxItems: 2 required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/fix_inputs-1.2.0.yaml0000644000537500020070000000341700000000000030302 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/fix_inputs-1.2.0" tag: "tag:stsci.edu:asdf/transform/fix_inputs-1.2.0" title: > Set selected input arguments of a model to a constant. description: | This operation takes as the right hand side a dict equivalent that consists of key:value pairs where the key identifies the input argument to be set, either by position number (0 based) or name, and the value is the floating point value that should be assigned to that input. The result is a compound model with n fewer input arguments where n is the number of input values to be set (i.e., the number of keys in the dict). examples: - - Fix the 0-th coordinate. - | !transform/fix_inputs-1.2.0 forward: - !transform/compose-1.2.0 forward: - !transform/gnomonic-1.2.0 {direction: pix2sky} - !transform/rotate2d-1.3.0 {angle: 23.0} - keys: [0] values: [2] - - Fix the "x" coordinate. - | !transform/fix_inputs-1.2.0 forward: - !transform/compose-1.2.0 forward: - !transform/gnomonic-1.2.0 {direction: pix2sky} - !transform/rotate2d-1.3.0 {angle: 23.0} - keys: [x] values: [2] allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: - $ref: "transform-1.2.0" - type: object properties: keys: type: array items: type: [string, integer] values: type: array items: - type: number minItems: 2 maxItems: 2 required: [forward] ... 
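The operation fix_inputs serializes corresponds to astropy.modeling's fix_inputs helper. A minimal sketch (assuming astropy is installed) of the first example above, a gnomonic projection composed with a 23-degree rotation, with input 0 pinned to the value 2:

    from astropy.modeling import fix_inputs
    from astropy.modeling.models import Pix2Sky_Gnomonic, Rotation2D

    pipeline = Pix2Sky_Gnomonic() | Rotation2D(angle=23.0)  # two inputs: x, y
    fixed = fix_inputs(pipeline, {0: 2})                    # one input left: y

    print(pipeline(2, 1))  # evaluate with x=2, y=1
    print(fixed(1))        # same result; x is fixed to 2

Keys may equally be input names, e.g. fix_inputs(pipeline, {'x': 2}), matching the second example.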
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/gaussian1d-1.0.0.yaml0000644000537500020070000000175000000000000030145 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gaussian1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/gaussian1d-1.0.0" title: > A 1D Gaussian model. description: > A 1D gaussian distribution. examples: - - $$f(x) = 10.0 e^{- \frac{\left(x - 1.5\right)^{2}}{2*0.25^{2}}}$$ - | !transform/gaussian1d-1.0.0 amplitude: 10.0 bounding_box: [0.125, 2.875] mean: 1.5 stddev: 0.25 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude. mean: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Mean. stddev: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Standard deviation. required: [amplitude, mean, stddev] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/gaussian2d-1.0.0.yaml0000644000537500020070000000376600000000000030157 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gaussian2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/gaussian2d-1.0.0" title: > A 2D Gaussian model. description: > A 2D gaussian distribution. examples: - - $f(x, y) = 10.0 e^{-a\left(x - 1.5\right)^{2} -b\left(x - 1.5\right) \left(y - 2.5\right) -c\left(y - 2.5\right)^{2}}$ using the following definitions $a = \left(\frac{\cos^{2}{\left (0 \right )}}{2*0.25^{2}} +\frac{\sin^{2}{\left (0 \right )}}{2*0.375^{2}}\right)$, $b = \left(\frac{\sin{\left (2 *0 \right )}}{2 *0.25^{2}} -\frac{\sin{\left (2 *0 \right )}}{2*0.375^{2}}\right)$, $c = \left(\frac{\sin^{2}{\left (0\right )}}{2*0.25^{2}} +\frac{\cos^{2}{\left (0 \right )}}{2*0.375^{2}}\right)$ - | !transform/gaussian2d-1.0.0 amplitude: 10.0 bounding_box: - [0.4375, 4.5625] - [0.125, 2.875] theta: 0.0 x_mean: 1.5 x_stddev: 0.25 y_mean: 2.5 y_stddev: 0.375 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude. x_mean: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Mean in x. y_mean: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Mean in y. x_stddev: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Standard deviation in x. y_stddev: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Standard deviation in y. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Rotation angle in radians, increases counterclockwise. required: ['amplitude', 'x_mean', 'y_mean', 'x_stddev', 'y_stddev', 'theta'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.0.0.yaml0000644000537500020070000000140600000000000027715 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.0.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.0.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. 
See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.1.0.yaml0000644000537500020070000000135000000000000027714 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.1.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.1.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.2.0.yaml0000644000537500020070000000135000000000000027715 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.2.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.2.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.0.0.yaml0000644000537500020070000000154000000000000030704 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.0.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.0.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" ... 
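The TAN equations above reduce to a single cotangent/arctangent pair, which makes the required invertibility easy to verify numerically. A tiny self-contained sketch (illustrative only, not part of any schema):

    import numpy as np

    def tan_sky2pix(theta_deg):
        # R_theta = (180 / pi) * cot(theta)
        return np.degrees(1.0) / np.tan(np.radians(theta_deg))

    def tan_pix2sky(r_theta):
        # theta = arctan(180 / (pi * R_theta))
        return np.degrees(np.arctan(np.degrees(1.0) / r_theta))

    theta = 60.0
    assert np.isclose(tan_pix2sky(tan_sky2pix(theta)), theta)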
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.1.0.yaml0000644000537500020070000000154000000000000030705 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.1.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.1.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.2.0.yaml0000644000537500020070000000154000000000000030706 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.2.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.2.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.0.0.yaml0000644000537500020070000000143500000000000027540 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.0.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.0.0" title: | HEALPix projection. description: | Corresponds to the `HPX` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.1.0.yaml0000644000537500020070000000143500000000000027541 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.1.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.1.0" title: | HEALPix projection. 
description: | Corresponds to the `HPX` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.2.0.yaml0000644000537500020070000000143500000000000027542 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.2.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.2.0" title: | HEALPix projection. description: | Corresponds to the `HPX` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.0.0.yaml0000644000537500020070000000106600000000000030735 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.0.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.0.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.1.0.yaml0000644000537500020070000000106600000000000030736 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.1.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.1.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.2.0.yaml0000644000537500020070000000106600000000000030737 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.2.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.2.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. 
Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.0.0.yaml0000644000537500020070000000076400000000000027741 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.0.0" tag: "tag:stsci.edu:asdf/transform/identity-1.0.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.0.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.1.0.yaml0000644000537500020070000000076400000000000027742 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.1.0" tag: "tag:stsci.edu:asdf/transform/identity-1.1.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.1.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.2.0.yaml0000644000537500020070000000076400000000000027743 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.2.0" tag: "tag:stsci.edu:asdf/transform/identity-1.2.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.2.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/king_projected_analytic1d-1.0.0.yaml0000644000537500020070000000221300000000000033201 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/king_projected_analytic1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/king_projected_analytic1d-1.0.0" title: > Projected (surface density) analytic King Model. description: > Projected (surface density) analytic King Model. examples: - - $$f(x)=10.0(12.2)^2\left(\frac{1}{\sqrt{x^2+(12.2)^2}}-\frac{1}{\sqrt{(15.4)^2+(12.2)^2}}\right)^2$$ - | !transform/king_projected_analytic1d-1.0.0 amplitude: 10.0 bounding_box: [0.0, 15.4] r_core: 12.2 r_tide: 15.4 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude or scaling factor. r_core: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Core radius. r_tide: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Tidal radius. required: ['amplitude', 'r_core', 'r_tide'] ...
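Editorial note: the three identity schemas above are among the few transform schemas here that ship without an `examples` block. As a hedged, illustrative sketch only (this fragment is not part of the packaged schema files), an identity transform acting on two inputs would be tagged in an ASDF file as:

```
# Hypothetical ASDF fragment (not from the package): n_dims overrides the
# schema default of 1 so the transform passes two inputs through unchanged.
!transform/identity-1.2.0
  n_dims: 2
```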
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.0.0.yaml0000644000537500020070000001021200000000000030522 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.0.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0). The [label_mapper](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0) returns the label corresponding to given inputs. The [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.0.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels. - | !transform/label_mapper-1.0.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 6.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 2.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.0.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels. - | !transform/label_mapper-1.0.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 6.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 2.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.0.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.0.0" - type: object properties: mapper: description: | An array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value 0 or " ". anyOf: - $ref: "../core/ndarray-1.0.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.0.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.0.0" description: | [mapping](ref:http://stsci.edu/schemas/asdf/transform/remap_axes-1.0.0) atol: type: number description: | Absolute tolerance used to compare keys in the mapper. required: [mapper] ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.1.0.yaml0000644000537500020070000001065400000000000030535 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.1.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.1.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:transform/regions_selector-1.1.0). The [label_mapper](ref:transform/label_mapper-1.1.0) returns the label corresponding to given inputs. The [regions_selector](ref:transform/regions_selector-1.1.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.1.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels. - | !transform/label_mapper-1.1.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 6.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 2.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.1.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels. - | !transform/label_mapper-1.1.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 6.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 2.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.1.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.1.0" - type: object properties: mapper: description: | A mapping of inputs to labels. In the general case this is an `astropy.modeling.core.Model`. It could be a numpy array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value ``no_label``. It could be a dictionary which maps tuples to labels or floating point numbers to labels. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "transform-1.1.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.1.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.1.0" description: | [mapping](ref:transform/remap_axes-1.1.0) atol: type: number description: | Absolute tolerance used to compare keys in the mapper. no_label: description: | Fill in value for missing output. anyOf: - type: number - type: string required: [mapper] ...
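Editorial note: relative to 1.0.0, the label_mapper-1.1.0 schema above additionally accepts a transform as its `mapper` and adds the `no_label` fill value. A hedged sketch of that form (illustrative only; `shift` merely stands in for a model that evaluates to region labels):

```
# Hypothetical ASDF fragment (not from the package): a transform-valued mapper,
# with no_label supplying the fill value for inputs that match no region.
!transform/label_mapper-1.1.0
  mapper: !transform/shift-1.1.0
    offset: 0.0
  inputs: [x]
  no_label: 0
```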
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.2.0.yaml0000644000537500020070000001065400000000000030536 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.2.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.2.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:transform/regions_selector-1.2.0). The [label_mapper](ref:transform/label_mapper-1.2.0) returns the label corresponding to given inputs. The [regions_selector](ref:transform/regions_selector-1.2.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.2.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels. - | !transform/label_mapper-1.2.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 6.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 2.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.2.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels. - | !transform/label_mapper-1.2.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 6.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 2.0} - !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [1] - !transform/shift-1.2.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.2.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.2.0" - type: object properties: mapper: description: | A mapping of inputs to labels. In the general case this is an `astropy.modeling.core.Model`. It could be a numpy array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value ``no_label``. It could be a dictionary which maps tuples to labels or floating point numbers to labels. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "transform-1.2.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.2.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.2.0" description: | [mapping](ref:transform/remap_axes-1.2.0) atol: type: number description: | Absolute tolerance used to compare keys in the mapper. no_label: description: | Fill in value for missing output. anyOf: - type: number - type: string required: [mapper] ...
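Editorial note: the linear1d schema that follows defines `slope` and `intercept` but carries no `examples` block. A minimal hedged instance (illustrative only, not from the packaged files) representing $f(x) = 2.0 x + 1.5$:

```
# Hypothetical ASDF fragment (not from the package).
!transform/linear1d-1.0.0
  slope: 2.0
  intercept: 1.5
```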
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/linear1d-1.0.0.yaml0000644000537500020070000000120000000000000027573 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/linear1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/linear1d-1.0.0" title: > A one dimensional line model. description: > A one dimensional line model. allOf: - $ref: "transform-1.2.0" - type: object properties: slope: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Slope of the straight line. intercept: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Intercept of the straight line. ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/log_parabola1d-1.0.0.yaml0000644000537500020070000000227700000000000030762 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/log_parabola1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/log_parabola1d-1.0.0" title: > One dimensional log parabola model (sometimes called curved power law). description: > One dimensional log parabola model (sometimes called curved power law). examples: - - $$f(x) = 10*(\frac{x}{0.5})^{-2.0-3.2\log{(\frac{x}{0.5})}}$$ - | !transform/log_parabola1d-1.0.0 {alpha: 2.0, amplitude: 10.0, beta: 3.2, x_0: 0.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Model amplitude. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Reference point. alpha: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index. beta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law curvature. required: ['amplitude', 'x_0', 'alpha', 'beta'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/logarithmic1d-1.0.0.yaml0000644000537500020070000000150500000000000030633 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/logarithmic1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/logarithmic1d-1.0.0" title: > One dimensional (natural) logarithmic model. description: > One dimensional (natural) logarithmic model. examples: - - $$f(x) = 10.0\ln(\frac{x}{2.5})$$ - | !transform/logarithmic1d-1.0.0 {amplitude: 10.0, tau: 2.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude or scaling factor. tau: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Denominator in log. required: ['amplitude', 'tau'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/lorentz1d-1.0.0.yaml0000644000537500020070000000202000000000000030020 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/lorentz1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/lorentz1d-1.0.0" title: > One dimensional Lorentzian model.
description: > One dimensional Lorentzian model. examples: - - $$f(x) = \frac{10.0 * 5.0^{2}}{5.0^{2} + \left(x - 0.5\right)^{2}}$$ - | !transform/lorentz1d-1.0.0 amplitude: 10.0 bounding_box: [-124.5, 125.5] fwhm: 5.0 x_0: 0.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Peak value. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Position of the peak. fwhm: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Full width at half maximum. required: ['amplitude', 'x_0', 'fwhm'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/math_functions-1.0.0.yaml0000644000537500020070000000103400000000000031122 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/math_functions-1.0.0" tag: "tag:stsci.edu:asdf/transform/math_functions-1.0.0" title: > Math functions. description: | Commonly used math functions. examples: - - Atan2 - | !transform/math_functions-1.0.0 func_name: arctan2 allOf: - $ref: "transform-1.2.0" - type: object properties: func_name: type: string description: | The name of a numpy ufunc. ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.0.0.yaml0000644000537500020070000000131500000000000027717 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.0.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.0.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.1.0.yaml0000644000537500020070000000131500000000000027720 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.1.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.1.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.1.0" ...
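Editorial note: the pure-$ref projection schemas such as mercator above define no properties of their own; they only inherit `direction` from their base class. A hedged instance (illustrative only, not from the packaged files):

```
# Hypothetical ASDF fragment (not from the package): a Mercator sky-to-pixel
# projection; omitting direction would fall back to the default, pix2sky.
!transform/mercator-1.1.0
  direction: sky2pix
```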
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.2.0.yaml0000644000537500020070000000131500000000000027721 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.2.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.2.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/moffat1d-1.0.0.yaml0000644000537500020070000000225500000000000027610 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/moffat1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/moffat1d-1.0.0" title: > One dimensional Moffat model. description: > One dimensional Moffat distribution. examples: - - $$f(x)=10.0\left(1+\frac{\left(x-0.5\right)^{2}}{1.2^{2}}\right)^{-2}$$ - | !transform/moffat1d-1.0.0 {alpha: 2.5, amplitude: 10.0, gamma: 1.2, x_0: 0.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude of the model. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the maximum of the Moffat model. gamma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Core width of the Moffat model. alpha: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power index of the Moffat model. required: ['amplitude', 'x_0', 'gamma', 'alpha'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/moffat2d-1.0.0.yaml0000644000537500020070000000257000000000000027611 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/moffat2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/moffat2d-1.0.0" title: > Two dimensional Moffat model. description: > Two dimensional Moffat distribution. examples: - - $$f(x)=10.0\left(1+\frac{\left(x-0.5\right)^{2}+\left(y-1.5\right)^{2}}{1.2^{2}}\right)^{-2}$$ - | !transform/moffat2d-1.0.0 {alpha: 2.5, amplitude: 10.0, gamma: 1.2, x_0: 0.5, y_0: 1.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude of the model. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the maximum of the Moffat model. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the maximum of the Moffat model. gamma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Core width of the Moffat model. alpha: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power index of the Moffat model. 
required: ['amplitude', 'x_0', 'y_0', 'gamma', 'alpha'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.0.0.yaml0000644000537500020070000000161600000000000030235 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.0.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.0.0" title: | Mollweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.1.0.yaml0000644000537500020070000000161600000000000030236 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.1.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.1.0" title: | Mollweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.2.0.yaml0000644000537500020070000000161600000000000030237 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.2.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.2.0" title: | Mollweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.2.0" ...
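Editorial note: the sky-to-pixel formulas in the Mollweide schemas above use an auxiliary angle $\gamma$ that the descriptions never define. In the conventional Mollweide construction (stated here as a hedged editorial aside, not as schema content), $\gamma$ is the solution of

$$2\gamma + \sin 2\gamma = \pi \sin \theta$$

which is typically solved numerically for each latitude $\theta$.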
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.0.0.yaml0000644000537500020070000000156600000000000027772 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.0.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.0.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.0.0 forward: - !transform/shift-1.0.0 offset: 2.0 - !transform/shift-1.0.0 offset: 3.0 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.1.0.yaml0000644000537500020070000000156600000000000027773 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.1.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.1.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.1.0 forward: - !transform/shift-1.1.0 offset: 2.0 - !transform/shift-1.1.0 offset: 3.0 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.2.0.yaml0000644000537500020070000000156600000000000027774 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.2.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.2.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.2.0 forward: - !transform/shift-1.2.0 offset: 2.0 - !transform/shift-1.2.0 offset: 3.0 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/multiplyscale-1.0.0.yaml0000644000537500020070000000132500000000000030773 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiplyscale-1.0.0" tag: "tag:stsci.edu:asdf/transform/multiplyscale-1.0.0" title: > A Multiply model. description: > Multiply the input by a factor. 
examples: - - Multiply the input by a pixel scale factor. - | !transform/multiplyscale-1.0.0 factor: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 arcsec pixel-1, value: 0.06} allOf: - $ref: "transform-1.2.0" - type: object properties: factor: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Multiplication factor. required: [factor] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/ortho_polynomial-1.0.0.yaml0000644000537500020070000000324000000000000031500 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/ortho_polynomial-1.0.0" tag: "tag:stsci.edu:asdf/transform/ortho_polynomial-1.0.0" title: > Represents various Orthogonal Polynomial models. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. The property polynomial_type defines which kind of polynomial is used. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/ortho_polynomial-1.0.0 polynomial_type: hermite coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/ortho_polynomial-1.0.0 polynomial_type: chebyshev coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] allOf: - $ref: "transform-1.2.0" - type: object properties: polynomial_type: description: | One of a selected set of polynomial types. type: string enum: [chebyshev, legendre, hermite] coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [polynomial_type, coefficients] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.0.0.yaml0000644000537500020070000000137100000000000030041 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.0.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.0.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.1.0.yaml0000644000537500020070000000137100000000000030042 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.1.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.1.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard.
The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.2.0.yaml0000644000537500020070000000137100000000000030043 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.2.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.2.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/planar2d-1.0.0.yaml0000644000537500020070000000173600000000000027615 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/planar2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/planar2d-1.0.0" title: > Two dimensional plane model. description: > Two dimensional plane model. examples: - - $$f(x, y) = 5.0x + 2.5y + 11$$ - | !transform/planar2d-1.0.0 {intercept: 11.0, slope_x: 5.0, slope_y: 2.5} allOf: - $ref: "transform-1.2.0" - type: object properties: slope_x: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Slope of the straight line in x. slope_y: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Slope of the straight line in y. intercept: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: z-intercept of the straight line. required: ['slope_x', 'slope_y', 'intercept'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.0.0.yaml0000644000537500020070000000123200000000000030527 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.0.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.0.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.0.0" ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.1.0.yaml0000644000537500020070000000123200000000000030530 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.1.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.1.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.2.0.yaml0000644000537500020070000000123200000000000030531 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.2.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.2.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/plummer1d-1.0.0.yaml0000644000537500020070000000154400000000000030015 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plummer1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/plummer1d-1.0.0" title: > One dimensional Plummer model. description: > One dimensional Plummer density profile model. examples: - - $$\rho(r)=\frac{3*15.0}{4\pi *5.5^3}(1+\frac{r^2}{5.5^2})^{-5/2}$$ - | !transform/plummer1d-1.0.0 {mass: 15.0, r_plum: 5.5} allOf: - $ref: "transform-1.2.0" - type: object properties: mass: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Total mass of cluster. r_plum: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Scale parameter which sets the size of the cluster core. required: ['mass', 'r_plum'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.0.0.yaml0000644000537500020070000000064300000000000030105 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.0.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.0.0" title: | Polyconic projection. description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.0.0" ...
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.1.0.yaml0000644000537500020070000000064300000000000030106 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.1.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.1.0" title: | Polyconic projection. description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.2.0.yaml0000644000537500020070000000064300000000000030107 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.2.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.2.0" title: | Polyconic projection. description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.0.0.yaml0000644000537500020070000000244100000000000030267 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.0.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.0.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.0.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.0.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] allOf: - $ref: "transform-1.0.0" - type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [coefficients] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.1.0.yaml0000644000537500020070000000244100000000000030270 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.1.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.1.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. 
examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.1.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.1.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] allOf: - $ref: "transform-1.1.0" - type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [coefficients] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.2.0.yaml0000644000537500020070000000251400000000000030272 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.2.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.2.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.2.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.2.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] allOf: - $ref: "transform-1.2.0" - type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../unit/quantity-1.1.0" - type: array required: [coefficients] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.0.0.yaml0000644000537500020070000000114400000000000027237 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.0.0" tag: "tag:stsci.edu:asdf/transform/power-1.0.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.1.0.yaml0000644000537500020070000000114400000000000027240 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.1.0" tag: "tag:stsci.edu:asdf/transform/power-1.1.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... 
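Editorial note: unlike the multiply schemas earlier, the power schemas ship no `examples` block. A hedged instance (illustrative only, not from the packaged files) that runs both shifts on the same input and raises the first result to the second, i.e. $(x + 2)^{(x + 3)}$:

```
# Hypothetical ASDF fragment (not from the package).
!transform/power-1.1.0
  forward:
    - !transform/shift-1.1.0
      offset: 2.0
    - !transform/shift-1.1.0
      offset: 3.0
```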
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.2.0.yaml0000644000537500020070000000114400000000000027241 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.2.0" tag: "tag:stsci.edu:asdf/transform/power-1.2.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/power_law1d-1.0.0.yaml0000644000537500020070000000170000000000000030325 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power_law1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/power_law1d-1.0.0" title: > One dimensional power law model. description: > One dimensional power law model. examples: - - $$f(x) = 10*(x/0.5)^{-2}$$ - | !transform/power_law1d-1.0.0 {alpha: 2.0, amplitude: 10.0, x_0: 0.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Model amplitude at the reference point. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Reference point. alpha: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index. required: ['amplitude', 'x_0', 'alpha'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.0.0.yaml0000644000537500020070000000064400000000000030422 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.0.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.1.0.yaml0000644000537500020070000000064400000000000030423 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.1.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.2.0.yaml0000644000537500020070000000064400000000000030424 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.2.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.0.0.yaml0000644000537500020070000000115100000000000031616 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.0.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.1.0.yaml0000644000537500020070000000115100000000000031617 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.1.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.2.0.yaml0000644000537500020070000000115100000000000031620 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.2.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.0.0.yaml0000644000537500020070000000071300000000000032066 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.0.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.1.0.yaml0000644000537500020070000000071300000000000032067 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.1.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.2.0.yaml0000644000537500020070000000071300000000000032070 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.2.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.2.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.0.0.yaml0000644000537500020070000000124700000000000027700 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.0.0" title: | Base class of all quadcube projections. description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.1.0.yaml0000644000537500020070000000124700000000000027701 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.1.0" title: | Base class of all quadcube projections. 
description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.2.0.yaml0000644000537500020070000000124700000000000027702 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.2.0" title: | Base class of all quadcube projections. description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/redshift_scale_factor-1.0.0.yaml0000644000537500020070000000121400000000000032416 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/redshift_scale_factor-1.0.0" tag: "tag:stsci.edu:asdf/transform/redshift_scale_factor-1.0.0" title: > One dimensional redshift scale factor model. description: > One dimensional redshift scale factor model. examples: - - $$f(x)=x(1+2.5)$$ - | !transform/redshift_scale_factor-1.0.0 {z: 2.5} allOf: - $ref: "transform-1.2.0" - type: object properties: z: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Redshift value. required: ['z'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.0.0.yaml0000644000537500020070000000607700000000000031463 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.0.0" title: > Represents a discontinuous transform. description: | Maps regions to transforms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2".
- | !transform/regions_selector-1.0.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.0.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0, 1, 1] - !transform/concatenate-1.0.0 forward: - !transform/concatenate-1.0.0 forward: - !transform/shift-1.0.0 {offset: 1.0} - !transform/shift-1.0.0 {offset: 2.0} - !transform/shift-1.0.0 {offset: 3.0} 2: !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0, 1, 1] - !transform/concatenate-1.0.0 forward: - !transform/concatenate-1.0.0 forward: - !transform/scale-1.0.0 {factor: 2.0} - !transform/scale-1.0.0 {factor: 3.0} - !transform/scale-1.0.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.0.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.0.0](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0) $ref: "./label_mapper-1.0.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to transforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.0.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.1.0.yaml0000644000537500020070000000604100000000000031453 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.1.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.1.0" title: > Represents a discontinuous transform. description: | Maps regions to transforms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2". - | !transform/regions_selector-1.1.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.1.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0, 1, 1] - !transform/concatenate-1.1.0 forward: - !transform/concatenate-1.1.0 forward: - !transform/shift-1.1.0 {offset: 1.0} - !transform/shift-1.1.0 {offset: 2.0} - !transform/shift-1.1.0 {offset: 3.0} 2: !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0, 1, 1] - !transform/concatenate-1.1.0 forward: - !transform/concatenate-1.1.0 forward: - !transform/scale-1.1.0 {factor: 2.0} - !transform/scale-1.1.0 {factor: 3.0} - !transform/scale-1.1.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.1.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.1.0](ref:transform/label_mapper-1.1.0) $ref: "./label_mapper-1.1.0" inputs: description: | Names of inputs.
type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to transforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.1.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.2.0.yaml0000644000537500020070000000604100000000000031454 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.2.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.2.0" title: > Represents a discontinuous transform. description: | Maps regions to transforms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2". - | !transform/regions_selector-1.2.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.2.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [0, 1, 1] - !transform/concatenate-1.2.0 forward: - !transform/concatenate-1.2.0 forward: - !transform/shift-1.2.0 {offset: 1.0} - !transform/shift-1.2.0 {offset: 2.0} - !transform/shift-1.2.0 {offset: 3.0} 2: !transform/compose-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [0, 1, 1] - !transform/concatenate-1.2.0 forward: - !transform/concatenate-1.2.0 forward: - !transform/scale-1.2.0 {factor: 2.0} - !transform/scale-1.2.0 {factor: 3.0} - !transform/scale-1.2.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.2.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.2.0](ref:transform/label_mapper-1.2.0) $ref: "./label_mapper-1.2.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to transforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.2.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.0.0.yaml0000644000537500020070000000404500000000000030232 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.0.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.0.0" title: > Reorder, add and drop axes.
description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers, each corresponding to an index of the input axis to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.0.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.0.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.0.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/concatenate-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0] - !transform/remap_axes-1.0.0 mapping: [1] - !transform/constant-1.0.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.0.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: type: integer allOf: - $ref: "transform-1.0.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.1.0.yaml0000644000537500020070000000413700000000000030235 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.1.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.1.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers, each corresponding to an index of the input axis to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.1.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.1.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.1.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/concatenate-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0] - !transform/remap_axes-1.1.0 mapping: [1] - !transform/constant-1.1.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.1.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.1.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] ... 
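The `mapping`/`n_inputs` semantics above correspond directly to `astropy.modeling.models.Mapping`, which is what asdf serializes with this tag when a transform extension (e.g. asdf-astropy) is installed. A hedged sketch of the mapping patterns from the examples:
```python
# Sketch: the remap_axes example mappings expressed with astropy's Mapping
# model (assumes astropy is installed).
from astropy.modeling.models import Mapping

swap = Mapping((1, 0))              # mapping: [1, 0] -- swap two input axes
dup = Mapping((0, 0, 1))            # mapping: [0, 0, 1] -- repeat the first axis
drop = Mapping((0, 1), n_inputs=3)  # mapping: [0, 1], n_inputs: 3 -- drop the last

print(swap(3.0, 4.0))       # (4.0, 3.0)
print(drop(1.0, 2.0, 9.0))  # (1.0, 2.0)
```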
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.2.0.yaml0000644000537500020070000000413700000000000030236 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.2.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.2.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers, each corresponding to an index of the input axis to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.2.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.2.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.2.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/concatenate-1.2.0 forward: - !transform/remap_axes-1.2.0 mapping: [0] - !transform/remap_axes-1.2.0 mapping: [1] - !transform/constant-1.2.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.2.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.2.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.3.0.yaml0000644000537500020070000000433100000000000030233 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.3.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.3.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers. Each item in the list corresponds to an output axis. Each item is the index of the input axis to send to the output axis. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. Invertibility: This transform does not have a general analytical inverse. 
In some well-defined cases it is possible to invert automatically. examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.3.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.3.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.3.0 mapping: [0, 0, 1] - - | The above example is equivalent to the following, and ASDF implementations are free to normalize it as follows: - | !transform/concatenate-1.2.0 forward: - !transform/remap_axes-1.3.0 mapping: [0] - !transform/remap_axes-1.3.0 mapping: [1] - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.3.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: type: integer allOf: - $ref: "transform-1.2.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/ricker_wavelet1d-1.0.0.yaml0000644000537500020070000000204400000000000031336 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/ricker_wavelet1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/ricker_wavelet1d-1.0.0" title: > One dimensional Ricker Wavelet model. description: > One dimensional Ricker Wavelet model. examples: - - $$f(x)={10.0\left(1-\frac{\left(x-0.5\right)^{2}}{2.0^{2}}\right)e^{-\frac{\left(x-0.5\right)^{2}}{2*2.0^{2}}}}$$ - | !transform/ricker_wavelet1d-1.0.0 {amplitude: 10.0, sigma: 2.0, x_0: 0.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Peak value. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Position of the peak. sigma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Width of the Ricker wavelet. required: ['amplitude', 'x_0', 'sigma'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/ricker_wavelet2d-1.0.0.yaml0000644000537500020070000000232700000000000031343 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/ricker_wavelet2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/ricker_wavelet2d-1.0.0" title: > Two dimensional Ricker Wavelet model. description: > Two dimensional Ricker Wavelet model. examples: - - $$f(x, y)={10.0\left(1-\frac{\left(x-0.5\right)^{2}+(y-1.5)^{2}}{2.0^{2}}\right)e^{-\frac{\left(x-0.5\right)^{2}+(y-1.5)^{2}}{2*2.0^{2}}}}$$ - | !transform/ricker_wavelet2d-1.0.0 {amplitude: 10.0, sigma: 2.0, x_0: 0.5, y_0: 1.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the peak. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the peak. sigma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Width of the Ricker wavelet. required: ['amplitude', 'x_0', 'y_0', 'sigma'] ...
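For reference, the 1D example instance above evaluates as expected with astropy's `RickerWavelet1D` model (named `MexicanHat1D` before astropy 4.0); a quick sketch, assuming astropy is installed:
```python
# Sketch: evaluate the ricker_wavelet1d example instance
# {amplitude: 10.0, sigma: 2.0, x_0: 0.5} at its peak.
from astropy.modeling.models import RickerWavelet1D

m = RickerWavelet1D(amplitude=10.0, x_0=0.5, sigma=2.0)
print(m(0.5))  # 10.0 at x = x_0, where the bracket and exponential are both 1
```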
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/ring2d-1.0.0.yaml0000644000537500020070000000271000000000000027270 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/ring2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/ring2d-1.0.0" title: > Two dimensional radially symmetric ring model. description: > Two dimensional radially symmetric ring. examples: - - A 2D ring centered at (x, y) = (0.5, 1.5), with an inner radius of 5.0, outer radius of 7.5 and amplitude 10.0. - | !transform/ring2d-1.0.0 amplitude: 10.0 bounding_box: - [-6.0, 9.0] - [-7.0, 8.0] r_in: 5.0 width: 2.5 x_0: 0.5 y_0: 1.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Value of the ring function. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x center position of the ring. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y center position of the ring. r_in: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Inner radius of the ring. width: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Width of the ring. required: ['amplitude', 'x_0', 'y_0', 'r_in', 'width'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.0.0.yaml0000644000537500020070000000104500000000000027627 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.0.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: angle: type: number description: Angle, in degrees. required: [angle] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.1.0.yaml0000644000537500020070000000104500000000000027630 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.1.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.1.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: angle: type: number description: Angle, in degrees. required: [angle] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.2.0.yaml0000644000537500020070000000114300000000000027630 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.2.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.2.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees.
Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: angle: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. required: [angle] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.3.0.yaml0000644000537500020070000000114300000000000027631 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.3.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.3.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.2.0" - type: object properties: angle: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. required: [angle] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.0.0.yaml0000644000537500020070000000267000000000000027635 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.0.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.0.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.0.0" - type: object properties: phi: type: number description: Angle, in degrees. theta: type: number description: Angle, in degrees. psi: type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.1.0.yaml0000644000537500020070000000267000000000000027636 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.1.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.1.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.1.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.1.0" - type: object properties: phi: type: number description: Angle, in degrees. theta: type: number description: Angle, in degrees. psi: type: number description: Angle, in degrees. 
direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.2.0.yaml0000644000537500020070000000316200000000000027634 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.2.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.2.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.2.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.1.0" - type: object properties: phi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. psi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.3.0.yaml0000644000537500020070000000316200000000000027635 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.3.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.3.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.3.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.2.0" - type: object properties: phi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. psi: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. 
If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/rotate_sequence_3d-1.0.0.yaml0000644000537500020070000000244300000000000031662 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate_sequence_3d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate_sequence_3d-1.0.0" title: > Rotation in 3D space. description: | Rotation in 3D space by arbitrary number of angles about arbitrary order of "x", "y", "z" axes. examples: - - A sequence of rotations around 5 axes. - | !transform/rotate_sequence_3d-1.0.0 angles: [-0.0193, -0.1432, -0.04, -65.60, 273.089] axes_order: zyxyz rotation_type: cartesian allOf: - $ref: "transform-1.2.0" - type: object properties: angles: type: array items: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The angles of rotation in units of deg. axes_order: description: | A sequence of "x", "y" or "z" characters representing an axis of rotation. The number of characters must equal the number of angles. For the JWST V23 to sky transform the axes are zyxyz. type: string rotation_type: description: | The type of rotation class to initialize. type: string enum: [spherical, cartesian] required: [angles, axes_order, rotation_type] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.0.0.yaml0000644000537500020070000000121500000000000031427 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.0.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.0.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.1.0.yaml0000644000537500020070000000121500000000000031430 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.1.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.1.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" ...
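The Sanson-Flamsteed forward and inverse formulas quoted above invert each other by construction; a quick numeric check (sketch only, angles handled in degrees as in the FITS convention):
```python
# Sketch: round-trip the SFL pixel-to-sky and sky-to-pixel formulas.
import numpy as np

def sfl_pix2sky(x, y):
    return x / np.cos(np.deg2rad(y)), y            # phi = x / cos(y), theta = y

def sfl_sky2pix(phi, theta):
    return phi * np.cos(np.deg2rad(theta)), theta  # x = phi cos(theta), y = theta

x, y = 10.0, 30.0
assert np.allclose(sfl_sky2pix(*sfl_pix2sky(x, y)), (x, y))
```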
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.2.0.yaml0000644000537500020070000000121500000000000031431 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.2.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.2.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.0.0.yaml0000644000537500020070000000065300000000000027176 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.0.0" tag: "tag:stsci.edu:asdf/transform/scale-1.0.0" title: > A Scale model. description: > Multiply the input by a factor. allOf: - $ref: "transform-1.0.0" - type: object properties: factor: type: number description: Multiplication factor. required: [factor] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.1.0.yaml0000644000537500020070000000065300000000000027177 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.1.0" tag: "tag:stsci.edu:asdf/transform/scale-1.1.0" title: > A Scale model. description: > Multiply the input by a factor. allOf: - $ref: "transform-1.1.0" - type: object properties: factor: type: number description: Multiplication factor. required: [factor] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.2.0.yaml0000644000537500020070000000075300000000000027201 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.2.0" tag: "tag:stsci.edu:asdf/transform/scale-1.2.0" title: > A Scale model. description: > Scale the input by a dimensionless factor. allOf: - $ref: "transform-1.2.0" - type: object properties: factor: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Scale factor. required: [factor] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/sersic1d-1.0.0.yaml0000644000537500020070000000217300000000000027623 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sersic1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/sersic1d-1.0.0" title: > One dimensional Sersic surface brightness profile. description: > One dimensional Sersic surface brightness profile. 
examples: - - $I(r)=10.0\exp\left\{-b_n\left[\left(\frac{r}{1.0}\right)^{(1/4)}-1\right]\right\}$, where $b_n$ is defined such that $r_e$ contains half the total luminosity (can be solved for numerically). - | !transform/sersic1d-1.0.0 {amplitude: 10.0, n: 4.0, r_eff: 1.0} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Surface brightness at r_eff. r_eff: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Effective (half-light) radius. n: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Sersic index. required: ['amplitude', 'r_eff', 'n'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/sersic2d-1.0.0.yaml0000644000537500020070000000353000000000000027622 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sersic2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/sersic2d-1.0.0" title: > Two dimensional Sersic surface brightness profile. description: > Two dimensional Sersic surface brightness profile. examples: - - $I(x, y)=I(r)=10.0\exp\left\{-b_n\left[\left(\frac{(\sqrt{(x-0.5)^2 + (y-1.5)^2})}{1.0}\right)^{(1/4)}-1\right]\right\}$ where $b_n$ is defined such that $r_e$ contains half the total luminosity (can be solved for numerically). - | !transform/sersic2d-1.0.0 {amplitude: 10.0, ellip: 0.0, n: 4.0, r_eff: 1.0, theta: 0.0, x_0: 0.5, y_0: 1.5} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Surface brightness at r_eff. r_eff: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Effective (half-light) radius. n: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Sersic index. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x position of the center. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y position of the center. ellip: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Ellipticity. theta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Rotation angle in radians, increases counterclockwise from the positive x-axis. required: ['amplitude', 'r_eff', 'n', 'x_0', 'y_0', 'ellip', 'theta'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.0.0.yaml0000644000537500020070000000066300000000000027225 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.0.0" tag: "tag:stsci.edu:asdf/transform/shift-1.0.0" title: > A Shift operation. description: > Apply an offset in one direction. allOf: - $ref: "transform-1.0.0" - type: object properties: offset: type: number description: Offset in one direction. required: [offset] ...
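Shift and scale are the workhorse transforms in the compose/concatenate examples earlier in this archive. A hedged round-trip sketch with astropy models (assumes astropy plus an asdf transform extension, e.g. asdf-astropy, is installed; file name is illustrative):
```python
# Sketch: compose a shift and a scale, write them out, and read them back.
import asdf
from astropy.modeling.models import Scale, Shift

model = Shift(offset=1.0) | Scale(factor=2.0)  # serialized via !transform/compose
asdf.AsdfFile({"model": model}).write_to("shift_scale.asdf")

with asdf.open("shift_scale.asdf") as af:
    print(af["model"](3.0))  # (3 + 1) * 2 = 8.0
```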
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.1.0.yaml0000644000537500020070000000066300000000000027226 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.1.0" tag: "tag:stsci.edu:asdf/transform/shift-1.1.0" title: > A Shift operation. description: > Apply an offset in one direction. allOf: - $ref: "transform-1.1.0" - type: object properties: offset: type: number description: Offset in one direction. required: [offset] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.2.0.yaml0000644000537500020070000000076100000000000027226 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.2.0" tag: "tag:stsci.edu:asdf/transform/shift-1.2.0" title: > A Shift operation. description: > Apply an offset in one direction. allOf: - $ref: "transform-1.2.0" - type: object properties: offset: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Offset in one direction. required: [offset] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/sine1d-1.0.0.yaml0000644000537500020070000000166500000000000027276 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sine1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/sine1d-1.0.0" title: > One dimensional sine model. description: > One dimensional sine. examples: - - $$f(x)=10.0\sin(2\pi *0.5x+2\pi*1.0)$$ - | !transform/sine1d-1.0.0 {amplitude: 10.0, frequency: 0.5, phase: 1.0} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Oscillation amplitude. frequency: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Oscillation frequency. phase: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Oscillation phase. required: ['amplitude', 'frequency', 'phase'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.0.0.yaml0000644000537500020070000000144300000000000031777 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.0.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.0.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" ...
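The SIN formulas above invert each other exactly; a quick numeric check of that claim (sketch only, angles handled in degrees):
```python
# Sketch: round-trip the slant orthographic (SIN) formulas quoted above.
import numpy as np

def sin_pix2sky(r_theta):
    return np.rad2deg(np.arccos(np.deg2rad(r_theta)))  # theta, in degrees

def sin_sky2pix(theta):
    return np.rad2deg(np.cos(np.deg2rad(theta)))       # R_theta, in degrees

theta = 60.0
assert np.isclose(sin_pix2sky(sin_sky2pix(theta)), theta)
```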
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.1.0.yaml0000644000537500020070000000140500000000000031776 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.1.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.1.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.2.0.yaml0000644000537500020070000000140500000000000031777 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.2.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.2.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.0.0.yaml0000644000537500020070000000246500000000000033542 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.0.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.0.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90 ... 
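astropy exposes this projection with the same three parameters; a sketch (the class name is taken from astropy.modeling.projections and is an assumption here, not something stated in this archive):
```python
# Sketch: the SZP projection with non-default mu/phi0/theta0, mirroring the
# schema's properties (assumes astropy is installed).
from astropy.modeling.projections import Pix2Sky_SlantZenithalPerspective

szp = Pix2Sky_SlantZenithalPerspective(mu=2.0, phi0=180.0, theta0=60.0)
phi, theta = szp(1.0, 1.0)  # intermediate pixel coords (deg) -> native phi, theta
```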
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.1.0.yaml0000644000537500020070000000246500000000000033543 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.1.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.2.0.yaml0000644000537500020070000000272100000000000033537 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.2.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.2.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/smoothly_broken_power_law1d-1.0.0.yaml0000644000537500020070000000267700000000000033641 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/smoothly_broken_power_law1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/smoothly_broken_power_law1d-1.0.0" title: > One dimensional smoothly broken power law model. 
description: > One dimensional smoothly broken power law model. examples: - - $$f(x) = 10*(\frac{x}{5.0})^{-2.0}\{\frac{1}{2}[1+(\frac{x}{5.0})^{1/0.5}]\}^{(2.0-3.0)\cdot 0.5}$$ - | !transform/smoothly_broken_power_law1d-1.0.0 {alpha_1: 2.0, alpha_2: 3.0, amplitude: 10.0, delta: 0.5, x_break: 5.0} allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Model amplitude at the break point. x_break: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Break point. alpha_1: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index for x < x_break. alpha_2: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Power law index for x > x_break. delta: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Smoothness parameter. required: ['amplitude', 'x_break', 'alpha_1', 'alpha_2', 'delta'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.0.0.yaml0000644000537500020070000000147600000000000030752 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.0.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.0.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.1.0.yaml0000644000537500020070000000144000000000000030742 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.1.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.1.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.2.0.yaml0000644000537500020070000000144000000000000030743 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.2.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.2.0" title: | The stereographic projection.
description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.0.0.yaml0000644000537500020070000000156300000000000027737 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.0.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.0.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. - | !transform/subtract-1.0.0 forward: - !transform/shift-1.0.0 offset: 2.0 - !transform/shift-1.0.0 offset: 3.0 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.1.0.yaml0000644000537500020070000000156300000000000027740 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.1.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.1.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. - | !transform/subtract-1.1.0 forward: - !transform/shift-1.1.0 offset: 2.0 - !transform/shift-1.1.0 offset: 3.0 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.2.0.yaml0000644000537500020070000000156300000000000027741 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.2.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.2.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. 
- | !transform/subtract-1.2.0 forward: - !transform/shift-1.2.0 offset: 2.0 - !transform/shift-1.2.0 offset: 3.0 allOf: - $ref: "transform-1.2.0" - properties: forward: type: array items: $ref: "transform-1.2.0" required: [forward] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.0.0.yaml0000644000537500020070000000353600000000000027544 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.0.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.0.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. allOf: - $ref: "transform-1.0.0" - type: object properties: lookup_table: description: > Table values. anyOf: - $ref: ../core/ndarray-1.0.0 - type: array points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear" and "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used. type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.1.0.yaml0000644000537500020070000000353600000000000027545 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.1.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.1.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. allOf: - $ref: "transform-1.1.0" - type: object properties: lookup_table: description: > Table values. anyOf: - $ref: ../core/ndarray-1.0.0 - type: array points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear" and "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used. 
type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.2.0.yaml0000644000537500020070000000366100000000000027545 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.2.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.2.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. allOf: - $ref: "transform-1.2.0" - type: object properties: lookup_table: description: > Table values. anyOf: - type: array - $ref: ../core/ndarray-1.0.0 - $ref: ../unit/quantity-1.1.0 points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 - $ref: ../unit/quantity-1.1.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear" and "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used. type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.0.0.yaml0000644000537500020070000000072000000000000033260 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.0.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.1.0.yaml0000644000537500020070000000072000000000000033261 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.1.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. 
$ref: "quadcube-1.1.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.2.0.yaml0000644000537500020070000000072000000000000033262 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.2.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.2.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.0.0.yaml0000644000537500020070000000201000000000000030107 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.0.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string domain: description: | The domain (range of valid inputs) to the transform. Each entry in the list corresponds to an input dimension. type: array items: $ref: "domain-1.0.0" inverse: description: | Explicitly sets the inverse transform of this transform. If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.0.0" additionalProperties: true ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.1.0.yaml0000644000537500020070000000201000000000000030110 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.1.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string domain: description: | The domain (range of valid inputs) to the transform. Each entry in the list corresponds to an input dimension. type: array items: $ref: "domain-1.0.0" inverse: description: | Explicitly sets the inverse transform of this transform. If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.1.0" additionalProperties: true ... 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.2.0.yaml0000644000537500020070000000147100000000000030123 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.2.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string inverse: description: | Explicitly sets the inverse transform of this transform. If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.2.0" additionalProperties: true ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/trapezoid1d-1.0.0.yaml0000644000537500020070000000240500000000000030332 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/trapezoid1d-1.0.0" tag: "tag:stsci.edu:asdf/transform/trapezoid1d-1.0.0" title: > One dimensional trapezoid model. description: > One dimensional trapezoid. examples: - - A 1D trapezoid centered at x=0.5, of width 5.0, slope of tails 1.0, and amplitude 10.0 - | !transform/trapezoid1d-1.0.0 amplitude: 10.0 bounding_box: [-12.0, 13.0] slope: 1.0 width: 5.0 x_0: 0.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude of the trapezoid. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Center position of the trapezoid. width: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Width of the constant part of the trapezoid. slope: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Slope of the tails of the trapezoid. required: ['amplitude', 'x_0', 'width', 'slope'] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/trapezoid_disk2d-1.0.0.yaml0000644000537500020070000000307300000000000031347 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/trapezoid_disk2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/trapezoid_disk2d-1.0.0" title: > Two dimensional circular trapezoid model. description: > Two dimensional circular trapezoid. examples: - - A 2D trapezoid disk centered at (x, y) = (0.5, 1.5), of radius (distance between constant segments) 5.0, slope of tails 1.0, and amplitude 10.0 - | !transform/trapezoid_disk2d-1.0.0 R_0: 5.0 amplitude: 10.0 bounding_box: - [-13.5, 16.5] - [-14.5, 15.5] slope: 1.0 x_0: 0.5 y_0: 1.5 allOf: - $ref: "transform-1.2.0" - type: object properties: amplitude: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: Amplitude of the trapezoid. x_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: x center position of the trapezoid. y_0: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: y center position of the trapezoid. 
      R_0:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: Radius of the constant part of the trapezoid.
      slope:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: Slope of the tails of the trapezoid in x direction.
    required: ['amplitude', 'x_0', 'y_0', 'R_0', 'slope']
...
asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/voigt1d-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/transform/voigt1d-1.0.0"
tag: "tag:stsci.edu:asdf/transform/voigt1d-1.0.0"
title: >
  One dimensional model for the Voigt profile.
description: >
  One dimensional model for the Voigt profile.
examples:
  -
    - 1D Voigt model with a Lorentzian amplitude of 10.0, Lorentzian FWHM of 0.5,
      Gaussian FWHM of 0.9, centered at x=0.55.
    - |
      !transform/voigt1d-1.0.0 {amplitude_L: 10.0, fwhm_G: 0.9, fwhm_L: 0.5, x_0: 0.55}
allOf:
  - $ref: "transform-1.2.0"
  - type: object
    properties:
      x_0:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: Position of the peak.
      amplitude_L:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: The Lorentzian amplitude.
      fwhm_L:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: The Lorentzian full width at half maximum.
      fwhm_G:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: The Gaussian full width at half maximum.
    required: ['x_0', 'amplitude_L', 'fwhm_L', 'fwhm_G']
...
asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0"
title: |
  Base class of all zenithal (or azimuthal) projections.
description: |
  Zenithal projections are completely specified by defining the radius as
  a function of native latitude, $R_\theta$.

  The pixel-to-sky transformation is defined as:

  $$\phi &= \arg(-y, x) \\
  R_\theta &= \sqrt{x^2 + y^2}$$

  and the inverse (sky-to-pixel) is defined as:

  $$x &= R_\theta \sin \phi \\
  y &= R_\theta \cos \phi$$
allOf:
  - $ref: "transform-1.0.0"
  - type: object
    properties:
      direction:
        enum: [pix2sky, sky2pix]
        default: pix2sky
...
asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.1.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0"
title: |
  Base class of all zenithal (or azimuthal) projections.
description: |
  Zenithal projections are completely specified by defining the radius as
  a function of native latitude, $R_\theta$.

  The pixel-to-sky transformation is defined as:

  $$\phi &= \arg(-y, x) \\
  R_\theta &= \sqrt{x^2 + y^2}$$

  and the inverse (sky-to-pixel) is defined as:

  $$x &= R_\theta \sin \phi \\
  y &= R_\theta \cos \phi$$
allOf:
  - $ref: "transform-1.1.0"
  - type: object
    properties:
      direction:
        enum: [pix2sky, sky2pix]
        default: pix2sky
...
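To make the zenithal base relations above concrete, here is a small illustrative numpy sketch (editorial, not from the package) of the pixel-to-sky intermediates; note that `arg(a, b)` in the FITS WCS notation used above is the angle of the point (a, b), i.e. `atan2(b, a)`:

# Illustrative only: phi = arg(-y, x) and R_theta = sqrt(x**2 + y**2),
# with phi returned in degrees.
import numpy as np

def zenithal_pix2sky_intermediates(x, y):
    phi = np.degrees(np.arctan2(x, -y))  # arg(-y, x) == atan2(x, -y)
    r_theta = np.hypot(x, y)
    return phi, r_theta

phi, r = zenithal_pix2sky_intermediates(30.0, 0.0)
print(phi, r)  # 90.0 30.0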
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.2.0.yaml0000644000537500020070000000127500000000000027730 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.2.0" title: | Base class of all zenithal (or azimuthal) projections. description: | Zenithal projections are completely specified by defining the radius as a function of native latitude, $R_\theta$. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y, x) \\ R_\theta &= \sqrt{x^2 + y^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin \phi \\ y &= R_\theta \cos \phi$$ allOf: - $ref: "transform-1.2.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.0.0.yaml0000644000537500020070000000162600000000000032105 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.0.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.1.0.yaml0000644000537500020070000000157000000000000032104 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.1.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" ... 
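A quick numerical check (illustrative only, not part of the source tree) that the ZEA pixel-to-sky and sky-to-pixel formulas quoted above are mutual inverses, with theta and R_theta both expressed in degrees:

import numpy as np

def zea_pix2sky_theta(r_theta):
    # theta = 90 - 2 * arcsin(pi * R_theta / 360)
    return 90.0 - 2.0 * np.degrees(np.arcsin(np.pi * r_theta / 360.0))

def zea_sky2pix_r(theta):
    # R_theta = (360 / pi) * sin((90 - theta) / 2)
    return (360.0 / np.pi) * np.sin(np.radians((90.0 - theta) / 2.0))

theta = 37.5
assert np.isclose(zea_pix2sky_theta(zea_sky2pix_r(theta)), theta)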
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.2.0.yaml0000644000537500020070000000157000000000000032105 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.2.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.2.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.0.0.yaml0000644000537500020070000000136600000000000032341 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.0.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.1.0.yaml0000644000537500020070000000133000000000000032331 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.1.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" ... 
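Because the ARC relations above are linear in R_theta, they are easy to spot-check against astropy's implementation of this projection (an illustrative sketch; `Pix2Sky_ARC` is astropy's alias for the zenithal equidistant model, and astropy must be installed):

# Illustrative spot check of theta = 90 - R_theta using astropy's ARC model.
import numpy as np
from astropy.modeling.models import Pix2Sky_ARC

phi, theta = Pix2Sky_ARC()(30.0, 0.0)   # x = 30 deg, y = 0
# With y = 0: R_theta = 30, so theta = 90 - 30 = 60 degrees.
assert np.isclose(theta, 60.0)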
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.2.0.yaml0000644000537500020070000000133000000000000032332 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.2.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.2.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:transform/zenithal-1.2.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.2.0" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.0.0.yaml0000644000537500020070000000262300000000000032335 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.0.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.0.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: type: number description: | Look angle, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.1.0.yaml0000644000537500020070000000262300000000000032336 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.1.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: type: number description: | Look angle, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.2.0.yaml0000644000537500020070000000301700000000000032335 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.2.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.2.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: anyOf: - $ref: "../unit/quantity-1.1.0" - type: number description: | Look angle, in degrees. default: 0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.3.0.yaml0000644000537500020070000000301700000000000032336 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.3.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.3.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. 
  The pixel-to-sky transformation is defined as:

  $$\phi &= \arg(-y \cos \gamma, x) \\
  \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$

  where:

  $$\psi &= \arg(\rho, 1) \\
  \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\
  \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\
  R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$

  And the sky-to-pixel transformation is defined as:

  $$x &= R \sin \phi \\
  y &= -R \sec \gamma \cos \theta$$

  where:

  $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$

  Invertibility: All ASDF tools are required to provide the inverse of
  this transform.
allOf:
  - $ref: "zenithal-1.2.0"
  - type: object
    properties:
      mu:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: |
          Distance from point of projection to center of sphere in
          spherical radii.
        default: 0
      gamma:
        anyOf:
          - $ref: "../unit/quantity-1.1.0"
          - type: number
        description: |
          Look angle, in degrees.
        default: 0
...
asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/unit/
asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/unit/defunit-1.0.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/unit/defunit-1.0.0"
title: Define a new physical unit.
description: |
  Defines a new unit.  It can be used to either:

  - Define a new base unit.

  - Create a new unit name that is equivalent to a given unit.

  The new unit must be defined before any unit tags that use it.
tag: "tag:stsci.edu:asdf/unit/defunit-1.0.0"
type: object
properties:
  name:
    description: The name of the new unit.
    type: string
    pattern: "[A-Za-z_][A-Za-z0-9_]+"
  unit:
    description: |
      The unit that the new name is equivalent to.  It is optional, and
      if not provided, or ``null``, this ``defunit`` defines a new base unit.
    anyOf:
      - $ref: "unit-1.0.0"
      - type: "null"
required: [name]
...
asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/unit/quantity-1.1.0.yaml
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/unit/quantity-1.1.0"
tag: "tag:stsci.edu:asdf/unit/quantity-1.1.0"
title: >
  Represents a Quantity object from astropy
description: |
  A Quantity object represents a value that has some unit
  associated with the number.
examples: - - A quantity consisting of a scalar value and unit - | !unit/quantity-1.1.0 value: 3.14159 unit: km - - A quantity consisting of a single value in an array - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 [2.71828] unit: A - - A quantity with an array of values - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 [1, 2, 3, 4] unit: s - - A quantity with an n-dimensional array of values - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 datatype: float64 data: [[1, 2, 3], [4, 5, 6]] unit: pc type: object properties: value: description: | A vector of one or more values anyOf: - type: number - $ref: "../core/ndarray-1.0.0" unit: description: | The unit corresponding to the values $ref: unit-1.0.0 required: [value, unit] ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/unit/unit-1.0.0.yaml0000644000537500020070000000105700000000000026031 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/unit-1.0.0" title: Physical unit. description: > This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax. examples: - - Example unit - | !unit/unit-1.0.0 "2.1798721 10-18kg m2 s-2" anyOf: - tag: "tag:stsci.edu:asdf/unit/unit-1.0.0" - {} type: string pattern: "[\x00-\x7f]*" ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.0.0.yaml0000644000537500020070000000615000000000000026414 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.0.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.0.0 tag:stsci.edu:asdf/transform/add: 1.0.0 tag:stsci.edu:asdf/transform/affine: 1.0.0 tag:stsci.edu:asdf/transform/airy: 1.0.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/compose: 1.0.0 tag:stsci.edu:asdf/transform/concatenate: 1.0.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.0.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.0.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.0.0 tag:stsci.edu:asdf/transform/constant: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.0.0 tag:stsci.edu:asdf/transform/divide: 1.0.0 tag:stsci.edu:asdf/transform/domain: 1.0.0 tag:stsci.edu:asdf/transform/gnomonic: 1.0.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.0.0 tag:stsci.edu:asdf/transform/healpix: 1.0.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.0.0 tag:stsci.edu:asdf/transform/identity: 1.0.0 tag:stsci.edu:asdf/transform/label_mapper: 1.0.0 tag:stsci.edu:asdf/transform/mercator: 1.0.0 tag:stsci.edu:asdf/transform/molleweide: 1.0.0 tag:stsci.edu:asdf/transform/multiply: 1.0.0 tag:stsci.edu:asdf/transform/parabolic: 1.0.0 tag:stsci.edu:asdf/transform/plate_carree: 1.0.0 
tag:stsci.edu:asdf/transform/polyconic: 1.0.0 tag:stsci.edu:asdf/transform/polynomial: 1.0.0 tag:stsci.edu:asdf/transform/power: 1.0.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/regions_selector: 1.0.0 tag:stsci.edu:asdf/transform/remap_axes: 1.0.0 tag:stsci.edu:asdf/transform/rotate2d: 1.0.0 tag:stsci.edu:asdf/transform/rotate3d: 1.0.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.0.0 tag:stsci.edu:asdf/transform/scale: 1.0.0 tag:stsci.edu:asdf/transform/shift: 1.0.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.0.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.0.0 tag:stsci.edu:asdf/transform/stereographic: 1.0.0 tag:stsci.edu:asdf/transform/subtract: 1.0.0 tag:stsci.edu:asdf/transform/tabular: 1.0.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.0.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.0.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.0.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.0.0 tag:stsci.edu:asdf/wcs/step: 1.0.0 tag:stsci.edu:asdf/wcs/wcs: 1.0.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.1.0.yaml0000644000537500020070000000622000000000000026413 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.0.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.1.0 tag:stsci.edu:asdf/transform/airy: 1.1.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.1.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.1.0 tag:stsci.edu:asdf/transform/constant: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.1.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.1.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 
tag:stsci.edu:asdf/transform/rotate2d: 1.1.0 tag:stsci.edu:asdf/transform/rotate3d: 1.1.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 tag:stsci.edu:asdf/transform/scale: 1.1.0 tag:stsci.edu:asdf/transform/shift: 1.1.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.1.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.1.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.1.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.1.0 tag:stsci.edu:asdf/wcs/wcs: 1.0.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.2.0.yaml0000644000537500020070000000630400000000000026417 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/extension_metadata: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.2.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.2.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.2.0 tag:stsci.edu:asdf/transform/constant: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.2.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.2.0 tag:stsci.edu:asdf/transform/rotate3d: 1.2.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 
tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.1.0 tag:stsci.edu:asdf/wcs/wcs: 1.1.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.3.0.yaml0000644000537500020070000000643400000000000026424 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/extension_metadata: 1.0.0 tag:stsci.edu:asdf/core/externalarray: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/integer: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.2.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.2.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.2.0 tag:stsci.edu:asdf/transform/constant: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.2.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.2.0 tag:stsci.edu:asdf/transform/rotate3d: 1.2.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 
tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.1.0 tag:stsci.edu:asdf/wcs/wcs: 1.1.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.4.0.yaml0000644000537500020070000000721000000000000026416 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/extension_metadata: 1.0.0 tag:stsci.edu:asdf/core/externalarray: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/integer: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/subclass_metadata: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.2.0 tag:stsci.edu:asdf/transform/affine: 1.3.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.3.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.2.0 tag:stsci.edu:asdf/transform/compose: 1.2.0 tag:stsci.edu:asdf/transform/concatenate: 1.2.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.3.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.3.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.3.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.3.0 tag:stsci.edu:asdf/transform/constant: 1.3.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.3.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.3.0 tag:stsci.edu:asdf/transform/divide: 1.2.0 tag:stsci.edu:asdf/transform/fix_inputs: 1.2.0 tag:stsci.edu:asdf/transform/gnomonic: 1.2.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.2.0 tag:stsci.edu:asdf/transform/healpix: 1.2.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.2.0 tag:stsci.edu:asdf/transform/identity: 1.2.0 tag:stsci.edu:asdf/transform/label_mapper: 1.2.0 tag:stsci.edu:asdf/transform/linear1d: 1.0.0 tag:stsci.edu:asdf/transform/math_functions: 1.0.0 tag:stsci.edu:asdf/transform/mercator: 1.2.0 tag:stsci.edu:asdf/transform/molleweide: 1.2.0 tag:stsci.edu:asdf/transform/multiply: 1.2.0 tag:stsci.edu:asdf/transform/multiplyscale: 1.0.0 tag:stsci.edu:asdf/transform/ortho_polynomial: 1.0.0 tag:stsci.edu:asdf/transform/parabolic: 1.2.0 tag:stsci.edu:asdf/transform/plate_carree: 1.2.0 tag:stsci.edu:asdf/transform/polyconic: 1.2.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.2.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.2.0 tag:stsci.edu:asdf/transform/regions_selector: 1.2.0 tag:stsci.edu:asdf/transform/remap_axes: 1.2.0 
tag:stsci.edu:asdf/transform/rotate2d: 1.3.0 tag:stsci.edu:asdf/transform/rotate3d: 1.3.0 tag:stsci.edu:asdf/transform/rotate_sequence_3d: 1.0.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.2.0 tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.2.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/stereographic: 1.2.0 tag:stsci.edu:asdf/transform/subtract: 1.2.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.2.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.3.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.2.0 tag:stsci.edu:asdf/wcs/wcs: 1.2.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.5.0.yaml0000644000537500020070000001156700000000000026431 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/extension_metadata: 1.0.0 tag:stsci.edu:asdf/core/externalarray: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/integer: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/subclass_metadata: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.2.0 tag:stsci.edu:asdf/transform/affine: 1.3.0 tag:stsci.edu:asdf/transform/airy: 1.2.0 tag:stsci.edu:asdf/transform/airy_disk2d: 1.0.0 tag:stsci.edu:asdf/transform/blackbody: 1.0.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.3.0 tag:stsci.edu:asdf/transform/box1d: 1.0.0 tag:stsci.edu:asdf/transform/box2d: 1.0.0 tag:stsci.edu:asdf/transform/broken_power_law1d: 1.0.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.2.0 tag:stsci.edu:asdf/transform/compose: 1.2.0 tag:stsci.edu:asdf/transform/concatenate: 1.2.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.3.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.3.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.3.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.3.0 tag:stsci.edu:asdf/transform/constant: 1.4.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.3.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.3.0 tag:stsci.edu:asdf/transform/disk2d: 1.0.0 tag:stsci.edu:asdf/transform/divide: 1.2.0 tag:stsci.edu:asdf/transform/drude1d: 1.0.0 tag:stsci.edu:asdf/transform/ellipse2d: 1.0.0 tag:stsci.edu:asdf/transform/exponential1d: 1.0.0 tag:stsci.edu:asdf/transform/exponential_cutoff_power_law1d: 1.0.0 tag:stsci.edu:asdf/transform/fix_inputs: 1.2.0 tag:stsci.edu:asdf/transform/gaussian1d: 1.0.0 tag:stsci.edu:asdf/transform/gaussian2d: 1.0.0 tag:stsci.edu:asdf/transform/gnomonic: 1.2.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.2.0 tag:stsci.edu:asdf/transform/healpix: 1.2.0 
tag:stsci.edu:asdf/transform/healpix_polar: 1.2.0 tag:stsci.edu:asdf/transform/identity: 1.2.0 tag:stsci.edu:asdf/transform/king_projected_analytic1d: 1.0.0 tag:stsci.edu:asdf/transform/linear1d: 1.0.0 tag:stsci.edu:asdf/transform/log_parabola1d: 1.0.0 tag:stsci.edu:asdf/transform/logarithmic1d: 1.0.0 tag:stsci.edu:asdf/transform/lorentz1d: 1.0.0 tag:stsci.edu:asdf/transform/math_functions: 1.0.0 tag:stsci.edu:asdf/transform/mercator: 1.2.0 tag:stsci.edu:asdf/transform/moffat1d: 1.0.0 tag:stsci.edu:asdf/transform/moffat2d: 1.0.0 tag:stsci.edu:asdf/transform/molleweide: 1.2.0 tag:stsci.edu:asdf/transform/multiply: 1.2.0 tag:stsci.edu:asdf/transform/multiplyscale: 1.0.0 tag:stsci.edu:asdf/transform/ortho_polynomial: 1.0.0 tag:stsci.edu:asdf/transform/parabolic: 1.2.0 tag:stsci.edu:asdf/transform/planar2d: 1.0.0 tag:stsci.edu:asdf/transform/plate_carree: 1.2.0 tag:stsci.edu:asdf/transform/plummer1d: 1.0.0 tag:stsci.edu:asdf/transform/polyconic: 1.2.0 tag:stsci.edu:asdf/transform/polynomial: 1.2.0 tag:stsci.edu:asdf/transform/power: 1.2.0 tag:stsci.edu:asdf/transform/power_law1d: 1.0.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.2.0 tag:stsci.edu:asdf/transform/redshift_scale_factor: 1.0.0 tag:stsci.edu:asdf/transform/remap_axes: 1.3.0 tag:stsci.edu:asdf/transform/ricker_wavelet1d: 1.0.0 tag:stsci.edu:asdf/transform/ricker_wavelet2d: 1.0.0 tag:stsci.edu:asdf/transform/ring2d: 1.0.0 tag:stsci.edu:asdf/transform/rotate2d: 1.3.0 tag:stsci.edu:asdf/transform/rotate3d: 1.3.0 tag:stsci.edu:asdf/transform/rotate_sequence_3d: 1.0.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.2.0 tag:stsci.edu:asdf/transform/scale: 1.2.0 tag:stsci.edu:asdf/transform/sersic1d: 1.0.0 tag:stsci.edu:asdf/transform/sersic2d: 1.0.0 tag:stsci.edu:asdf/transform/shift: 1.2.0 tag:stsci.edu:asdf/transform/sine1d: 1.0.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.2.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.2.0 tag:stsci.edu:asdf/transform/smoothly_broken_power_law1d: 1.0.0 tag:stsci.edu:asdf/transform/stereographic: 1.2.0 tag:stsci.edu:asdf/transform/subtract: 1.2.0 tag:stsci.edu:asdf/transform/tabular: 1.2.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.2.0 tag:stsci.edu:asdf/transform/trapezoid1d: 1.0.0 tag:stsci.edu:asdf/transform/trapezoid_disk2d: 1.0.0 tag:stsci.edu:asdf/transform/voigt1d: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.2.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.2.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.3.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 ... 
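Version maps like the ones above are what the asdf library consults when a writer pins a file to a particular ASDF Standard version. A minimal sketch (illustrative, not from the source tree; file name is arbitrary and assumes the asdf package is installed):

# Sketch: requesting ASDF Standard 1.5.0 makes the library emit the tag
# versions enumerated in version_map-1.5.0.yaml above.
import asdf

af = asdf.AsdfFile({"data": [1, 2, 3]}, version="1.5.0")
af.write_to("pinned.asdf")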
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/version_map-1.6.0.yaml0000644000537500020070000000141500000000000026421 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/core/asdf: 1.1.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/extension_metadata: 1.0.0 tag:stsci.edu:asdf/core/externalarray: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/integer: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/subclass_metadata: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 ... ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9195096 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/0000755000537500020070000000000000000000000023346 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.0.0.yaml0000644000537500020070000000100000000000000027772 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/celestial_frame-1.0.0" title: > Represents a celestial frame. description: > Represents a celestial frame. allOf: - type: object properties: axes_names: minItems: 2 maxItems: 3 axes_order: minItems: 2 maxItems: 3 unit: minItems: 2 maxItems: 3 - $ref: frame-1.0.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.1.0.yaml0000644000537500020070000000100000000000000027773 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0" title: > Represents a celestial frame. description: > Represents a celestial frame. allOf: - type: object properties: axes_names: minItems: 2 maxItems: 3 axes_order: minItems: 2 maxItems: 3 unit: minItems: 2 maxItems: 3 - $ref: frame-1.1.0 ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643993368.0 asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.0.0.yaml0000644000537500020070000000102400000000000030035 0ustar00wjamiesonSTSCI\science%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/composite_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/composite_frame-1.0.0" title: > Represents a set of frames. description: > Represents a set of frames. allOf: - type: object properties: name: description: Name of composite frame. type: string frames: description: List of frames in the composite frame. type: array - $ref: frame-1.0.0 ... 
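The composite frame schema above simply nests other frames in its "frames" list. For reference, this is how such a frame is typically constructed in Python with the gwcs package, whose frame classes these legacy wcs/* schemas were written for (an illustrative sketch; the exact gwcs constructor arguments are assumptions, and gwcs plus astropy must be installed):

# Sketch: a composite frame bundling a celestial and a spectral frame,
# matching the "frames" list in the composite_frame schemas above.
import astropy.units as u
from astropy.coordinates import ICRS
from gwcs import coordinate_frames as cf

sky = cf.CelestialFrame(reference_frame=ICRS(), axes_order=(0, 1),
                        unit=(u.deg, u.deg), name="sky")
spec = cf.SpectralFrame(axes_order=(2,), unit=(u.um,), name="wavelength")
composite = cf.CompositeFrame([sky, spec], name="sky_and_wavelength")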
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.1.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0"
tag: "tag:stsci.edu:asdf/wcs/composite_frame-1.1.0"
title: >
  Represents a set of frames.
description: >
  Represents a set of frames.
allOf:
  - type: object
    properties:
      name:
        description: Name of composite frame.
        type: string
      frames:
        description: List of frames in the composite frame.
        type: array
...
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.0.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/frame-1.0.0"
title: |
  The base class of all coordinate frames.
description: |
  These objects are designed to be nested in arbitrary ways to build up
  transformation pipelines out of a number of low-level pieces.

  Most of these coordinate frames are defined in
  [IERS conventions](http://www.iers.org/IERS/EN/Publications/TechnicalNotes/tn36.html).
examples:
  -
    - |
      A celestial frame in the FK4 reference frame.
    - |
      !wcs/celestial_frame-1.0.0
        axes_names: [ra, dec]
        name: CelestialFrame
        reference_frame:
          type: FK4
          equinox: !time/time-1.0.0 '2010-01-01 00:00:00.000'
          obstime: !time/time-1.0.0 '2015-01-01 00:00:00.000'
        unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
type: object
properties:
  name:
    description: |
      A user-friendly name for the frame.
    type: string

  axes_order:
    description: |
      The order of the axes.
    type: array
    items:
      type: integer

  axes_names:
    description: |
      The name of each axis in this frame.
    type: array
    items:
      anyOf:
        - type: string
        - type: 'null'

  reference_frame:
    description: |
      The reference frame.
    type: object
    properties:
      type:
        description: |
          The reference frame type.  Some reference frame types require
          additional properties, listed next to each reference frame
          type below.

          The reference frame types are:

          - `ICRS`
          - `FK5`: `equinox`.
          - `FK4`: `equinox` and optionally `obstime`.
          - `FK4_noeterms`: `equinox` and optionally `obstime`.
          - `galactic`
          - `galactocentric`: `galcen_distance`, `galcen_ra`,
            `galcen_dec`, `z_sun` and `roll`.
          - `GCRS`: `obstime`, `obsgeoloc`, and `obsgeovel`.
          - `CIRS`: `obstime`.
          - `ITRS`: `obstime`.
          - `precessed_geocentric`: `obstime`, `obsgeoloc`, and
            `obsgeovel`.
        enum: [ICRS, FK5, FK4, FK4_noeterms, galactic, galactocentric,
               GCRS, CIRS, ITRS, precessed_geocentric]
        default: ICRS

      equinox:
        description: |
          The equinox of the reference frame.  Required when
          `reference_frame` is one of: `FK5`, `FK4`, `FK4_noeterms`
        $ref: ../time/time-1.0.0

      obstime:
        description: |
          The observation time of the reference frame, used to determine
          the location of the Earth.  Required when `reference_frame` is
          one of: `FK4`, `FK4_noeterms`, `GCRS`, `CIRS`, `ITRS`.
          If not provided, it defaults to the same value as `equinox`.
        $ref: ../time/time-1.0.0

      galcen_distance:
        description: |
          The distance from the Sun to the Galactic center.  Required
          when `reference_frame` is `galactocentric`.
        type: array
        items:
          - type: number
          - $ref: ../unit/unit-1.0.0
            default: pc

      galcen_ra:
        description: |
          The Right Ascension (RA) of the Galactic center in the ICRS
          frame.  Required when `reference_frame` is `galactocentric`.
        type: array
        items:
          - type: number
          - $ref: ../unit/unit-1.0.0
            default: deg

      galcen_dec:
        description: |
          The Declination (DEC) of the Galactic center in the ICRS
          frame.  Required when `reference_frame` is `galactocentric`.
        type: array
        items:
          - type: number
          - $ref: ../unit/unit-1.0.0
            default: deg

      z_sun:
        description: |
          The distance from the sun to the galactic midplane.  Required
          when `reference_frame` is `galactocentric`.
        type: array
        items:
          - type: number
          - $ref: ../unit/unit-1.0.0
            default: pc

      roll:
        description: |
          The angle to rotate about the final x-axis, relative to the
          orientation for `galactic`.  Required when `reference_frame`
          is `galactocentric`.
        type: array
        items:
          - type: number
          - $ref: ../unit/unit-1.0.0
            default: deg

      obsgeoloc:
        description: |
          3-vector giving the position of the observer relative to the
          center-of-mass of the Earth, oriented the same as BCRS/ICRS.
          Defaults to `[0, 0, 0]`, meaning "true" GCRS.  Used when
          `reference_frame` is `GCRS` or `precessed_geocentric`.
        type: array
        items:
          - type: array
            items:
              type: number
            minItems: 3
            maxItems: 3
          - $ref: ../unit/unit-1.0.0
            default: m
        default:
          - [0, 0, 0]

      obsgeovel:
        description: |
          3-vector giving the velocity of the observer relative to the
          center-of-mass of the Earth, oriented the same as BCRS/ICRS.
          Defaults to `[0, 0, 0]`, meaning "true" GCRS.  Used when
          `reference_frame` is `GCRS` or `precessed_geocentric`.
        type: array
        items:
          - type: array
            items:
              type: number
            minItems: 3
            maxItems: 3
          - $ref: ../unit/unit-1.0.0
            default: m/s
        default:
          - [0, 0, 0]
    required: [type]

  unit:
    description: |
      Units for each axis.
    type: array
    items:
      $ref: ../unit/unit-1.0.0

required: [name]
additionalProperties: true
...
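These legacy wcs schemas still ship with this package, so they can be exercised directly. A minimal sketch, assuming the schema id resolves through `asdf.schema.load_schema` the same way the package loads its other bundled schemas; the node mirrors the FK4 example embedded in frame-1.0.0 above:

```python
import asdf.schema

# Load the bundled frame schema by id, inlining the $ref'd time and
# unit schemas so plain-dict validation works.
schema = asdf.schema.load_schema(
    "http://stsci.edu/schemas/asdf/wcs/frame-1.0.0",
    resolve_references=True,
)

node = {
    "name": "CelestialFrame",
    "axes_names": ["ra", "dec"],
    "reference_frame": {
        "type": "FK4",
        "equinox": "2010-01-01 00:00:00.000",
    },
}

# Raises a jsonschema.ValidationError if the node does not conform.
asdf.schema.validate(node, schema=schema)
```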
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.1.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/frame-1.1.0"
title: |
  The base class of all coordinate frames.
description: |
  These objects are designed to be nested in arbitrary ways to build up
  transformation pipelines out of a number of low-level pieces.

  Most of these coordinate frames are defined in
  [IERS conventions](http://www.iers.org/IERS/EN/Publications/TechnicalNotes/tn36.html).
examples:
  -
    - |
      A celestial frame in the FK4 reference frame.
    - |
      !wcs/celestial_frame-1.1.0
        axes_names: [ra, dec]
        name: CelestialFrame
        reference_frame:
          type: FK4
          equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000'
          obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000'
        unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg]
type: object
properties:
  name:
    description: |
      A user-friendly name for the frame.
    type: string

  axes_order:
    description: |
      The order of the axes.
    type: array
    items:
      type: integer

  axes_names:
    description: |
      The name of each axis in this frame.
    type: array
    items:
      anyOf:
        - type: string
        - type: 'null'

  reference_frame:
    description: |
      The reference frame.
    type: object
    properties:
      type:
        description: |
          The reference frame type.  Some reference frame types require
          additional properties, listed next to each reference frame
          type below.

          The reference frame types are:

          - `ICRS`
          - `FK5`: `equinox`.
          - `FK4`: `equinox` and optionally `obstime`.
          - `FK4_noeterms`: `equinox` and optionally `obstime`.
          - `galactic`
          - `galactocentric`: `galcen_distance`, `galcen_ra`,
            `galcen_dec`, `z_sun` and `roll`.
          - `GCRS`: `obstime`, `obsgeoloc`, and `obsgeovel`.
          - `CIRS`: `obstime`.
          - `ITRS`: `obstime`.
          - `precessed_geocentric`: `obstime`, `obsgeoloc`, and
            `obsgeovel`.
        enum: [ICRS, FK5, FK4, FK4_noeterms, galactic, galactocentric,
               GCRS, CIRS, ITRS, precessed_geocentric]
        default: ICRS

      equinox:
        description: |
          The equinox of the reference frame.  Required when
          `reference_frame` is one of: `FK5`, `FK4`, `FK4_noeterms`
        $ref: ../time/time-1.1.0

      obstime:
        description: |
          The observation time of the reference frame, used to determine
          the location of the Earth.  Required when `reference_frame` is
          one of: `FK4`, `FK4_noeterms`, `GCRS`, `CIRS`, `ITRS`.
          If not provided, it defaults to the same value as `equinox`.
        $ref: ../time/time-1.1.0

      galcen_distance:
        description: |
          The distance from the Sun to the Galactic center.  Required
          when `reference_frame` is `galactocentric`.
        $ref: ../unit/quantity-1.1.0

      galcen_coord:
        description: |
          The ICRS coordinates of the Galactic center.  Required when
          `reference_frame` is `galactocentric`.
        $ref: icrs_coord-1.1.0

      galcen_v_sun:
        description: |
          The velocity of the sun in the galactocentric frame as
          Cartesian velocity components.
        type: array
        items:
          $ref: ../unit/quantity-1.1.0
        minItems: 3
        maxItems: 3
        default:
          - { value: [0], unit: km/s }
          - { value: [0], unit: km/s }
          - { value: [0], unit: km/s }

      z_sun:
        description: |
          The distance from the sun to the galactic midplane.  Required
          when `reference_frame` is `galactocentric`.
        $ref: ../unit/quantity-1.1.0

      roll:
        description: |
          The angle to rotate about the final x-axis, relative to the
          orientation for `galactic`.  Required when `reference_frame`
          is `galactocentric`.
        $ref: ../unit/quantity-1.1.0

      obsgeoloc:
        description: |
          3-vector giving the position of the observer relative to the
          center-of-mass of the Earth, oriented the same as BCRS/ICRS.
          Defaults to `[0, 0, 0]`, meaning "true" GCRS.  Used when
          `reference_frame` is `GCRS` or `precessed_geocentric`.
        type: array
        items:
          $ref: ../unit/quantity-1.1.0
        minItems: 3
        maxItems: 3
        default:
          - { value: [0], unit: m }
          - { value: [0], unit: m }
          - { value: [0], unit: m }

      obsgeovel:
        description: |
          3-vector giving the velocity of the observer relative to the
          center-of-mass of the Earth, oriented the same as BCRS/ICRS.
          Defaults to `[0, 0, 0]`, meaning "true" GCRS.  Used when
          `reference_frame` is `GCRS` or `precessed_geocentric`.
        type: array
        items:
          $ref: ../unit/quantity-1.1.0
        minItems: 3
        maxItems: 3
        default:
          - { value: [0], unit: m/s }
          - { value: [0], unit: m/s }
          - { value: [0], unit: m/s }
    required: [type]

  unit:
    description: |
      Units for each axis.
    type: array
    items:
      $ref: ../unit/unit-1.0.0

required: [name]
additionalProperties: true
...

==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/icrs_coord-1.1.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0"
tag: "tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0"
title: |
  Represents an ICRS coordinate object from astropy
description:
  This object represents the right ascension (RA) and declination of an
  ICRS coordinate or frame.  The astropy ICRS class contains additional
  fields that may be useful to add here in the future.
type: object
properties:
  ra:
    type: object
    description: |
      A longitude representing the right ascension of the ICRS coordinate
    properties:
      value:
        type: number
      unit:
        $ref: ../unit/unit-1.0.0
        default: deg
      wrap_angle:
        $ref: ../unit/quantity-1.1.0
        default: "360 deg"
  dec:
    type: object
    description: |
      A latitude representing the declination of the ICRS coordinate
    properties:
      value:
        type: number
      unit:
        $ref: ../unit/unit-1.0.0
        default: deg
required: [ra, dec]
...
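The `galcen_coord` property of frame-1.1.0 above refs this icrs_coord schema. A sketch of a conforming node as a plain Python mapping; the RA/Dec values are illustrative numbers near the Galactic center, not anything taken from this package:

```python
# A node matching wcs/icrs_coord-1.1.0: RA and Dec, each an object with
# a numeric value and a unit (the schema defaults the unit to degrees).
galcen_coord = {
    "ra": {"value": 266.4051, "unit": "deg"},
    "dec": {"value": -28.936175, "unit": "deg"},
}
```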
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.0.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.0.0"
tag: "tag:stsci.edu:asdf/wcs/spectral_frame-1.0.0"
title: >
  Represents a spectral frame.
description: >
  Represents a spectral frame.
allOf:
  - type: object
    properties:
      reference_position:
        description: |
          The position of the reference frame.
        enum: [geocenter, barycenter, heliocenter]
        default: geocenter
      axes_names:
        minItems: 1
        maxItems: 1
      axes_order:
        minItems: 1
        maxItems: 1
      unit:
        minItems: 1
        maxItems: 1
  - $ref: frame-1.0.0
...

==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.1.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0"
tag: "tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0"
title: >
  Represents a spectral frame.
description: >
  Represents a spectral frame.
allOf:
  - type: object
    properties:
      reference_position:
        description: |
          The position of the reference frame.
        enum: [geocenter, barycenter, heliocenter]
        default: geocenter
      axes_names:
        minItems: 1
        maxItems: 1
      axes_order:
        minItems: 1
        maxItems: 1
      unit:
        minItems: 1
        maxItems: 1
  - $ref: frame-1.1.0
...

==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.0.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/step-1.0.0"
tag: "tag:stsci.edu:asdf/wcs/step-1.0.0"
title: >
  Describes a single step of a WCS transform pipeline.
description: >
  Describes a single step of a WCS transform pipeline.
examples: []
type: object
properties:
  frame:
    description: |
      The frame of the inputs to the transform.
    anyOf:
      - type: string
      - $ref: frame-1.0.0
  transform:
    description: |
      The transform from this step to the next one.  The last step in a
      WCS should not have a transform, but exists only to describe the
      frames and units of the final output axes.
    anyOf:
      - $ref: ../transform/transform-1.0.0
      - type: 'null'
    default: null
required: [frame]
...
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.1.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/step-1.1.0"
tag: "tag:stsci.edu:asdf/wcs/step-1.1.0"
title: >
  Describes a single step of a WCS transform pipeline.
description: >
  Describes a single step of a WCS transform pipeline.
examples: []
type: object
properties:
  frame:
    description: |
      The frame of the inputs to the transform.
    anyOf:
      - type: string
      - $ref: frame-1.1.0
  transform:
    description: |
      The transform from this step to the next one.  The last step in a
      WCS should not have a transform, but exists only to describe the
      frames and units of the final output axes.
    anyOf:
      - $ref: ../transform/transform-1.1.0
      - type: 'null'
    default: null
required: [frame]
...

==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.2.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/step-1.2.0"
tag: "tag:stsci.edu:asdf/wcs/step-1.2.0"
title: >
  Describes a single step of a WCS transform pipeline.
description: >
  Describes a single step of a WCS transform pipeline.
examples: []
type: object
properties:
  frame:
    description: |
      The frame of the inputs to the transform.
    anyOf:
      - type: string
      - $ref: frame-1.1.0
  transform:
    description: |
      The transform from this step to the next one.  The last step in a
      WCS should not have a transform, but exists only to describe the
      frames and units of the final output axes.
    anyOf:
      - $ref: ../transform/transform-1.2.0
      - type: 'null'
    default: null
required: [frame]
...

==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.0.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.0.0"
tag: "tag:stsci.edu:asdf/wcs/wcs-1.0.0"
title: >
  A system for describing generalized world coordinate transformations.
description: >
  ASDF WCS is a way of specifying transformations (usually from detector
  space to world coordinate space and back) by using the transformations
  in the `transform-schema` module.
type: object
properties:
  name:
    description: |
      A descriptive name for this WCS.
    type: string
  steps:
    description: |
      A list of steps in the forward transformation from detector to
      world coordinates.  The inverse transformation is determined
      automatically by reversing this list, and inverting each of the
      individual transforms according to the rules described in
      [inverse](ref:http://stsci.edu/schemas/asdf/transform/transform-1.0.0/properties/inverse).
    type: array
    items:
      $ref: step-1.0.0
required: [name, steps]
additionalProperties: true
...
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.1.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.1.0"
tag: "tag:stsci.edu:asdf/wcs/wcs-1.1.0"
title: >
  A system for describing generalized world coordinate transformations.
description: >
  ASDF WCS is a way of specifying transformations (usually from detector
  space to world coordinate space and back) by using the transformations
  in the `transform-schema` module.
type: object
properties:
  name:
    description: |
      A descriptive name for this WCS.
    type: string
  steps:
    description: |
      A list of steps in the forward transformation from detector to
      world coordinates.  The inverse transformation is determined
      automatically by reversing this list, and inverting each of the
      individual transforms according to the rules described in
      [inverse](ref:transform/transform-1.1.0:inverse).
    type: array
    items:
      $ref: step-1.1.0
required: [name, steps]
additionalProperties: true
...

==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.2.0.yaml ====
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.2.0"
tag: "tag:stsci.edu:asdf/wcs/wcs-1.2.0"
title: >
  A system for describing generalized world coordinate transformations.
description: >
  ASDF WCS is a way of specifying transformations (usually from detector
  space to world coordinate space and back) by using the transformations
  in the `transform-schema` module.
type: object
properties:
  name:
    description: |
      A descriptive name for this WCS.
    type: string
  steps:
    description: |
      A list of steps in the forward transformation from detector to
      world coordinates.  The inverse transformation is determined
      automatically by reversing this list, and inverting each of the
      individual transforms according to the rules described in
      [inverse](ref:transform/transform-1.2.0:inverse).
    type: array
    items:
      $ref: step-1.2.0
required: [name, steps]
additionalProperties: true
...
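Read together, the step and wcs schemas describe a forward pipeline whose inverse is obtained by reversing the step list and inverting each transform. A sketch of the resulting tree shape as a Python dict; the frame names are illustrative, and `None` stands in for the tagged transform object a real file would carry in every step but the last:

```python
# Shape of a wcs-1.x node: a named list of steps.  Each step names the
# frame of its inputs; only the final step omits the transform, since
# it exists just to describe the output frame and units.
wcs_node = {
    "name": "detector_to_sky",
    "steps": [
        {"frame": "detector", "transform": None},  # placeholder transform
        {"frame": "icrs", "transform": None},      # last step: frame only
    ],
}
```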
==== asdf-2.9.2/asdf-standard/schemas/stsci.edu/yaml-schema/draft-01.yaml ====
%YAML 1.1
---
$schema: "http://json-schema.org/draft-04/schema"
id: "http://stsci.edu/schemas/yaml-schema/draft-01"
title: YAML Schema
description: |
  A metaschema extending JSON Schema's metaschema to add support for
  some YAML-specific constructions.

allOf:
  - $ref: "http://json-schema.org/draft-04/schema"

  - type: object
    properties:
      tag:
        description: |
          A fully-qualified YAML tag name that should be associated with
          the object type returned by the YAML parser; for example, the
          object must be an instance of the class registered with the
          parser to create instances of objects with this tag.
          Implementation of this validator is optional and depends on
          details of the YAML parser.
        type: string
        minLength: 6

      propertyOrder:
        description: |
          Specifies the default order of the properties when writing
          out.  Any keys not listed in **propertyOrder** will be in
          arbitrary order at the end.  This field applies only to nodes
          with **object** type.
        type: array
        items:
          type: string

      flowStyle:
        description: |
          Specifies the default serialization style to use for an array
          or object.  YAML supports multiple styles for arrays/sequences
          and objects/maps, called "block style" and "flow style".  For
          example::

            Block style: !!map
              Clark : Evans
              Ingy  : döt Net
              Oren  : Ben-Kiki

            Flow style: !!map { Clark: Evans, Ingy: döt Net, Oren: Ben-Kiki }

          This property gives a hint to the tool outputting the YAML
          which style to use.  If not provided, the library is free to
          use whatever heuristics it wishes to determine the output
          style.  This property does not enforce any particular style on
          YAML being parsed.
        type: string
        enum: [block, flow]

      style:
        description: |
          Specifies the default serialization style to use for a string.
          YAML supports multiple styles for strings:

          ```yaml
          Inline style: "First line\nSecond line"

          Literal style: |
            First line
            Second line

          Folded style: >
            First line
            Second line
          ```

          This property gives a hint to the tool outputting the YAML
          which style to use.  If not provided, the library is free to
          use whatever heuristics it wishes to determine the output
          style.  This property does not enforce any particular style on
          YAML being parsed.
        type: string
        enum: [inline, literal, folded]

      examples:
        description: |
          A list of examples to help document the schema.  Each pair is
          a prose description followed by a string containing YAML
          content.  For example:

          ```yaml
          examples:
            - - Complex number: 1 real, -1 imaginary
              - "!complex 1-1j"
          ```
        type: array
        items:
          type: array
          items:
            - type: string
            - anyOf:
                - type: string
                - type: object

      # Redefine JSON schema validators in terms of this document so
      # that we can check nested objects:
      additionalItems:
        anyOf:
          - type: boolean
          - $ref: "#"
      items:
        anyOf:
          - $ref: "#"
          - $ref: "#/definitions/schemaArray"
      additionalProperties:
        anyOf:
          - type: boolean
          - $ref: "#"
      definitions:
        type: object
        additionalProperties:
          $ref: "#"
      properties:
        type: object
        additionalProperties:
          $ref: "#"
      patternProperties:
        type: object
        additionalProperties:
          $ref: "#"
      dependencies:
        type: object
        additionalProperties:
          anyOf:
            - $ref: "#"
            - $ref: "http://json-schema.org/draft-04/schema#definitions/stringArray"
      allOf:
        $ref: "#/definitions/schemaArray"
      anyOf:
        $ref: "#/definitions/schemaArray"
      oneOf:
        $ref: "#/definitions/schemaArray"
      not:
        $ref: "#"

definitions:
  schemaArray:
    type: array
    minItems: 1
    items:
      $ref: "#"
...
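The keywords this metaschema adds (`tag`, `propertyOrder`, `flowStyle`, `style`) are hints for serialization, not constraints on parsed YAML. A sketch of a custom schema fragment that uses them, written as a Python dict with made-up property names:

```python
# A hypothetical schema fragment using the YAML-specific keywords from
# yaml-schema/draft-01.  "center", "size", and "label" are invented.
region_schema = {
    "type": "object",
    "propertyOrder": ["center", "size"],   # write center before size
    "properties": {
        "center": {
            "type": "array",
            "flowStyle": "flow",           # hint: emit as [x, y]
            "items": {"type": "number"},
        },
        "size": {"type": "number"},
        "label": {
            "type": "string",
            "style": "literal",            # hint: emit as a | block scalar
        },
    },
}
```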
==== asdf-2.9.2/asdf.egg-info/PKG-INFO ====
Metadata-Version: 2.1
Name: asdf
Version: 2.9.2
Summary: Python implementation of the ASDF Standard
Home-page: http://github.com/asdf-format/asdf
Author: The ASDF Developers
Author-email: help@stsci.edu
License: BSD-3-Clause
Project-URL: Bug Tracker, https://github.com/asdf-format/asdf/issues
Project-URL: Documentation, https://asdf.readthedocs.io/en/stable
Project-URL: Source Code, https://github.com/asdf-format/asdf
Platform: UNKNOWN
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Development Status :: 5 - Production/Stable
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
Provides-Extra: all
Provides-Extra: docs
Provides-Extra: tests
License-File: LICENSE

ASDF - Advanced Scientific Data Format
======================================

.. image:: https://github.com/asdf-format/asdf/workflows/CI/badge.svg
    :target: https://github.com/asdf-format/asdf/actions
    :alt: CI Status

.. image:: https://github.com/asdf-format/asdf/workflows/s390x/badge.svg
    :target: https://github.com/asdf-format/asdf/actions
    :alt: s390x Status

.. image:: https://github.com/asdf-format/asdf/workflows/Downstream/badge.svg
    :target: https://github.com/asdf-format/asdf/actions
    :alt: Downstream CI Status

.. image:: https://readthedocs.org/projects/asdf/badge/?version=latest
    :target: https://asdf.readthedocs.io/en/latest/

.. image:: https://codecov.io/gh/asdf-format/asdf/branch/master/graphs/badge.svg
    :target: https://codecov.io/gh/asdf-format/asdf

.. image:: https://img.shields.io/pypi/l/asdf.svg
    :target: https://img.shields.io/pypi/l/asdf.svg

|

.. _begin-summary-text:

The **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat (ASDF) is a
next-generation interchange format for scientific data.  This package
contains the Python implementation of the ASDF Standard.  More
information on the ASDF Standard itself can be found `here `__.

The ASDF format has the following features:

* A hierarchical, human-readable metadata format (implemented using
  `YAML `__)
* Numerical arrays are stored as binary data blocks which can be memory
  mapped.  Data blocks can optionally be compressed.
* The structure of the data can be automatically validated using schemas
  (implemented using `JSON Schema `__)
* Native Python data types (numerical types, strings, dicts, lists) are
  serialized automatically
* ASDF can be extended to serialize custom data types

.. _end-summary-text:

ASDF is under active development `on github `__.  More information on
contributing can be found `below <#contributing>`__.

Overview
--------

This section outlines basic use cases of the ASDF package for creating
and reading ASDF files.

Creating a file
~~~~~~~~~~~~~~~

.. _begin-create-file-text:

We're going to store several `numpy` arrays and other data to an ASDF
file.  We do this by creating a "tree", which is simply a `dict`, and we
provide it as input to the constructor of `AsdfFile`:

.. code:: python

    import asdf
    import numpy as np

    # Create some data
    sequence = np.arange(100)
    squares = sequence**2
    random = np.random.random(100)

    # Store the data in an arbitrarily nested dictionary
    tree = {
        'foo': 42,
        'name': 'Monty',
        'sequence': sequence,
        'powers': { 'squares' : squares },
        'random': random
    }

    # Create the ASDF file object from our data tree
    af = asdf.AsdfFile(tree)

    # Write the data to a new file
    af.write_to('example.asdf')

If we open the newly created file, we can see some of the key features
of ASDF on display:

::

    #ASDF 1.0.0
    #ASDF_STANDARD 1.2.0
    %YAML 1.1
    %TAG ! tag:stsci.edu:asdf/
    --- !core/asdf-1.1.0
    asdf_library: !core/software-1.0.0 {author: The ASDF Developers,
      homepage: 'http://github.com/asdf-format/asdf', name: asdf, version: 2.0.0}
    history:
      extensions:
      - !core/extension_metadata-1.0.0
        extension_class: asdf.extension.BuiltinExtension
        software: {name: asdf, version: 2.0.0}
    foo: 42
    name: Monty
    powers:
      squares: !core/ndarray-1.0.0
        source: 1
        datatype: int64
        byteorder: little
        shape: [100]
    random: !core/ndarray-1.0.0
      source: 2
      datatype: float64
      byteorder: little
      shape: [100]
    sequence: !core/ndarray-1.0.0
      source: 0
      datatype: int64
      byteorder: little
      shape: [100]
    ...

The metadata in the file mirrors the structure of the tree that was
stored.  It is hierarchical and human-readable.  Notice that metadata
has been added to the tree that was not explicitly given by the user.
Notice also that the numerical array data is not stored in the metadata
tree itself.  Instead, it is stored as binary data blocks below the
metadata section (not shown here).
It is possible to compress the array data when writing the file:

.. code:: python

    af.write_to('compressed.asdf', all_array_compression='zlib')

The built-in compression algorithms are ``'zlib'`` and ``'bzp2'``.  The
``'lz4'`` algorithm becomes available when the `lz4 `__ package is
installed.  Other compression algorithms may be available via
extensions.

.. _end-create-file-text:

Reading a file
~~~~~~~~~~~~~~

.. _begin-read-file-text:

To read an existing ASDF file, we simply use the top-level `open`
function of the `asdf` package:

.. code:: python

    import asdf

    af = asdf.open('example.asdf')

The `open` function also works as a context manager:

.. code:: python

    with asdf.open('example.asdf') as af:
        ...

To get a quick overview of the data stored in the file, use the
top-level `AsdfFile.info()` method:

.. code:: python

    >>> import asdf
    >>> af = asdf.open('example.asdf')
    >>> af.info()
    root (AsdfObject)
    ├─asdf_library (Software)
    │ ├─author (str): The ASDF Developers
    │ ├─homepage (str): http://github.com/asdf-format/asdf
    │ ├─name (str): asdf
    │ └─version (str): 2.8.0
    ├─history (dict)
    │ └─extensions (list)
    │   └─[0] (ExtensionMetadata)
    │     ├─extension_class (str): asdf.extension.BuiltinExtension
    │     └─software (Software)
    │       ├─name (str): asdf
    │       └─version (str): 2.8.0
    ├─foo (int): 42
    ├─name (str): Monty
    ├─powers (dict)
    │ └─squares (NDArrayType): shape=(100,), dtype=int64
    ├─random (NDArrayType): shape=(100,), dtype=float64
    └─sequence (NDArrayType): shape=(100,), dtype=int64

The `AsdfFile` behaves like a Python `dict`, and nodes are accessed like
any other dictionary entry:

.. code:: python

    >>> af['name']
    'Monty'
    >>> af['powers']
    {'squares': }

Array data remains unloaded until it is explicitly accessed:

.. code:: python

    >>> af['powers']['squares']
    array([   0,    1,    4,    9,   16,   25,   36,   49,   64,   81,  100,
            121,  144,  169,  196,  225,  256,  289,  324,  361,  400,  441,
            484,  529,  576,  625,  676,  729,  784,  841,  900,  961, 1024,
           1089, 1156, 1225, 1296, 1369, 1444, 1521, 1600, 1681, 1764, 1849,
           1936, 2025, 2116, 2209, 2304, 2401, 2500, 2601, 2704, 2809, 2916,
           3025, 3136, 3249, 3364, 3481, 3600, 3721, 3844, 3969, 4096, 4225,
           4356, 4489, 4624, 4761, 4900, 5041, 5184, 5329, 5476, 5625, 5776,
           5929, 6084, 6241, 6400, 6561, 6724, 6889, 7056, 7225, 7396, 7569,
           7744, 7921, 8100, 8281, 8464, 8649, 8836, 9025, 9216, 9409, 9604,
           9801])

    >>> import numpy as np
    >>> expected = [x**2 for x in range(100)]
    >>> np.equal(af['powers']['squares'], expected).all()
    True

By default, uncompressed data blocks are memory mapped for efficient
access.  Memory mapping can be disabled by using the ``copy_arrays``
option of `open` when reading:

.. code:: python

    af = asdf.open('example.asdf', copy_arrays=True)

.. _end-read-file-text:

For more information and for advanced usage examples, see the
`documentation <#documentation>`__.

Extending ASDF
~~~~~~~~~~~~~~

Out of the box, the ``asdf`` package automatically serializes and
deserializes native Python types.  It is possible to extend ``asdf`` by
implementing custom tags that correspond to custom user types.  More
information on extending ASDF can be found in the `official
documentation `__.
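A minimal sketch of such an extension, using the `Converter`/`Extension` classes from `asdf.extension`; the tag and extension URIs are illustrative, and a real extension would also register a schema for the tag (compare the `fraction-1.0.0` test data listed in SOURCES.txt below):

```python
import fractions

import asdf
from asdf.extension import Converter, Extension


class FractionConverter(Converter):
    # The tag URI is an invented example, not one shipped by this package.
    tags = ["asdf://example.com/tags/fraction-1.0.0"]
    types = [fractions.Fraction]

    def to_yaml_tree(self, obj, tag, ctx):
        return {"numerator": obj.numerator, "denominator": obj.denominator}

    def from_yaml_tree(self, node, tag, ctx):
        return fractions.Fraction(node["numerator"], node["denominator"])


class FractionExtension(Extension):
    extension_uri = "asdf://example.com/extensions/fraction-1.0.0"
    tags = ["asdf://example.com/tags/fraction-1.0.0"]
    converters = [FractionConverter()]


# Register for this session; installed packages register through an
# entry point instead, as shown by entry_points.txt further down.
asdf.get_config().add_extension(FractionExtension())
```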
Installation
------------

.. _begin-pip-install-text:

Stable releases of the ASDF Python package are registered `at PyPI `__.
The latest stable version can be installed using ``pip``:

::

    $ pip install asdf

.. _begin-source-install-text:

The latest development version of ASDF is available from the ``master``
branch `on github `__.  To clone the project:

::

    $ git clone https://github.com/asdf-format/asdf

To install:

::

    $ cd asdf
    $ git submodule update --init
    $ pip install .

To install in `development mode `__::

    $ pip install -e .

.. note::

    The source repository makes use of a git submodule for referencing
    the schemas provided by the ASDF standard.  While this submodule is
    automatically initialized when installing the package (including in
    development mode), it may be necessary for developers to manually
    update the submodule if changes are made upstream.  See the
    `documentation on git submodules `__ for more information.

.. _end-source-install-text:

Testing
-------

.. _begin-testing-text:

To install the test dependencies from a source checkout of the
repository:

::

    $ pip install -e ".[tests]"

To run the unit tests from a source checkout of the repository:

::

    $ pytest

It is also possible to run the test suite from an installed version of
the package.

::

    $ pip install "asdf[tests]"
    $ pytest --pyargs asdf

It is also possible to run the tests using `tox `__.

::

    $ pip install tox

To list all available environments:

::

    $ tox -va

To run a specific environment:

::

    $ tox -e <envname>

.. _end-testing-text:

Documentation
-------------

More detailed documentation on this software package can be found
`here `__.  More information on the ASDF Standard itself can be found
`here `__.

There are two mailing lists for ASDF:

* `asdf-users `_
* `asdf-developers `_

If you are looking for the **A**\ daptable **S**\ eismic **D**\ ata
**F**\ ormat, information can be found `here `__.

Contributing
------------

We welcome feedback and contributions to the project.  Contributions of
code, documentation, or general feedback are all appreciated.  Please
follow the `contributing guidelines `__ to submit an issue or a pull
request.

We strive to provide a welcoming community to all of our users by
abiding by the `Code of Conduct `__.
==== asdf-2.9.2/asdf.egg-info/SOURCES.txt ====
CHANGES.rst CODE_OF_CONDUCT.md CONTRIBUTING.md LICENSE MANIFEST.in README.rst conftest.py pyproject.toml setup.cfg setup.py tox.ini .github/workflows/changelog.yml .github/workflows/ci.yml .github/workflows/downstream.yml .github/workflows/publish-to-pypi.yml .github/workflows/s390x.yml asdf/__init__.py asdf/_convenience.py asdf/_display.py asdf/_helpers.py asdf/asdf.py asdf/asdftypes.py asdf/block.py asdf/compression.py asdf/config.py asdf/conftest.py asdf/constants.py asdf/entry_points.py asdf/exceptions.py asdf/fits_embed.py asdf/generic_io.py asdf/reference.py asdf/resolver.py asdf/resource.py asdf/schema.py asdf/search.py asdf/stream.py asdf/tagged.py asdf/treeutil.py asdf/type_index.py asdf/types.py asdf/util.py asdf/version.py asdf/versioning.py asdf/yamlutil.py asdf-standard/reference_files/README.rst asdf-standard/reference_files/1.0.0/ascii.asdf asdf-standard/reference_files/1.0.0/ascii.yaml asdf-standard/reference_files/1.0.0/basic.asdf asdf-standard/reference_files/1.0.0/basic.yaml asdf-standard/reference_files/1.0.0/complex.asdf asdf-standard/reference_files/1.0.0/complex.yaml asdf-standard/reference_files/1.0.0/compressed.asdf asdf-standard/reference_files/1.0.0/compressed.yaml asdf-standard/reference_files/1.0.0/exploded.asdf asdf-standard/reference_files/1.0.0/exploded.yaml asdf-standard/reference_files/1.0.0/exploded0000.asdf asdf-standard/reference_files/1.0.0/float.asdf asdf-standard/reference_files/1.0.0/float.yaml asdf-standard/reference_files/1.0.0/int.asdf asdf-standard/reference_files/1.0.0/int.yaml asdf-standard/reference_files/1.0.0/shared.asdf asdf-standard/reference_files/1.0.0/shared.yaml asdf-standard/reference_files/1.0.0/stream.asdf asdf-standard/reference_files/1.0.0/stream.yaml asdf-standard/reference_files/1.0.0/unicode_bmp.asdf asdf-standard/reference_files/1.0.0/unicode_bmp.yaml asdf-standard/reference_files/1.0.0/unicode_spp.asdf asdf-standard/reference_files/1.0.0/unicode_spp.yaml asdf-standard/reference_files/generate/README.rst asdf-standard/reference_files/generate/generate asdf-standard/resources/asdf-format.org/core/manifests/core-1.0.0.yaml asdf-standard/resources/asdf-format.org/core/manifests/core-1.1.0.yaml asdf-standard/resources/asdf-format.org/core/manifests/core-1.2.0.yaml asdf-standard/resources/asdf-format.org/core/manifests/core-1.3.0.yaml asdf-standard/resources/asdf-format.org/core/manifests/core-1.4.0.yaml asdf-standard/resources/asdf-format.org/core/manifests/core-1.5.0.yaml asdf-standard/resources/asdf-format.org/core/manifests/core-1.6.0.yaml asdf-standard/resources/asdf-format.org/core/schemas/extension_manifest-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/asdf-schema-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.4.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.5.0.yaml asdf-standard/schemas/stsci.edu/asdf/version_map-1.6.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/column-1.0.0.yaml
asdf-standard/schemas/stsci.edu/asdf/core/complex-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/constant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/extension_metadata-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/externalarray-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/history_entry-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/integer-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/software-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/subclass_metadata-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/core/table-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/fits/fits-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/time/time-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/time/time-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/add-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/add-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/add-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/airy_disk2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/blackbody-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/box1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/box2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/broken_power_law1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.2.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.4.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/disk2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/domain-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/drude1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/ellipse2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/exponential1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/exponential_cutoff_power_law1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/fix_inputs-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/fix_inputs-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gaussian1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gaussian2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.1.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/king_projected_analytic1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/linear1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/log_parabola1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/logarithmic1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/lorentz1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/math_functions-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/moffat1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/moffat2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/multiplyscale-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/ortho_polynomial-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/planar2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/plummer1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/power_law1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.0.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/redshift_scale_factor-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/ricker_wavelet1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/ricker_wavelet2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/ring2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/rotate_sequence_3d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sersic1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sersic2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/sine1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/smoothly_broken_power_law1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.1.0.yaml 
asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/trapezoid1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/trapezoid_disk2d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/voigt1d-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.3.0.yaml asdf-standard/schemas/stsci.edu/asdf/unit/defunit-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/unit/quantity-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/unit/unit-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/icrs_coord-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.2.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.0.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.1.0.yaml asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.2.0.yaml asdf-standard/schemas/stsci.edu/yaml-schema/draft-01.yaml asdf.egg-info/PKG-INFO asdf.egg-info/SOURCES.txt asdf.egg-info/dependency_links.txt asdf.egg-info/entry_points.txt asdf.egg-info/requires.txt asdf.egg-info/top_level.txt asdf/commands/__init__.py asdf/commands/defragment.py asdf/commands/diff.py asdf/commands/edit.py asdf/commands/exploded.py asdf/commands/extension.py asdf/commands/extract.py asdf/commands/info.py asdf/commands/main.py asdf/commands/remove_hdu.py asdf/commands/tags.py asdf/commands/to_yaml.py 
asdf/commands/tests/__init__.py asdf/commands/tests/test_defragment.py asdf/commands/tests/test_diff.py asdf/commands/tests/test_edit.py asdf/commands/tests/test_exploded.py asdf/commands/tests/test_extension.py asdf/commands/tests/test_extract.py asdf/commands/tests/test_info.py asdf/commands/tests/test_main.py asdf/commands/tests/test_remove_hdu.py asdf/commands/tests/test_tags.py asdf/commands/tests/test_to_yaml.py asdf/commands/tests/data/__init__.py asdf/commands/tests/data/block0.asdf asdf/commands/tests/data/block1.asdf asdf/commands/tests/data/blocks.diff asdf/commands/tests/data/frames.diff asdf/commands/tests/data/frames0.asdf asdf/commands/tests/data/frames1.asdf asdf/commands/tests/data/frames_ignore_asdf_library.diff asdf/commands/tests/data/frames_ignore_both.diff asdf/commands/tests/data/frames_ignore_reference_frame.diff asdf/commands/tests/data/frames_minimal.diff asdf/commands/tests/data/simple_inline_array.diff asdf/commands/tests/data/simple_inline_array0.asdf asdf/commands/tests/data/simple_inline_array1.asdf asdf/compat/__init__.py asdf/compat/numpycompat.py asdf/extension/__init__.py asdf/extension/_compressor.py asdf/extension/_converter.py asdf/extension/_extension.py asdf/extension/_legacy.py asdf/extension/_manager.py asdf/extension/_manifest.py asdf/extension/_tag.py asdf/extern/RangeHTTPServer.py asdf/extern/__init__.py asdf/extern/atomicfile.py asdf/tags/__init__.py asdf/tags/core/__init__.py asdf/tags/core/complex.py asdf/tags/core/constant.py asdf/tags/core/external_reference.py asdf/tags/core/integer.py asdf/tags/core/ndarray.py asdf/tags/core/tests/__init__.py asdf/tags/core/tests/test_complex.py asdf/tags/core/tests/test_extension_metadata.py asdf/tags/core/tests/test_external_reference.py asdf/tags/core/tests/test_history.py asdf/tags/core/tests/test_integer.py asdf/tags/core/tests/test_ndarray.py asdf/tags/core/tests/data/__init__.py asdf/tags/core/tests/data/datatype-1.0.0.yaml asdf/tags/core/tests/data/ndim-1.0.0.yaml asdf/testing/__init__.py asdf/testing/helpers.py asdf/tests/__init__.py asdf/tests/conftest.py asdf/tests/coveragerc asdf/tests/helpers.py asdf/tests/httpserver.py asdf/tests/test_api.py asdf/tests/test_array_blocks.py asdf/tests/test_asdf.py asdf/tests/test_compression.py asdf/tests/test_config.py asdf/tests/test_entry_points.py asdf/tests/test_extension.py asdf/tests/test_file_format.py asdf/tests/test_fits_embed.py asdf/tests/test_generic_io.py asdf/tests/test_helpers.py asdf/tests/test_integration.py asdf/tests/test_reference.py asdf/tests/test_reference_files.py asdf/tests/test_resolver.py asdf/tests/test_resource.py asdf/tests/test_schema.py asdf/tests/test_search.py asdf/tests/test_stream.py asdf/tests/test_tagged.py asdf/tests/test_treeutil.py asdf/tests/test_types.py asdf/tests/test_util.py asdf/tests/test_versioning.py asdf/tests/test_yaml.py asdf/tests/data/__init__.py asdf/tests/data/asdf.fits.gz asdf/tests/data/complex-42.0.0.yaml asdf/tests/data/custom-1.0.0.yaml asdf/tests/data/custom_flow-1.0.0.yaml asdf/tests/data/custom_flow-1.1.0.yaml asdf/tests/data/custom_schema.yaml asdf/tests/data/custom_schema_definitions.yaml asdf/tests/data/custom_schema_external_ref.yaml asdf/tests/data/custom_style-1.0.0.yaml asdf/tests/data/default-1.0.0.yaml asdf/tests/data/example_schema.json asdf/tests/data/extension_check.fits asdf/tests/data/foreign_tag_reference-1.0.0.yaml asdf/tests/data/fraction-1.0.0.yaml asdf/tests/data/fraction_with_inverse-1.0.0.yaml asdf/tests/data/fractional_2d_coord-1.0.0.yaml asdf/tests/data/missing-1.1.0.yaml 
asdf/tests/data/one_of-1.0.0.yaml asdf/tests/data/self_referencing-1.0.0.yaml asdf/tests/data/tag_reference-1.0.0.yaml asdf/tests/data/version_mismatch.fits compatibility_tests/README.md compatibility_tests/assert_file_correct.py compatibility_tests/common.py compatibility_tests/generate_file.py compatibility_tests/test_file_compatibility.py docker/s390x/Dockerfile docker/s390x/README.md docs/Makefile docs/conf.py docs/index.rst docs/make.bat docs/_static/custom.css docs/_templates/autosummary/base.rst docs/_templates/autosummary/class.rst docs/_templates/autosummary/module.rst docs/asdf/arrays.rst docs/asdf/asdf_tool.rst docs/asdf/changes.rst docs/asdf/config.rst docs/asdf/developer_api.rst docs/asdf/developer_overview.rst docs/asdf/developer_versioning.rst docs/asdf/features.rst docs/asdf/install.rst docs/asdf/overview.rst docs/asdf/user_api.rst docs/asdf/using_extensions.rst docs/asdf/extending/compressors.rst docs/asdf/extending/converters.rst docs/asdf/extending/extensions.rst docs/asdf/extending/legacy.rst docs/asdf/extending/manifests.rst docs/asdf/extending/resources.rst docs/asdf/extending/schemas.rst docs/asdf/extending/uris.rst docs/asdf/extending/use_cases.rst docs/sphinxext/__init__.py docs/sphinxext/example.py pytest_asdf/__init__.py pytest_asdf/plugin.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644282534.0 asdf-2.9.2/asdf.egg-info/dependency_links.txt0000644000537500020070000000000100000000000022253 0ustar00wjamiesonSTSCI\science ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644282534.0 asdf-2.9.2/asdf.egg-info/entry_points.txt0000644000537500020070000000036100000000000021503 0ustar00wjamiesonSTSCI\science[asdf.resource_mappings] asdf = asdf.resource:get_core_resource_mappings [asdf_extensions] builtin = asdf.extension:BuiltinExtension [console_scripts] asdftool = asdf.commands.main:main [pytest11] asdf_schema_tester = pytest_asdf.plugin ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644282534.0 asdf-2.9.2/asdf.egg-info/requires.txt0000644000537500020070000000050400000000000020604 0ustar00wjamiesonSTSCI\sciencejmespath>=0.6.2 jsonschema<4,>=3.0.2 numpy>=1.10 packaging>=16.0 pyyaml>=3.10 semantic_version>=2.8 [:python_version < "3.9"] importlib_resources>=3 [all] lz4>=0.10 [docs] sphinx sphinx-astropy astropy graphviz matplotlib docutils [tests] pytest astropy gwcs pytest-doctestplus pytest-remotedata pytest-openfiles psutil ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644282534.0 asdf-2.9.2/asdf.egg-info/top_level.txt0000644000537500020070000000002100000000000020730 0ustar00wjamiesonSTSCI\scienceasdf pytest_asdf ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1644282536.969278 asdf-2.9.2/compatibility_tests/0000755000537500020070000000000000000000000017671 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/compatibility_tests/README.md0000644000537500020070000000100600000000000021145 0ustar00wjamiesonSTSCI\scienceASDF file compatibility tests ============================= These tests confirm that files produced by the latest library code can be read by earlier releases of the library, and vice versa. The tests obtain a list of released versions from pypi.org and install each tested version into a virtualenv, so an internet connection is required to run them. 
The tests in this directory are excluded from the normal test suite, but can be run (from the repo root directory) with `pytest compatibility_tests/ --remote-data`.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/compatibility_tests/assert_file_correct.py0000644000537500020070000000071200000000000024264 0ustar00wjamiesonSTSCI\scienceimport argparse from pathlib import Path from common import assert_file_correct def parse_args(): parser = argparse.ArgumentParser(description="Confirm that an ASDF file generated by generate_file.py can be read") parser.add_argument("filename", help="the filename to test") return parser.parse_args() def main(): args = parse_args() path = Path(args.filename) assert_file_correct(path) if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/compatibility_tests/common.py0000644000537500020070000000107200000000000021533 0ustar00wjamiesonSTSCI\scienceimport asdf from asdf.versioning import supported_versions import numpy as np def generate_file(path, version): if version not in supported_versions: raise ValueError("ASDF Standard version {} is not supported by version {} of the asdf library".format(version, asdf.__version__)) af = asdf.AsdfFile({"array": np.ones((8, 16))}, version=version) af.write_to(path) def assert_file_correct(path): __tracebackhide__ = True with asdf.open(str(path)) as af: assert af["array"].shape == (8, 16) assert np.all(af["array"] == 1) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/compatibility_tests/generate_file.py0000644000537500020070000000114100000000000023031 0ustar00wjamiesonSTSCI\scienceimport argparse from pathlib import Path from asdf.versioning import AsdfVersion from common import generate_file def parse_args(): parser = argparse.ArgumentParser(description="Generate an ASDF file for library version compatibility testing") parser.add_argument("filename", help="the output filename") parser.add_argument("version", help="the ASDF Standard version to write") return parser.parse_args() def main(): args = parse_args() path = Path(args.filename) version = AsdfVersion(args.version) generate_file(path, version) if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266559.0 asdf-2.9.2/compatibility_tests/test_file_compatibility.py0000644000537500020070000001564500000000000025165 0ustar00wjamiesonSTSCI\scienceimport json import urllib.request from packaging.version import Version from itertools import groupby from pathlib import Path import subprocess from contextlib import contextmanager import os import pytest import pytest_remotedata import virtualenv import asdf from common import generate_file, assert_file_correct # Strange version present on pypi that doesn't parse as a Version BAD_VERSIONS = {"0"} # Minimum library version to read files produced by the current # version of the code. We're not maintaining < 2.7.x and bugs in older # versions prevent valid files from being read. MIN_VERSION_NEW_FILES = Version("2.7.0") # Minimum library version to produce files read by the current # version of the code. Earlier versions aren't able to generate # files for all the ASDF Standard versions that they claim to support. 
MIN_VERSION_OLD_FILES = Version("2.3.0") GENERATE_SCRIPT_PATH = Path(__file__).parent/"generate_file.py" ASSERT_SCRIPT_PATH = Path(__file__).parent/"assert_file_correct.py" @contextmanager def internet_temporarily_enabled(verbose=False): """ Context manager that temporarily enables pytest_remotedata internet. """ initially_disabled = pytest_remotedata.disable_internet.INTERNET_OFF pytest_remotedata.disable_internet.turn_on_internet(verbose=verbose) try: yield finally: if initially_disabled: pytest_remotedata.disable_internet.turn_off_internet(verbose=verbose) def fetch_package_versions(package_name): """ Request a package's available versions from pypi.org metadata. """ content = urllib.request.urlopen("https://pypi.org/pypi/{}/json".format(package_name)).read() version_strings = json.loads(content)["releases"].keys() return [ Version(v) for v in version_strings if v not in BAD_VERSIONS and (Version(v) >= MIN_VERSION_NEW_FILES or Version(v) >= MIN_VERSION_OLD_FILES) ] def fetch_latest_patch_versions(package_name): """ Return the latest patch version within each of the package's minor versions. """ key_fn = lambda v: v.release[0:2] versions = sorted(fetch_package_versions(package_name), key=key_fn) return [max(group) for _, group in groupby(versions, key=key_fn)] # Enable internet here, otherwise pytest_remotedata will complain # (and @pytest.mark.remote_data doesn't work on non-test methods). with internet_temporarily_enabled(): PATCH_VERSIONS = fetch_latest_patch_versions("asdf") def env_run(env_path, command, *args, **kwargs): """ Run a command in the context of the virtual environment at the specified path. """ return subprocess.run([env_path/"bin"/command] + list(args), **kwargs).returncode == 0 def env_check_output(env_path, command, *args): """ Run a command in the context of the virtual environment at the specified path, and return the output. """ return subprocess.check_output([env_path/"bin"/command] + list(args)).decode("utf-8").strip() def get_supported_versions(env_path): """ Get ASDF Standard versions that are supported by the asdf library installed in the specified virtual environment. """ script = r"""import asdf; print("\n".join(str(v) for v in asdf.versioning.supported_versions))""" output = env_check_output(env_path, "python3", "-c", script) return [asdf.versioning.AsdfVersion(v) for v in output.split("\n")] def get_installed_version(env_path): """ Get the version of the asdf library installed in the specified virtual environment. """ script = r"""import asdf; print(asdf.__version__)""" return Version(env_check_output(env_path, "python3", "-c", script)) @pytest.fixture(scope="module", params=PATCH_VERSIONS) def asdf_version(request): """ The (old) version of the asdf library under test. """ return request.param @pytest.fixture(scope="module") def env_path(asdf_version, tmp_path_factory): """ Path to the virtualenv where the (old) asdf library is installed. """ path = tmp_path_factory.mktemp("asdf-{}-env".format(asdf_version), numbered=False) virtualenv.cli_run([str(path)]) assert env_run(path, "pip", "install", "asdf=={}".format(asdf_version), capture_output=True), ( "Failed to install asdf version {}".format(asdf_version) ) return path @pytest.fixture(autouse=True) def pushd_tmpdir(tmpdir): """ Change the working directory, in case the user is running these tests from the repo root. Python will import a module from the current working directory by preference, so this prevents us from accidentally comparing the current library code to itself.
""" original_cwd = os.getcwd() tmpdir.chdir() yield os.chdir(original_cwd) @pytest.mark.remote_data def test_file_compatibility(asdf_version, env_path, tmpdir): # Sanity check to ensure we're not accidentally comparing # the current code to itself. installed_version = get_installed_version(env_path) assert installed_version == asdf_version, ( "The version of asdf in the virtualenv ({}) does ".format(installed_version) + "not match the version being tested ({})".format(asdf_version) ) # We can only test ASDF Standard versions that both library # versions support. current_supported_versions = set(asdf.versioning.supported_versions) old_supported_versions = set(get_supported_versions(env_path)) standard_versions = [v for v in current_supported_versions.intersection(old_supported_versions)] # Confirm that this test isn't giving us a false sense of security. assert len(standard_versions) > 0 for standard_version in standard_versions: # Confirm that a file generated by the current version of the code # can be read by the older version of the library. if asdf_version >= MIN_VERSION_NEW_FILES: current_file_path = Path(str(tmpdir))/"test-current.asdf" generate_file(current_file_path, standard_version) assert env_run(env_path, "python3", ASSERT_SCRIPT_PATH, current_file_path, capture_output=True), ( "asdf library version {} failed to read an ASDF Standard {} ".format(asdf_version, standard_version) + "file produced by this code" ) # Confirm that a file generated by the older version of the library # can be read by the current version of the code. if asdf_version >= MIN_VERSION_OLD_FILES: old_file_path = Path(str(tmpdir))/"test-old.asdf" assert env_run(env_path, "python3", GENERATE_SCRIPT_PATH, old_file_path, str(standard_version), capture_output=True), ( "asdf library version {} failed to generate an ASDF Standard {} file".format(asdf_version, standard_version) ) assert_file_correct(old_file_path), ( "asdf library version {} produced an ASDF Standard {} ".format(asdf_version, standard_version) + "that this code failed to read" ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/conftest.py0000644000537500020070000000065600000000000016004 0ustar00wjamiesonSTSCI\scienceimport os import pytest @pytest.fixture(scope="session", autouse=True) def temp_cwd(tmpdir_factory): """ This fixture creates a temporary current working directory for the test session, so that docstring tests that write files don't clutter up the real cwd. """ original_cwd = os.getcwd() try: os.chdir(tmpdir_factory.mktemp("cwd")) yield finally: os.chdir(original_cwd) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.7911348 asdf-2.9.2/docker/0000755000537500020070000000000000000000000015045 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9700773 asdf-2.9.2/docker/s390x/0000755000537500020070000000000000000000000015733 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docker/s390x/Dockerfile0000644000537500020070000000152200000000000017725 0ustar00wjamiesonSTSCI\scienceFROM s390x/debian:buster ENV DEBIAN_FRONTEND noninteractive RUN apt-get update -q -y RUN apt-get install -q -y git \ python3 \ python3-astropy \ python3-lz4 \ python3-numpy \ python3-venv \ python3-wheel WORKDIR /root RUN python3 -m venv --system-site-packages asdf-env RUN . 
/root/asdf-env/bin/activate && \ pip3 install --upgrade pip setuptools gwcs==0.9.1 pytest==5.4.3 pytest-doctestplus==0.8.0 RUN git clone https://github.com/asdf-format/asdf.git WORKDIR /root/asdf RUN . /root/asdf-env/bin/activate &&\ git submodule init && \ git submodule update && \ pip3 install -e .[all,tests] RUN echo ". /root/asdf-env/bin/activate" >> /root/.bashrc CMD [ "/bin/bash" ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docker/s390x/README.md0000644000537500020070000000061700000000000017216 0ustar00wjamiesonSTSCI\science# Troubleshooting asdf tests on S390X architecture 1. Build the Docker image: ``` docker build -t asdf-s390x . ``` 2. Run the container, which starts in the asdf repository root: ``` docker run -it asdf-s390x ``` Alternatively, bind-mount a checkout with local changes: ``` docker run -it --mount type=bind,source=/path/to/asdf,target=/root/asdf asdf-s390x ``` 3. Run pytest: ``` pytest ``` ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9723277 asdf-2.9.2/docs/0000755000537500020070000000000000000000000014526 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/Makefile0000644000537500020070000001116400000000000016171 0ustar00wjamiesonSTSCI\science# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest #This is needed with git because git doesn't create a dir if it's empty $(shell [ -d "_static" ] || mkdir -p _static) help: @echo "Please use \`make <target>' where <target> is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR) -rm -rf api html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files."
json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9729633 asdf-2.9.2/docs/_static/0000755000537500020070000000000000000000000016154 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/_static/custom.css0000644000537500020070000000022100000000000020173 0ustar00wjamiesonSTSCI\sciencediv.highlight { transition: width 0.5s; -webkit-transition: width 0.5s; overflow: hidden; } div.highlight:hover { width: 150% } ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1644282536.792267 asdf-2.9.2/docs/_templates/0000755000537500020070000000000000000000000016663 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9741182 asdf-2.9.2/docs/_templates/autosummary/0000755000537500020070000000000000000000000021251 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/_templates/autosummary/base.rst0000644000537500020070000000037200000000000022717 0ustar00wjamiesonSTSCI\science{% extends "autosummary_core/base.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/_templates/autosummary/class.rst0000644000537500020070000000037300000000000023113 0ustar00wjamiesonSTSCI\science{% extends "autosummary_core/class.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/_templates/autosummary/module.rst0000644000537500020070000000037400000000000023274 0ustar00wjamiesonSTSCI\science{% extends "autosummary_core/module.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9802413 asdf-2.9.2/docs/asdf/0000755000537500020070000000000000000000000015443 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/arrays.rst0000644000537500020070000002174400000000000017506 0ustar00wjamiesonSTSCI\science.. currentmodule:: asdf Saving arrays ------------- Beyond the basic data types of dictionaries, lists, strings and numbers, the most important thing ASDF can save is arrays. It's as simple as putting a :mod:`numpy` array somewhere in the tree. Here, we save an 8x8 array of random floating-point numbers (using `numpy.random.rand`). Note that the resulting YAML output contains information about the structure (size and data type) of the array, but the actual array content is in a binary block. .. runcode:: from asdf import AsdfFile import numpy as np tree = {'my_array': np.random.rand(8, 8)} ff = AsdfFile(tree) ff.write_to("test.asdf") .. note:: In the file examples below, the YAML part is shown just as it appears in the file.
The ``BLOCK`` sections are stored as binary data in the file, but are presented in human-readable form on this page. .. asdf:: test.asdf Sharing of data --------------- Arrays that are views on the same data automatically share the same data in the file. In this example an array and a subview on that same array are saved to the same file, resulting in only a single block of data being saved. .. runcode:: from asdf import AsdfFile import numpy as np my_array = np.random.rand(8, 8) subset = my_array[2:4,3:6] tree = { 'my_array': my_array, 'subset': subset } ff = AsdfFile(tree) ff.write_to("test.asdf") .. asdf:: test.asdf Saving inline arrays -------------------- For small arrays, you may not care about the efficiency of a binary representation and just want to save the array contents directly in the YAML tree. The `~asdf.AsdfFile.set_array_storage` method can be used to set the storage type of the associated data. The allowed values are ``internal``, ``external``, and ``inline``. - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file (also known as "exploded" format, which is discussed below in :ref:`exploded`). - ``inline``: Store the data as YAML inline in the tree. .. runcode:: from asdf import AsdfFile import numpy as np my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = AsdfFile(tree) ff.set_array_storage(my_array, 'inline') ff.write_to("test.asdf") .. asdf:: test.asdf Alternatively, it is possible to use the ``all_array_storage`` parameter of `AsdfFile.write_to` and `AsdfFile.update` to control the storage format of all arrays in the file. .. code:: # This controls the output format of all arrays in the file ff.write_to("test.asdf", all_array_storage='inline') For automatic management of the array storage type based on number of elements, see :ref:`config_options_array_inline_threshold`. .. _exploded: Saving external arrays ---------------------- ASDF files may also be saved in "exploded form", which creates multiple files corresponding to the following data items: - One ASDF file containing only the header and tree. - *n* ASDF files, each containing a single array data block. Exploded form is useful in the following scenarios: - Over a network protocol, such as HTTP, a client may only need to access some of the blocks. While reading a subset of the file can be done using HTTP ``Range`` headers, it still requires one (small) request per block to "jump" through the file to determine the start location of each block. This can become time-consuming over a high-latency network if there are many blocks. Exploded form allows each block to be requested directly by a specific URI. - An ASDF writer may stream a table to disk, when the size of the table is not known at the outset. Using exploded form simplifies this, since a standalone file containing a single table can be iteratively appended to without worrying about any blocks that may follow it. To save a block in an external file, set its block type to ``'external'``. .. runcode:: from asdf import AsdfFile import numpy as np my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = AsdfFile(tree) # On an individual block basis: ff.set_array_storage(my_array, 'external') ff.write_to("test.asdf") # Or for every block: ff.write_to("test.asdf", all_array_storage='external') .. asdf:: test.asdf .. asdf:: test0000.asdf
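Reading a file saved in exploded form requires no extra steps: when the main file is opened, the external block files are located relative to it. A minimal sketch, assuming the ``test.asdf`` and ``test0000.asdf`` files produced above are in the current directory:

.. code::

    import asdf

    # Opening the main file automatically resolves the references
    # to the external block file(s) stored alongside it.
    with asdf.open('test.asdf') as af:
        # The array data is read from test0000.asdf on access.
        print(af['my_array'].shape)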
Streaming array data -------------------- In certain scenarios, you may want to stream data to disk, rather than writing an entire array of data at once. For example, it may not be possible to fit the entire array in memory, or you may want to save data from a device as it comes in to prevent data loss. The ASDF Standard allows exactly one streaming block per file where the size of the block isn't included in the block header, but instead is implicitly determined to include all of the remaining contents of the file. By definition, it must be the last block in the file. To use streaming, rather than including a Numpy array object in the tree, you include an `asdf.Stream` object, which sets up the structure of the streamed data, but will not write out the actual content. The file handle's `write` method is then used to manually write out the binary data. .. runcode:: from asdf import AsdfFile, Stream import numpy as np tree = { # Each "row" of data will have 128 entries. 'my_stream': Stream([128], np.float64) } ff = AsdfFile(tree) with open('test.asdf', 'wb') as fd: ff.write_to(fd) # Write 100 rows of data, one row at a time. ``write`` # expects the raw binary bytes, not an array, so we use # ``tobytes()``. for i in range(100): fd.write(np.array([i] * 128, np.float64).tobytes()) .. asdf:: test.asdf A case where streaming may be useful is when converting large data sets from a different format into ASDF. In these cases it would be impractical to hold all of the data in memory as an intermediate step. Consider the following example that streams a large CSV file containing rows of integer data and converts it to numpy arrays stored in ASDF: .. doctest-skip:: import csv import numpy as np from asdf import AsdfFile, Stream tree = { # We happen to know in advance that each row in the CSV has 100 ints 'data': Stream([100], np.int64) } ff = AsdfFile(tree) # open the output file handle with open('new_file.asdf', 'wb') as fd: ff.write_to(fd) # open the CSV file to be converted with open('large_file.csv', 'r') as cfd: # read each line of the CSV file reader = csv.reader(cfd) for row in reader: # convert each row to a numpy array array = np.array([int(x) for x in row], np.int64) # write the array to the output file handle fd.write(array.tobytes()) Compression ----------- Individual blocks in an ASDF file may be compressed. You can easily compress all blocks with zlib or bzip2: .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'a': np.random.rand(32, 32), 'b': np.random.rand(64, 64) } target = AsdfFile(tree) target.write_to('target.asdf', all_array_compression='zlib') target.write_to('target.asdf', all_array_compression='bzp2') .. asdf:: target.asdf The lz4 compression algorithm is also supported, but requires the optional ``lz4`` package in order to work. When reading a file with compressed blocks, the blocks will be automatically decompressed when accessed. If a file with compressed blocks is read and then written out again, by default the new file will use the same compression as the original file. This behavior can be overridden by explicitly providing a different compression algorithm when writing the file out again. ..
code:: import asdf # Open a file with some compression af = asdf.open('compressed.asdf') # Use the same compression when writing out a new file af.write_to('same.asdf') # Or specify the (possibly different) algorithm to use when writing out af.write_to('different.asdf', all_array_compression='lz4') Memory mapping -------------- By default, all internal array data is memory mapped using `numpy.memmap`. This allows for the efficient use of memory even when reading files with very large arrays. The use of memory mapping means that the following usage pattern is not permitted: .. code:: import asdf with asdf.open('my_data.asdf') as af: ... af.tree Specifically, if an ASDF file has been opened using a `with` context, it is not possible to access the file contents outside of the scope of that context, because any memory mapped arrays will no longer be available. It may sometimes be useful to copy array data into memory instead of using memory maps. This can be controlled by passing the `copy_arrays` parameter to either the `AsdfFile` constructor or `asdf.open`. By default, `copy_arrays=False`. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/asdf_tool.rst0000644000537500020070000000166500000000000020157 0ustar00wjamiesonSTSCI\science.. asdf_tool: Command line tool ----------------- `asdf` includes a command-line tool, ``asdftool`` that performs a number of useful operations: - ``explode``: Convert a self-contained ASDF file into exploded form (see :ref:`exploded`). - ``implode``: Convert an ASDF file in exploded form into a self-contained file. - ``defragment``: Remove unused blocks and extra space. - ``diff``: Report differences between two ASDF files. - ``edit``: Edit the YAML portion of an ASDF file. - ``remove-hdu``: Remove ASDF extension from ASDF-in-FITS file (requires `astropy`, see :ref:`asdf-in-fits`). - ``info``: Print a rendering of an ASDF tree. - ``extensions``: Show information about installed extensions (see :ref:`other_packages`). - ``tags``: List currently available tags. - ``to_yaml``: Inline all of the data in an ASDF file so that it is pure YAML. Run ``asdftool --help`` for more information. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266978.0 asdf-2.9.2/docs/asdf/changes.rst0000644000537500020070000003112100000000000017603 0ustar00wjamiesonSTSCI\science.. currentmodule:: asdf ******* Changes ******* What's new in asdf 2.9.2? ========================= The ASDF Standard is at v1.6.0. Changes include: - Fix deprecation warnings stemming from the release of pytest 7.0.0. - Fix bug in pytest plugin when schemas are not in a directory named "schemas". What's new in asdf 2.9.1? ========================= The ASDF Standard is at v1.6.0. Changes include: - Fix typo in testing module ``__init__.py`` name. What's new in asdf 2.9.0? ========================= The ASDF Standard is at v1.6.0. Changes include: - Added the capability for classes to expose their contents to AsdfFile.info and AsdfFile.search via the ``__asdf_traverse__`` method. - Drop support for Python 3.6. - Update setup.py to raise error if "git submodule update --init" has not been run. - Add ability for tags to correspond to multiple schema_uri, with an implied allOf among the schema_uris. - Add the URL of the file being parsed to ``SerializationContext``. - Add ``asdf.testing.helpers`` module with simplified versions of test helpers previously available in ``asdf.tests.helpers``. What's new in asdf 2.8.3? 
========================= The ASDF Standard is at v1.6.0. Changes include: - Fix use of "python" instead of "python3" in compatibility tests. What's new in asdf 2.8.2? ========================= The ASDF Standard is at v1.6.0. Changes include: - Fix array compression for non-native byte order. - Fix bug in schema loading when URI is claimed by both legacy and new-style extensions. - Hide 'name' and 'version' attributes on instances of NDArrayType. What's new in asdf 2.8.1? ========================= The ASDF Standard is at v1.6.0. Changes include: - Fix bug that corrupts ndarray views when a new block is added to an existing file in absence of a block index. What's new in asdf 2.8.0? ========================= The ASDF Standard is at v1.6.0. Changes include: - New plugin APIs for block compressors, extensions, and schemas. - New global configuration mechanism (``asdf.get_config()``). - Support for the asdf:// URI scheme. - Drop support for Python 3.5. - Support for ASDF Standard 1.6.0, which is available via the ``version`` argument to ``AsdfFile.__init__`` but not yet the default. - Values that match defaults defined in schemas are no longer automatically removed from the ASDF tree. - ``None`` values are no longer removed from the tree on write. - Remove the ``asdf.test`` method and ``asdf.__githash__`` attribute. - And more, see full changelog below for details. What's new in asdf 2.7.5? ========================= The ASDF Standard is at v1.5.0. Changes include: - Fix bug in ``asdf.schema.check_schema`` causing relative references in metaschemas to be resolved incorrectly. - Fix bug that corrupts ndarray views when a new block is added to an existing file in absence of a block index. What's New in asdf 2.7.4? ========================= The ASDF Standard is at v1.5.0. Changes include: - Fix pytest plugin failure under older versions of pytest. - Fix bugs when serializing views over non-contiguous and FITS base arrays. - Add support for opening files from HTTPS URLs and following HTTP/HTTPS redirects. What's New in asdf 2.7.3? ========================= The ASDF Standard is at v1.5.0. Changes include: - Fix bug resulting in invalid strides values for views over FITS arrays. - Add pytest plugin options to skip and xfail individual tests and xfail the unsupported ndarray-1.0.0 schema example. What's New in asdf 2.7.2? ========================= The ASDF Standard is at v1.5.0. Changes include: * Fix bug causing test collection failures under Python 3.9. * Fix bug when decompressing arrays with numpy 1.20. * Replace ``assert`` statements with ``raise`` at the behest of the bandit security linter. What's New in asdf 2.7.1? ========================= The ASDF Standard is at v1.5.0. The sole change in this release: * Fix bug preventing access to copied array data after an ``AsdfFile`` is closed. What's New in asdf 2.7.0? ========================= The ASDF Standard is at v1.5.0. Changes include: * Performance improvements when reading and validating ASDF files. * Numerous bug fixes. See complete changelog below for details. * Add option to ``asdf.open`` that disables schema validation on read. * Improved warning messages. All `asdf` library warnings now subclass ``asdf.exceptions.AsdfWarning``. * Drop support for filling default values from subschemas within oneOf or anyOf combiners. * Resolve deprecation warnings from the `asdf` pytest plugin when used with newer versions of pytest. * Drop support for 2.x versions of the jsonschema package. What's New in asdf 2.6.0? 
========================= The ASDF Standard is at v1.5.0. Changes include: * ASDF Standard 1.5.0 is now the default for new files. Changes to the standard include several new and updated transform schemas, and removal of wcs schemas that were previously deprecated and moved to the ``gwcs`` package. * Add ``asdf.info`` and ``AsdfFile.search`` methods for visualizing and interactively searching an ASDF tree. * Fix bug causing too many bytes to be consumed when reading compressed blocks. * Support validation and serialization of additional numpy scalar types. * Fix serialization of trees containing implicit internal references and reference cycles, and simplify handling of children in ``ExtensionType`` subclasses. * Fix bug preventing addition of history entries to a file that was initially saved without them. * Expand developer documentation to cover the details of pyyaml integration and conversion between tagged trees and custom trees. What's New in asdf 2.5.2? ========================= The ASDF Standard is at v1.4.0. Changes include: * Add schemas that were previously missing from ASDF Standard 1.4.0, and fix a variety of minor issues in ``AsdfType`` subclasses. * Add general and versioning-specific developer documentation. What's New in asdf 2.5.1? ========================= The ASDF Standard is at v1.4.0. Changes include: * Fix bug in test causing failure when test suite is run against an installed `asdf` package. What's New in asdf 2.5.0? ========================= The ASDF Standard is at v1.4.0. Changes include: * Added convenience method for fetching the default resolver. * Fixed load_schema LRU cache memory usage issue. * Fixed bug causing segfault after update of a memory-mapped file. What's New in asdf 2.4.2? ========================= The ASDF Standard is at v1.3.0. Changes include: * Define the ``in`` operator for top-level ``AsdfFile`` objects. * Automatically register schema tester plugin. Do not enable schema tests by default. Add configuration setting and command line option to enable schema tests. * Enable handling of subclasses of known custom types by using decorators for convenience. * Add support for jsonschema 3.x. * Fix bug in ``NDArrayType.__len__``. It must be a method, not a property. What's New in asdf 2.3.3? ========================= The ASDF Standard is at v1.3.0. Changes include: * Pass ``ignore_unrecognized_tag`` setting through to ASDF-in-FITS. * Use ``$schema`` keyword if available to determine meta-schema to use when testing whether schemas themselves are valid. * Take into account resolvers from installed extensions when loading schemas for validation. * Fix compatibility issue with new release of ``pyyaml`` (version 5.1). * Allow use of ``pathlib.Path`` objects for ``custom_schema`` option. What's New in asdf 2.3.1? ========================= The ASDF Standard is at v1.3.0. Changes include: * Provide source information for ``AsdfDeprecationWarning`` warnings that come from extensions in external packages. * Fix the way ``generic_io`` handles URIs and paths on Windows. * Fix bug in ``asdftool`` that prevented ``extract`` command from being visible. What's New in asdf 2.3? ======================= `asdf` 2.3 reflects the update of ASDF Standard to v1.3.0, and contains a few notable features and an API change: * Storage of arbitrary precision integers is now provided by `asdf.IntegerType`. This new type is provided by version 1.3.0 of the ASDF Standard. * Reading a file with integer literals that are too large now causes only a warning instead of a validation error.
This is to provide backwards compatibility for files that were created with a buggy version of `asdf`. * The functions `asdf.open` and `AsdfFile.write_to` now support the use of `pathlib.Path`. * The `asdf.asdftypes` module has been deprecated in favor of `asdf.types`. The old module will be removed entirely in the 3.0 release. What's New in asdf 2.2? ======================= `asdf` 2.2 contains several API changes, although backwards compatibility is preserved for now. The most significant changes are: * The function `AsdfFile.open` has been deprecated in favor of `asdf.open`. It will be removed entirely in the 3.0 release. More intelligent file mode handling has been added to `asdf.open`. Files that are opened in read-only mode with `asdf.open` now explicitly block writes to memory-mapped arrays. This may cause problems for some existing code, but any such code was accessing these arrays in an unsafe manner, so backwards compatibility for this case is not provided. The old mode handling behavior is retained for now in `AsdfFile.open`. * It is now possible to disable lazy loading of internal arrays. This is useful when the `AsdfFile` was opened using another open file. Without lazy loading, it is possible to close the original file but still retain access to the array data. * There is a new warning `AsdfConversionWarning` that occurs when failing to convert nodes in the ASDF tree into custom tagged types. This makes it easier for users to filter specifically for this failure case. What's New in asdf 2.1? ======================= `asdf` 2.1 is a minor release, and most of the changes affect only a subset of users. The most notable changes are the following: * `namedtuple` objects can now be serialized. They are automatically converted into `list` objects, and therefore are not strictly able to round-trip. By default a warning occurs when performing this conversion, but the warning can be disabled by passing `ignore_implicit_conversion=True` to the `AsdfFile` constructor. * Added a method `AsdfFile.get_history_entries` for getting a list of history entries from the tree. * Added an option to `generic_io.get_file` to close the underlying file handle. Please see the :ref:`change_log` for additional details. What's New in asdf 2.0? ======================= `asdf` 2.0 is a major release that includes many improvements, new features, and some API changes. It is the first release of the `asdf` package that only supports Python 3. The full list of changes, including bug fixes, can be found in the :ref:`change_log`. A brief overview of changes is provided below: * Support for Python 2.7 has been removed entirely. * There is no longer a hard dependency on `astropy`. It is still required for some features, and for running the tests. Astropy-related tag implementations have been moved to the Astropy package itself. * External packages can now install and register custom ASDF extensions using `setuptools` entry points (see :ref:`other_packages` and :ref:`packaging_extensions`). `asdf` detects extensions that are installed in this way and automatically uses them when reading and writing files with custom types. * A bug was fixed that now allows fully-specified tags from external packages to be properly resolved. * The file format now includes metadata about the extensions that were used to create an ASDF file. The software automatically adds this information when writing an ASDF file, and will check for installed extensions when reading a file containing such metadata (see :ref:`extension_checking`).
* The restrictions on the top-level attributes `data`, `wcs`, and `fits` have been removed. * Clients that wish to impose additional validation requirements on files can now provide custom top-level schemas (see :ref:`custom-schemas`). * There is a new way to reference array data that is defined in external files (see :ref:`array-references`). * Several new commands have been added to the `asdftool` command line interface: * ``extensions`` for showing information about installed extensions (see :ref:`other_packages`). * ``remove-hdu`` for removing ASDF extension from ASDF-in-FITS file (requires `astropy`, see :ref:`asdf-in-fits`). * The package now cleanly supports builds in `develop` mode and can be imported from the source tree. .. _change_log: Change Log ========== .. include:: ../../CHANGES.rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/config.rst0000644000537500020070000001236600000000000017452 0ustar00wjamiesonSTSCI\science.. currentmodule:: asdf.config ============= Configuration ============= Version 2.8 of this library introduced a new mechanism, `AsdfConfig`, for setting global configuration options. Currently available options are limited, but we expect to eventually move many of the ``AsdfFile.__init__`` and ``AsdfFile.write_to`` keyword arguments to `AsdfConfig`. AsdfConfig and you ================== The `AsdfConfig` class provides properties that can be adjusted to change the behavior of the `asdf` library for all files. For example, to disable schema validation on read: .. code-block:: python >>> import asdf >>> asdf.get_config().validate_on_read = False # doctest: +SKIP This will prevent validation on any subsequent call to `~asdf.open`. Obtaining an AsdfConfig instance -------------------------------- There are two methods available that give access to an `AsdfConfig` instance: `~asdf.get_config` and `~asdf.config_context`. The former simply returns the currently active config: .. code-block:: python >>> import asdf >>> asdf.get_config() The latter method, `~asdf.config_context`, returns a context manager that yields a copy of the currently active config. The copy is also returned by subsequent calls to `~asdf.get_config`, but only until the context manager exits. This allows for short-lived configuration changes that do not impact other code: .. code-block:: python >>> import asdf >>> with asdf.config_context() as config: ... config.validate_on_read = False ... asdf.get_config() ... >>> asdf.get_config() Special note to library maintainers ----------------------------------- Libraries that use `asdf` are encouraged to only modify `AsdfConfig` within a surrounding call to `~asdf.config_context`. The downstream library will then be able to customize `asdf`'s behavior without impacting other libraries or clobbering changes made by the user. Config options ============== .. _config_options_array_inline_threshold: array_inline_threshold ---------------------- The threshold number of array elements under which arrays are automatically stored inline in the ASDF tree instead of in binary blocks. If ``None``, array storage type is not managed automatically. Defaults to ``None``. default_version --------------- The default ASDF Standard version used for new files. This can be overridden on an individual file basis (using the version argument to ``AsdfFile.__init__``) or set here to change the default for all new files created in the current session. Defaults to the latest supported ASDF Standard version. 
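For example, to produce files targeting an older version of the standard for the remainder of the session (the version string here is purely illustrative):

.. code-block:: python

    >>> import asdf
    >>> asdf.get_config().default_version = "1.5.0"  # doctest: +SKIP
    >>> af = asdf.AsdfFile()  # will be written as ASDF Standard 1.5.0  # doctest: +SKIP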
io_block_size ------------- The buffer size used when reading and writing to the filesystem. Users may wish to adjust this value to improve I/O performance. Set to -1 to use the preferred block size for each file, as reported by st_blksize. Defaults to -1. legacy_fill_schema_defaults --------------------------- Flag that controls filling default values from schemas for older versions of the ASDF Standard. This library used to remove nodes from the tree whose values matched the default property in the schema. That behavior was changed in `asdf` 2.8, but in order to read files produced by older versions of the library, default values must still be filled from the schema for ASDF Standard <= 1.5.0. Set to False to disable filling default values from the schema for these older ASDF Standard versions. The flag has no effect for ASDF Standard >= 1.6.0. Defaults to True. validate_on_read ---------------- Flag that controls schema validation of the ASDF tree when opening files. Users who trust the source of their files may wish to disable validation on read to improve performance. Defaults to True. Additional AsdfConfig features ============================== `AsdfConfig` also provides methods for adding and removing plugins at runtime. For example, the `AsdfConfig.add_resource_mapping` method can be used to register a schema, which can then be used to validate a file: .. code-block:: python >>> import asdf >>> content = b""" ... %YAML 1.1 ... --- ... $schema: http://stsci.edu/schemas/yaml-schema/draft-01 ... id: http://example.com/example-project/schemas/foo-1.0.0 ... type: object ... properties: ... foo: ... type: string ... required: [foo] ... ... ... """ >>> asdf.get_config().add_resource_mapping({"http://example.com/example-project/schemas/foo-1.0.0": content}) >>> af = asdf.AsdfFile(custom_schema="http://example.com/example-project/schemas/foo-1.0.0") >>> af.validate() Traceback (most recent call last): ... jsonschema.exceptions.ValidationError: 'foo' is a required property ... >>> af["foo"] = "bar" >>> af.validate() See the `AsdfConfig` API documentation for more detail. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/developer_api.rst0000644000537500020070000000061300000000000021013 0ustar00wjamiesonSTSCI\science************* Developer API ************* The classes and functions documented here will be of use to developers who wish to create their own custom ASDF types and extensions. .. automodapi:: asdf.types .. automodapi:: asdf.extension .. automodapi:: asdf.resource .. automodapi:: asdf.yamlutil .. automodapi:: asdf.util .. automodapi:: asdf.versioning .. automodapi:: asdf.tests.helpers ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/docs/asdf/developer_overview.rst0000644000537500020070000011232200000000000022111 0ustar00wjamiesonSTSCI\scienceHigh level overview of the basic ASDF library ============================================= This document is an attempt to make it easier to understand the design and workings of the python asdf library for those unfamiliar with it. This is expected to grow organically so at the moment it should not be considered complete or comprehensive. Understanding the design is complicated by the fact that the library effectively inserts custom methods or classes into the objects that the pyyaml and jsonschema libraries use. 
Understanding what is going on thus means having some understanding of the relevant parts of the internals of both of those libraries. This overview will try to provide a small amount of context for these packages to illuminate how the code in asdf interacts with them. There are at least two ways of outlining the design. One is to give high level overviews of the various modules and how they interact with other modules. The other is to illustrate how code is actually invoked in common operations, this often being much more informative on a practical level (at least some find that to be the case). This document will attempt to do both. We will start with a high-level review of concepts and terms and point to where these are handled in the asdf modules. Because of the complexity, this initial design overview will focus on issues of validation and tree construction when reading. Construction in progress ------------------------ Before we get into further details, a word on the transition to new plugin APIs. Starting in asdf 2.8 we've introduced new interfaces for extending the asdf library to support additional tags and schemas. The interfaces were redesigned with the following goals in mind: - Simplify the connection between tags and their schema content. The old "resolver" system involves sending the tag URI through a lengthy series of transformations to get the filesystem path to the schema document. This has been error-prone and difficult to troubleshoot, so the new "resource mapping" system explicitly maps schema URIs to their content, and tag URIs directly to schema URIs. - Make it easier to separate schemas from extension code. Until now the schemas have always been provided by the same Python package that implements support for their tags, but we would like to move the schemas to language-agnostic repositories that non-Python implementations can use. To better support this, the new interface splits the old extension plugin into two new plugins, one of which is dedicated to schemas. - Allow tag serialization support to handle arbitrary sets of URIs. Previously tag code was restricted to working with tag URIs that were identical except for version. This presented a problem for the transition of URIs from stsci.edu to asdf-format.org, so the new interface allows for supporting diverse URIs with the same code. - Improve the terminology used in the tag serialization support classes. The old ``ExtensionType`` has been renamed ``Converter`` to indicate its purpose, and to eliminate the ambiguity between YAML types and Python types. The ``to_tree`` and ``from_tree`` methods have been renamed ``to_yaml_tree`` and ``from_yaml_tree`` to better indicate which tree they're expected to convert. - Simplify the code and behavior of tag classes. Converters are used as instances instead of classes with a custom metaclass, Python sub-types are no longer automatically handled, URIs are treated as single values instead of broken down into various components, etc. You can witness the gory details of this effort by clicking through the PR links on the asdf 2.8.0 roadmap. Support for ASDF core tags has not yet been moved to the new system. Doing so would be a breaking change for users who subclass that code, so we'll need to wait until asdf 3.0 to do that.
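To make the shape of the new interfaces concrete before diving into terminology, here is a minimal sketch of a new-style extension. The ``Point`` type and the example URIs are hypothetical, and a real extension would normally also register a schema for the tag via a resource mapping::

    import asdf
    from asdf.extension import Converter, Extension

    class Point:
        def __init__(self, x, y):
            self.x = x
            self.y = y

    class PointConverter(Converter):
        # The YAML tag(s) this converter handles and the Python
        # type(s) it can serialize.
        tags = ["asdf://example.com/tags/point-1.0.0"]
        types = [Point]

        def to_yaml_tree(self, obj, tag, ctx):
            # Convert the Python object into a YAML-serializable node.
            return {"x": obj.x, "y": obj.y}

        def from_yaml_tree(self, node, tag, ctx):
            # Reconstruct the Python object from the tagged YAML node.
            return Point(node["x"], node["y"])

    class PointExtension(Extension):
        extension_uri = "asdf://example.com/extensions/point-1.0.0"
        tags = ["asdf://example.com/tags/point-1.0.0"]
        converters = [PointConverter()]

    # Register at runtime; a package would normally register the
    # extension via an entry point instead.
    asdf.get_config().add_extension(PointExtension())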
Some terminology and definitions -------------------------------- **URI vs URL (Universal Resource Identifier)**. This is distinguished from URL (Universal Resource Locator) primarily in that URI is a mechanism for a unique name that follows a particular syntax, but itself may not indicate where the resource is. Generally URLs are expected to be used on the web for the HTTP protocol, though for asdf, this isn't necessarily the case as mentioned next. Recent changes to the library permit use of URIs with the asdf:// scheme, which is intended to reduce confusion over the distinction between identifiers and locations. **Resolver:** Tools to map URIs and tags into actual locations of schema files, which may be local directories (the usual approach) or an actual URL for retrieval over the network. This is more complicated than it may seem for reasons explained later. The resolver system has been deprecated in favor of resource mappings; new code should use the latter instead. **Global config:** A global library configuration feature that was added in asdf 2.8. Allows plugins to be added or removed at runtime and ``AsdfFile`` defaults to be modified by the user. Accessed via the ``get_config`` method on the top-level ``asdf`` module. For example, the default ASDF Standard version for new files can be set like this:: asdf.get_config().default_version = "1.3.0" Or a resource mapping plugin added at runtime like this:: asdf.get_config().add_resource_mapping({"http://somewhere.org/resources/foo": b"foo resource content"}) **Entry point:** A Python packaging feature that allows asdf to use plugins provided by other packages. Entry points are registered when a package is installed and become available to asdf without any additional effort on the part of the user. See the Python packaging documentation on entry points for more information. **Resource mapping:** An asdf plugin that provides access to "resources" which are binary blobs associated with a URI. These resources are mostly schemas, but any resource may be provided by a mapping. Resource mappings are provided via entry points or added at runtime using a method on the global config object. This feature is intended to replace the deprecated "resolver" mechanism. **Extension:** An extension to the ASDF Standard that defines additional YAML tags. In the future an extension may include other additional features such as binary block compressors or filters, but currently only tags are supported. **Extension implementation:** An asdf plugin that implements an extension to the ASDF Standard. This is the asdf library's support for an extended set of YAML tags. The library currently provides two interfaces for implementing extensions: the ``AsdfExtension`` class and the new, still-experimental ``Extension`` class. Extension implementations are provided via entry points or added at runtime using a method on the global config object. The ``AsdfFile`` also permits adding additional extensions on a per-instance basis, but use of that feature is discouraged and may be removed in asdf 3.0. **Tag code/tag class:** A class responsible for converting a family of tags into Python objects and vice versa. Each extension implementation includes a list of such classes. For the original ``AsdfExtension`` API, the tag classes all implement the ``ExtensionType`` interface. For the new API, tag classes implement ``Converter``. **Validator:** Tool to confirm that the YAML conforms to the schemas that apply. A lot goes on in this area and it is pretty complex in the implementation. **Tree building:** The YAML content is built into a tree in two stages. The YAML parser converts the raw YAML into a custom Python structure.
It is that structure that is validated. Then, if no errors are found, the tree is converted into a tree where tagged nodes get converted into corresponding Python objects (usually; an option exists to prevent this from happening, which is useful for some applications), e.g., WCS objects or numpy arrays (well, not quite that simply for numpy arrays). The above is a simplified view of what happens when an ASDF file is read.

Most of the resolver tools and code are in ``resolver.py`` (but not all). Most of the validation code is in ``schema.py``. The code that builds the trees is spread across many places: ``tagged.py``, ``treeutil.py``, ``types.py``, as well as all the extension code that supplies code to handle the tags within (and often the associated schemas).

A note on the location of schemas and tag code: there is a somewhat schizophrenic aspect to this, since schemas should be language-agnostic and, in that view, not bundled with specific language library code. But currently nearly all of the implementation is in Python, so while the long-term goal is to keep them separate, it is more convenient to keep them together for now. You will see cases where they are separate and some where they are bundled. The introduction of a separate plugin for providing access to schemas (the "resource mapping") is intended to allow extension authors to keep the schema documents in a separate language-neutral repository.

Actions that happen when an AsdfFile is instantiated
----------------------------------------------------

The asdf plugins (new and old-style extensions as well as resource mappings) registered as entry points can be obtained by calling methods in ``entry_points.py``. These methods are invoked by ``config.AsdfConfig`` the first time the library needs to use the plugins, and thereafter are cached within that config object. Both extensions and resource mappings are stored wrapped in proxy objects (``ExtensionProxy`` and ``ResourceMappingProxy``, respectively) that carry additional metadata like the package name and version of the entry point, and add some convenience methods on top of what the extension developer provides. Additionally, ``ExtensionProxy`` allows the library to treat both new-style ``Extension`` instances and old-style ``AsdfExtension`` instances similarly.

To see the list of extensions loaded by the library, call ``asdf.get_config().extensions``. To see the list of resource mappings, call ``asdf.get_config().resource_mappings``. Both of these properties are lazy-loaded and then cached, so the first call will take a moment to complete but subsequent calls will return immediately.

When an ``AsdfFile`` class is instantiated, one thing that happens in ``__init__`` is that ``self._process_plugin_extensions()`` is called. This method retrieves the extensions from the global config and selects those that are compatible with the ``AsdfFile``'s ASDF Standard version. It returns the resulting list, which is assigned to the ``_plugin_extensions`` variable. The term "plugin extensions" contrasts with "user extensions", which are additional extensions provided by the user as an argument to ``AsdfFile.__init__``.

The extension lists are used by ``AsdfFile`` to create the file's ``ExtensionList`` and ``ExtensionManager`` instances, which manage extensions for the old and new extension APIs, respectively. These instances are created lazily, when the ``extension_list`` and ``extension_manager`` properties are first accessed, to help speed up the initial construction of the ``AsdfFile``.
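To tie this together, the plugins that were picked up can be inspected from the Python prompt (a quick illustration; the exact contents depend on which packages are installed)::

    import asdf

    config = asdf.get_config()

    # Both properties are lazy-loaded and cached, so the first access
    # is the slow one.
    print(config.extensions)         # list of ExtensionProxy instances
    print(config.resource_mappings)  # list of ResourceMappingProxy instances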
The ``extension_manager`` is responsible for mapping tag URIs to schema URIs for validation and for retrieving type converters (instances of the ``Converter`` interface) by Python type or by YAML tag URI. ``extension_list`` handles the same duties, but for old-style extensions. ``extension_manager`` takes precedence over ``extension_list`` throughout the asdf library, so ``extension_list`` will only be consulted if ``extension_manager`` can't handle a particular tag or Python type.

On the subject of resolvers and tag/url mapping
-----------------------------------------------

The ``AsdfFile`` class has ``tag_mapping`` and ``url_mapping`` properties that each return the ``extension_list`` properties of the same name. These objects implement the original support for mapping tag URIs to schema content that, in the new API, is provided by resource mappings.

``tag_mapping`` and ``url_mapping`` are each ``resolver.Resolver`` instances that are generated from the mapping lists in the old-style extensions. These lists consist of 2-tuples. In the first case it is a mechanism to map the tag string to a url string, typically with an expected prefix or suffix to the tag (suffix is typical), so that given a full tag, it generates a url that includes the suffix. This permits one mapping to cover many tag variants. (The details of the mapping machinery, with examples, are given in a later section, since understanding this is essential to defining new tags and corresponding schemas.) The URL mapping works in a similar way, except that it consists of 2-tuples where the first element is the common part of the url, and the second part maps it to an actual location (url or file path). Again, the second part may include a placeholder for the suffix or prefix, and code to generate the path to the schema file. The resolver object turns these lists into functions: supplied with input that matches something in the list, they return the corresponding output.
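As a quick, hypothetical illustration of the shape of these 2-tuples (the URIs and paths below are invented; the real lists live in the old-style extension classes)::

    # Maps a tag prefix to a schema URL pattern; the {tag_suffix}
    # placeholder is filled in with the remainder of the tag.
    tag_mapping = [
        ("tag:example.com:myext",
         "http://example.com/schemas/myext{tag_suffix}"),
    ]

    # Maps a schema URL prefix to an actual location (here, on disk).
    url_mapping = [
        ("http://example.com/schemas/myext/",
         "file:///usr/share/myext/{url_suffix}.yaml"),
    ]

Supplied with an input that matches the first element of a tuple, the resolver produces the filled-in output; e.g., the tag ``tag:example.com:myext/foo-1.0.0`` would resolve to the url ``http://example.com/schemas/myext/foo-1.0.0``.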
Outline of how an ASDF file is opened and read into the corresponding Python object
------------------------------------------------------------------------------------

The starting point can be found in ``asdf.py``, essentially through the following chain (many calls and steps are left out to keep it simpler to follow).

When ``asdf.open("myasdffile.asdf")`` is called, it is aliased to ``asdf.open_asdf``, which first creates an instance of ``asdf.AsdfFile`` (let's call the instance ``af``), then calls ``af._open_impl()`` and then ``af._open_asdf``. That invokes a call to ``generic_io.get_file()``. ``generic_io.py`` basically contains code to handle all the variants of I/O possible (files, streaming, http access, etc.). In this case it returns a ``RealFile`` instance that wraps a local file system file.

Next the file is examined to see if it is an ASDF file (first by examining the first few lines in the header). If it passes those checks, the header (yaml) section of the file is extracted through a proxy mechanism that signals an end of file when the end of the yaml is reached, but otherwise looks like a file object. The yaml parsing phase described below normally returns a "tagged_tree". That is (somewhat simplified), it returns the data structure that yaml would normally return without any object conversion (i.e., all nodes are either dicts, lists, or scalar values), except that they are objects that now support a tag attribute that indicates whether a tag was associated with that node and what the tag was.

This reader object is passed to the yaml parser by calling ``yamlutil.load_tree``. A simple explanation of what goes on here is necessary to understand how this all works. Yaml supports various kinds of loaders. For security reasons, the "safe" loader is used (note that both C and python versions are supported through an indirection of the ``_yaml_base_loader`` defined at the beginning of that module, which determines whether the C version is available). The loaders are recursive mechanisms that build the tree structure. Note that ``yamlutil.load_tree`` creates a temporary subclass of ``AsdfLoader`` and attaches a reference to the AsdfFile instance as the ``.ctx`` attribute of that temporary subclass.

One of the hooks that pyyaml supplies is the ability to overload the method ``construct_object``. That's what the class ``yamlutil.AsdfLoader`` does. pyyaml calls this method at each node in the tree to see if anything special should be done. One could perform conversion to predefined objects here, but instead it does the following: if the node.tag attribute is handled by yaml itself (e.g., standard YAML types such as ``tag:yaml.org,2002:str``), it calls that constructor, which returns the type yaml converts it to. Otherwise:

- it converts the node to the type indicated (dict, list, or scalar type) by yaml for that node.
- it obtains the appropriate tag class (an AsdfType subclass) from the AsdfFile instance (using ``ctx.type_index.fix_yaml_tag`` to deal with version issues and match the most appropriate tag class). The new extension API does not support this "fix YAML tag" feature, so the file's ExtensionManager is not used here.
- it wraps all the node alternatives in a special asdf ``Tagged`` class instance variant where that object contains a ``._tag`` attribute that is a reference to the corresponding Tag class.

The loading process returns a tree of these Tagged object instances. This tagged_tree is then returned to the ``af`` instance (still running the ``_open_asdf()`` method), and this tree is passed to the ``_validate()`` method. (This is the major reason that the tree isn't directly converted to an object tree: jsonschema would not be able to use the final object tree for validation, besides issues related to the fact that things that don't validate may not be convertible to the designated object.)

The validate machinery is a bit confusing since there are essentially two basic approaches to how validation is done. One type of validation is for validation of schema files themselves, and the other for schemas for tags. The schema.py file is fairly involved and the details are covered elsewhere. When the validator machinery is constructed, it uses the fundamental validation files (schemas). But this doesn't handle the fact that the file being validated is yaml, not json, and that there are items in yaml not part of json, so special handling is needed. And the way it is handled is through an internal mechanism of the jsonschema library. There is a method that jsonschema calls recursively for a validator, called ``iter_errors``. The subclass of the jsonschema validator class is defined as ``schema.ASDFValidator`` and this method is overloaded in that class. Despite its name, its primary purpose is to validate the special features that yaml has, namely applying schemas associated with tags (this is not part of the normal jsonschema scheme [ahem]). It is in this method that it looks for a tag for a node and, if it exists and is in the tag index, loads the appropriate schema and applies it to the node. (jsonschemas are normally only associated with a whole json entity rather than specific nodes.)
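To make the shape of this concrete, here is a greatly simplified sketch of the idea (this is not the actual asdf implementation; the base class name and the ``schema_uri_for`` helper are hypothetical stand-ins)::

    class ASDFValidator(SomeJsonschemaValidator):  # hypothetical base class
        def iter_errors(self, instance):
            # The normal jsonschema validation of this node.
            yield from super().iter_errors(instance)

            # The asdf twist: if this node carries a YAML tag, look up
            # the schema for that tag and apply it to the node as well.
            tag = getattr(instance, "_tag", None)
            if tag is not None:
                schema = load_schema(schema_uri_for(tag))  # hypothetical helper
                yield from self.descend(instance, schema)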
While the purpose of this method is to iteratively handle errors that jsonschema detects, it has essentially been repurposed as the means of interjecting handling of tag schemas. To prevent repeated loading of the same schema, the lru caching scheme is used (from functools in the standard library), where the last n cached schemas are saved (details of how this works were recently changed to prevent a serious memory leak). In any event, a lot is going on behind the scenes in validation and it deserves its own description elsewhere.

After validation, the tagged tree is passed to ``yamlutil.tagged_tree_to_custom_tree()``, where the nodes in the tree that have special tag code are converted into the appropriate Python objects that the base asdf and extensions are aware of. This is accomplished by that function defining a walker "callback" function (defined within that function so as to pick up the ``af`` object through closure). The function then passes the callback walker to ``treeutil.walk_and_modify()``, where the tree is traversed recursively, applying the tag code associated with each tag to the more primitive tree representation and replacing such nodes with Python objects. The tree traversal starts from the top, but the objects are created from the bottom up due to recursion (well, not quite that simple). Understanding how this works is described more fully later on. The result is what ``af.tree`` is set to, after doing another tree traversal looking for special type hooks for each node. It isn't clear if there is yet any use of that feature.

Not quite that simple
---------------------

Outline of schema.py
--------------------

This module is somewhat confusing due to the many functions and methods with some variant of validate in their name. This will try to make clear what they do (a renaming of these may be in order). Here is a list of the functions/classes in ``schema.py``, their purpose, and where they sit in the order of things:

**default_ext_resolver**

**_type_to_tag:** Handles mapping python types to yaml_tags, with the addition of support for OrderedDicts.

The next 5 functions are put in the ``YAML_VALIDATORS`` dictionary, to ultimately be used by ``_create_validator`` to create the json validator object.

------

**validate_tag:** Obtains the relevant tag for the supplied instance (either built-ins or custom objects) and checks that it matches the tag supplied to the function.

**validate_propertyOrder:** Not really a validator, but rather a trick to indicate that properties should retain their order.

**validate_flowStyle:** Not really a validator, but rather a trick to store what style to use to write the elements (for yaml objects and arrays).

**validate_style:** Not really a validator, but rather a trick to store info on what style to use to write the string.

**validate_type:** Used to deal with date strings.

(It may make sense to rename the above to be more descriptive of the action than of where they are stuck in the validation machinery; e.g., ``set_propertyOrder``.)

**validate_fill_default:** Sets the default values for all properties that have a subschema that defines a default. Called indirectly in ``fill_defaults``.

**validate_remove_default:** Does the opposite; removes all properties where the value equals the subschema default. Called indirectly in ``remove_defaults``. (For this and the above, "validate" in the name mostly confuses, although it is used by the json validator.)
[These could be renamed as well since they do more than validate.]

**_create_validator:** Creates an ``ASDFValidator`` class on the fly that uses the ``jsonschema.validators`` class created. This ``ASDFValidator`` class overrides the ``iter_errors`` method that is used to handle yaml tag cases (using the ``._tag`` attribute of the node to obtain the corresponding schema for that tag; e.g., it calls ``load_schema`` to obtain the right schema when called for each node in the jsonschema machinery). What isn't clear to me is why this is done on the fly and not at least cached, since it really only handles two variants of calls (basically, which JSONSCHEMA version is to be used); otherwise it doesn't appear to vary. Admittedly, this is only created at the top level. This is called by ``get_validator``.

**class OrderedLoader:** Inherits from the ``_yaml_base_loader``, but otherwise does nothing new in the definition. The following code defines ``construct_mapping`` and then adds it as a method.

**construct_mapping:** Defined outside the ``OrderedLoader`` class, but added to that class by use of the base class ``add_constructor`` method. This function flattens the mapping and returns an ``OrderedDict`` of the property attributes. (This needs some deep understanding of how the yaml parser actually works, which is not covered here. Apparently mappings can be represented as nested trees as the yaml is originally parsed. Or something like that.)

**_load_schema:** Loads json or yaml schemas (using the ``OrderedLoader``).

**_make_schema_loader:** Defines the function ``load_schema`` using the provided resolver and ``_load_schema``.

**_make_resolver:** Sets the schema loader for http, https, file, and tag, using a dictionary where these access methods are the keys and the schema loader returns only the schema (and not the uri). These all appear to use the same schema loader.

**_load_draft4_metaschema:**

**load_custom_schema:** Deals with custom schemas.

**load_schema:** Loads a schema from the specified location (this is cached). Called for every tag encountered (uses resolver machinery). Most of the complexity is in resolving json references. Calls ``_make_schema_loader``, ``resolver``, ``reference.resolve_fragment``, ``load_schema``.

**get_validator:** Calls ``_create_validator``. Is called by ``validate`` to return the created validator.

**validate_large_literals:** Ensures the tree has no large literals (raises an error if it does).

**validate:** Uses ``get_validator`` to get a validator object and then calls its validate method, and validates any large literals using ``validate_large_literals``.

**fill_defaults:** Inserts missing attributes with their default values.

**remove_defaults:** Where the tree has attributes with a value equal to the default, strips the attribute.

**check_schema:** Checks the schema against the metaschema.

---------------

**Illustration of where these are called:**

``af._open_asdf`` calls ``af.validate``, which calls ``af._validate``, which then calls ``schema.validate`` with the tagged tree as the first argument (it can be called again if there is a custom schema).

**In schema.py:** ``validate -> get_validator -> _create_validator`` (returns ``ASDFValidator``). There are two levels of validation: those passed to the json validation machinery for the schemas themselves, and those that the tag machinery triggers when the jsonschema validator calls through ``iter_errors``. The first level handles all the tricks at the top.
The ``ASDFValidator`` uses ``load_schema``, which in turn calls ``_make_schema_loader``, then ``_load_schema``. ``_load_schema`` uses the ``OrderedLoader`` to load the schemas. Got that?

How the ASDF library works with pyyaml
--------------------------------------

A Tree Identifier
.................

There are three flavors of trees in the process of reading ASDF files; one will see many references to each in the code and description below.

**pyyaml native tree.** This consists of standard Python containers like dict and list, and primitive values like string, integer, float, etc.

**Tagged tree.** These are similar to pyyaml native trees, but with the basic types wrapped in a class that has an attribute identifying the tag associated with that node, so that later processing can apply the appropriate conversion code to convert it to the final Python object.

**Custom tree.** This is a tree where all nodes are converted to the destination Python objects. For example, a numpy array or GWCS object.

Brief overview of how pyyaml constructs a Python tree
.....................................................

Understanding the process of creating Python objects from yaml requires some understanding of how pyyaml works. We will not go into all the details of pyyaml, but instead concentrate on one phase of its loading process. First, an outline of the phases of processing that pyyaml goes through in loading a yaml file:

1. **scanning:** Converting the text into lexical tokens. Done in scanner.py.
#. **parsing:** Converting the lexical tokens into parsing events. Done in parser.py.
#. **composing:** Converting the parsing events into a tree structure of pyyaml objects. Done in composer.py.
#. **loading:** Converting the pyyaml tree into a Python object tree. Done in constructor.py.

We will focus on the last step, since that is where asdf integrates with how pyyaml works. The key object in that module is ``BaseConstructor`` and its subclasses (asdf uses ``SafeConstructor`` for security purposes). Note that the pyyaml code is severely deficient in docstrings and comments.

The key method that kicks off the conversion is ``construct_document()``. Its responsibilities are to call the ``construct_object()`` method on the top node, "drain" any generators produced by construction (more on this later), and finally reset internal data structures once construction is complete. The actual process seems somewhat mysterious because generators are used in place of vanilla code to construct the children of mutable items. The general scheme is that each constructor for mutable elements (see, as an example, the ``SafeConstructor.construct_yaml_seq()`` method) is written as a generator that is expected to be asked for a value twice. The first value returned is an empty object of the expected type (e.g., an empty dict or list); when asked a second time, it populates the previously returned object (and returns None, which is not used). (In rare exceptions, when called with ``deep=True``, it does immediately populate the child nodes.) Normally the generator is appended to the loader's ``state_generators`` attribute (a list) for later use. Any generators not handled in the recursive chain are handled when ``construct_object`` returns to ``construct_document``, where it iteratively asks each generator to complete populating its referenced object. Since that step of populating the object may in turn create new generators on the ``state_generators`` list, it only stops when no more generators appear on the list.
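For concreteness, here is the essence of that two-step generator pattern (a simplified paraphrase of ``SafeConstructor.construct_yaml_seq()``, not a verbatim copy)::

    def construct_yaml_seq(self, node):
        data = []  # first request: hand out an empty, shareable list
        yield data
        # second request: populate the list that was already handed out
        data.extend(self.construct_sequence(node))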
Why is this done? One reason is to handle references (anchors and aliases) that may be circular. Suppose one had the following yaml source::

    A: &a
      x: 1
      B:
        item1: 42
        item2: life, the universe, and everything
        circular: *a

Without generators, it would not be possible to handle this case, since the node identified by anchor ``a`` has not been fully constructed when pyyaml encounters a reference to that anchor among the same node's descendants. The use of a generator allows the container object to be created, and referenced, before it is populated, so that the above construction works when constructing the tree.

To follow the above example in more detail: the construction creates a dictionary for ``a`` and then returns to the ``construct_document()`` method, which then starts handling the generators put on the list (there is only one in this case). The generator then populates the contents of ``a``. For the attribute ``B`` it encounters a new mutable container and puts its generator on the list to handle, and then makes a reference to ``a``, which now is defined. One last time it handles the generator for ``B``, and since each item in that is not a container, the construction completes. Pyyaml tracks pending objects in a recursive objects dict and throws an exception if generators fail to handle reference cycles. (The conversion of the tagged tree to the custom tree, performed later, does not use the same technique; this is explained later.)

How ASDF hooks into pyyaml construction
.......................................

ASDF adds its own generators to this process by defining a new construct method, ``construct_undefined()``, that handles all ASDF tag cases. This is added to the pyyaml dict of construct methods under the key ``None``, which is the key pyyaml falls back to when it doesn't find a constructor for a tag. Thus the construction of unknown tags is redirected to ASDF code. That code returns a generator in the case of mutable ASDF objects, in line with how yaml works with mutable objects.

Historical note: Versions older than 2.6.0 did not work this way. Instead, those versions completely replaced the pyyaml method ``construct_object()`` with their own version that did not use generators as pyyaml did.

How conversion to ASDF objects is done
......................................

The current means of conversion is simpler for tag code to use, but also more subtle in how it actually works (for many, that means harder ;-). The YAML loading process produces a tagged tree of basic Python types. The conversion of these into ASDF types is kicked off when the ``AsdfFile`` method ``_open_asdf()`` calls ``yamlutil.tagged_tree_to_custom_tree()``. This function defines a walker function that is to be used with ``treeutil.walk_and_modify()``. Most of what the walker function does is handle tag issues (e.g., can the tag be appropriately mapped to the tag creation code) and then return the appropriate ASDF type by calling ``tag_type.from_tree_tagged()``.

A note on tree traversal. One can traverse a tree in three ways: inorder, preorder, and postorder (``asdf.info()`` uses a breadth-first traversal, yet another exciting option, which we won't describe here). These respectively mean whether nodes are visited in the horizontal ordering of the nodes as displayed on a graph (inorder), descending the tree from the root, doing the left node before the right node (preorder), or from the bottom up, doing both leaf nodes before the parent node (postorder).
In generating the pyyaml tree, preorder works, since it builds the tree from the root as one would expect in constructing a tree. But in converting the tagged tree into the custom tree, postorder is the natural course, where the children are generated first so that the parent node can refer to the final objects.

An important part of this conversion process is handled by an instance of the class ``treeutil._TreeModificationContext``. This class does much the same trick that pyyaml does with generators. Although pyyaml creates references between basic python objects, these references must be converted to references between ASDF objects, and doing so requires a similar mechanism for building the ASDF objects. The ``_TreeModificationContext`` object (hereafter context object) holds the incomplete generators in a way similar to the pyyaml ``construct_document`` function. There are differences, though. The ``_TreeModificationContext`` class provides methods to indicate if nodes are pending (i.e., incomplete), and there is a special value, ``PendingValue``, that signals that the node hasn't been handled yet (e.g., it may be referencing something yet to be done). If ``PendingValue`` persists to the end, it indicates a failure to handle circular references in the tag code. An earlier prototype implementation approached this differently: it passed dict and list subclasses that would throw an exception if a ``PendingValue`` element was accessed. That would have been more friendly to extension developers, but it was discarded because it wasn't thought worth turning all those high-performance containers into slower asdf subclasses. We may want to revisit this if we decide to implement a tree that tracks "dirty" nodes and only writes to disk those that have changed, since in that case we'll need custom container subclasses anyway. We could also consider writing our own dict/list subclass in C so we could have our cake and eat it too.

The ``walk_and_modify`` code handles the case where the tag code returns a generator instead of a value. This generator is expected to be a similar kind of generator to what pyyaml uses, but differing in that instead of returning an empty container object, it populates whatever elements it can complete (e.g., all non-mutable ones) and completes the population of all the mutable members on the second iteration (which may, in turn, generate new generators for mutable elements contained within). When it detects a generator, the ``walk_and_modify`` code retrieves the first yielded value, then saves the generator in the context. When the top level of the context is reached (it handles nesting by indicating how many times it has been entered as a context), it starts "draining" the saved generators by doing the second iteration on them. Like pyyaml, this second iteration may produce yet more generators that get saved, and thus it keeps iterating on the saved generators until none are left. It is not possible to construct reference cycles in immutable objects within pure Python code, and thus the generators are only needed for mutable constructs (e.g., dicts and lists).
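As an illustration, tag code that needs to defer population of mutable members can be written along these lines (a hypothetical ``from_tree_tagged`` for an invented type; this is not code from the library)::

    @classmethod
    def from_tree_tagged(cls, tree, ctx):
        obj = cls()
        obj.name = tree["name"]    # immutable members can be set immediately
        yield obj                  # hand back the partially built object first
        obj.child = tree["child"]  # mutable (possibly circular) members are
                                   # filled in on the second iteration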
Historical note: versions of the ASDF library prior to 2.6.0 required tag code, when converting from a tagged object to a custom object, to call ``tagged_tree_to_custom_tree`` on any values of attributes that might be arbitrarily nested objects. That is no longer needed with the latest code, since any attribute that contains a mapping or sequence object automatically uses a generator, so population of that attribute is automatically deferred until the context is exited. Thus there is no need to explicitly call a function to populate it.

More explicitly, the ``_recurse`` function defined within ``walk_and_modify`` (in this postorder case) calls ``_handle_children()`` on the node in question first. If the node contains children, they are each fed back into ``_recurse`` and transformed into their final objects. A new node is populated with these transformed children, and that is the node that gets handed to ``tag.from_tree_tagged()``. The effect is that the tag class receives a structure containing only transformed children, so it has no need to call ``tagged_tree_to_custom_tree`` on its own.

Future plans for SerializationContext
-------------------------------------

Currently, the ``AsdfFile`` itself is used as a container for serialization parameters and is passed to various methods in block.py, reference.py, schema.py, yamlutil.py, in ``ExtensionType`` subclasses, and others. This doesn't work very well for a couple of reasons. For one, the intention of ``AsdfFile.write_to`` is to "export" a copy of the file to disk without changing the in-memory ``AsdfFile``, but since serialization parameters are read from the ``AsdfFile``, the code currently modifies the open file as part of the write (and doesn't change it back). The second issue is that requiring an ``AsdfFile`` instance in so many method signatures forces the code (or users themselves) to create an empty dummy ``AsdfFile`` just to use the method.

The new ``Converter`` interface also accepts a ``ctx`` variable, but instead of an ``AsdfFile`` it's an instance of ``SerializationContext``. This new object will serve the purpose of configuring serialization parameters and keeping necessary state, which means that the ``AsdfFile`` can go unmodified. The ``SerializationContext`` will be relatively lightweight, and creating it will not incur as much of a performance penalty as creating an ``AsdfFile``.

asdf-2.9.2/docs/asdf/developer_versioning.rst

Tag Versioning and You
======================

Here we'll explore ASDF tag versioning, and walk through the process of supporting new and updated tags with AsdfType subclasses. AsdfType is the original API that is currently used to support the ASDF core tags. The new API, Converter, remains experimental and is currently (2020-09-24) being trialled in the asdf-astropy package.

ASDF versioning conventions
---------------------------

The ASDF Standard document provides a helpful overview_ of the various ASDF versioning conventions. We will be concerned with the *standard version* and individual *tag versions*.

.. _overview: https://asdf-standard.readthedocs.io/en/latest/versioning.html

Overview
--------

The "standard version" or "ASDF Standard version" refers to the subset of individual tag versions that correspond to a specific release version of the ASDF Standard. The list of tags and versions is maintained in version_map files in the asdf-standard repository. For example, version_map-1.3.0.yaml contains a list of all tag versions that we must handle in order to fully support version 1.3.0 of the ASDF Standard. This list contains both "core" tags and non-core tags.
The distinction there is that core tags are supported by this library, while the others are supported by some external Python library, such as astropy.

Our support for specific versions of the ASDF core tags is implemented with AsdfType subclasses. We'll discuss these more later, but for now the important thing to know is that each AsdfType class identifies the tag name and version(s) that it supports. Any core tag objects that lack this support will not serialize or deserialize properly.

When reading an ASDF file, the standard version doesn't play a significant role. Each core object is self-described by a YAML tag, which will be used to deserialize the object even if that tag conflicts with the overall standard version of the file. The library will use the tag to identify the most appropriate AsdfType to deserialize the object.

On write, the situation is different. The library may have a choice in which tag and/or AsdfType to use when serializing a given core object -- if multiple versions of the same tag are present, which shall we choose? Here the standard version becomes important. The tag version selected is specified by the version map of the standard version that the file is being written under. By default, the standard version used for writes is the latest offered, but users may override with another version.

Implementation details
----------------------

Supported ASDF standard version list
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The list of supported ASDF standard versions is maintained in ``asdf.versioning.supported_versions``. The default version, ``asdf.versioning.default_version``, is applied whenever a user declines to specify the standard version of a new file, and is set to the latest supported version.

AsdfType
~~~~~~~~

In this library, each core tag is handled by a distinct ``asdf.types.AsdfType`` subclass. The AsdfType subclass is responsible for identifying the base name of its tag and the tag version(s) that it supports. It also provides any custom serialization/deserialization behavior that is required -- AsdfType provides a default implementation that is only able to get and set attributes on dict-like objects.

In some cases, the AsdfType subclass also serves as the deserialized object type. For example, ``asdf.types.core.Software`` subclasses both AsdfType and dict. Its AsdfType-like behavior is to identify its tag and version, while its dict-like behavior is to act as a container for the attributes described by the tag. The class definition is mostly empty because, as a dict, it can rely on AsdfType's default implementation for (de)serialization.

Meanwhile, other AsdfType subclasses deserialize ASDF objects into instances of entirely separate classes. For example, ``asdf.types.core.complex.ComplexType`` handles complex number types, which aren't natively supported by YAML. ComplexType includes an additional class attribute, ``types``, that lists the types that it is able to handle. It also provides custom implementations of the ``to_tree`` and ``from_tree`` class methods, which enable it to serialize a complex value into the appropriate string, and later rebuild the complex value from that string. This additional code is necessary because ComplexType does not (de)serialize itself.

We won't find an explicit list of AsdfType subclasses in the code; that list is assembled at runtime by AsdfType's metaclass, ``asdf.types.AsdfTypeMeta``. The list can be inspected in the console like so:
.. code-block:: python

    >>> import asdf
    >>> asdf.types._all_asdftypes  # doctest: +SKIP
    ...

The AsdfType class attributes relevant to versioning are as follows:

- *name*: the base name of the tag, without its version string. For example, the tag URI ``tag:stsci.edu:asdf/core/example-1.2.0`` will have a name value of ``"core/example"``.
- *version*: the primary tag version supported by the AsdfType. For the example above, version should be set to ``"1.2.0"``. This should be the latest version that the tag supports.
- *supported_versions*: a set of tag versions that the AsdfType supports. In the above example, this might be ``{"1.0.0", "1.1.0", "1.2.0"}``.

AsdfType selection rules
~~~~~~~~~~~~~~~~~~~~~~~~

On read, the library will ideally be able to identify an AsdfType subclass that explicitly supports a given tag (either in the ``version`` class attribute or in ``supported_versions``). If that is not possible, it proceeds as follows:

- Use the AsdfType that supports the latest version that is less than the tag version. For example, if the tag is example-1.2.0, and AsdfTypes are available for 1.1.0 and 1.3.0, it will use the 1.1.0 subclass.
- If the above fails, use the earliest available AsdfType.
- If no AsdfType exists that supports any version of that tag, then ASDF will deserialize the data into vanilla dict, list, and scalar values.

The library does not currently emit a warning in either of the first two cases, but in the third case, a warning is emitted. The rules for selecting an AsdfType for a given tag are implemented by ``asdf.type_index.AsdfTypeIndex.fix_yaml_tag``.

On write, the library will read the version map that corresponds to the ASDF Standard version in use, which dictates the subset of tag versions that are available. From the subset of AsdfType subclasses that handle those tag versions, it selects the subclass that is able to handle the type of the core object being serialized. If an object is not supported by an AsdfType, its serialization will be handled by pyyaml. If pyyaml doesn't know how to serialize it, it will raise ``yaml.representer.RepresenterError``. The rules for selecting an AsdfType for a given serializable object are implemented by ``asdf.type_index.AsdfTypeIndex.from_custom_type``.

Implementing updates to the standard
------------------------------------

Let's assume that there is a new standard version, 2.0.0, which includes one entirely new core tag, ``core/new_object-1.0.0``, one backwards-compatible update to an existing tag, ``core/updated_object-1.1.0``, and one breaking change to an existing tag, ``core/breaking_object-2.0.0``. The following sections walk through the steps we'll need to take to support this new material.

Update the asdf-standard submodule commit pointer
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The asdf-standard repository is integrated into the asdf repository as a submodule. To pull in new commits from the remote master (assumed to be named ``origin``):

.. code-block:: console

    $ cd asdf-standard
    $ git fetch origin
    $ git checkout origin/master

Support the new standard version
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The list can be found in ``asdf.versioning.supported_versions``. Add ``AsdfVersion("2.0.0")`` to the end of the list (maintaining the sort order). This new version will become the default for new files, but we can update the definition of ``asdf.versioning.default_version`` if that is undesirable.

Support the new tag
~~~~~~~~~~~~~~~~~~~

Tags for previously unsupported objects are straightforward, since we don't need to worry about compatibility issues.
Create a new AsdfType subclass with ``name`` and ``version`` set appropriately:

.. code-block:: python

    class NewObjectType(AsdfType):
        name = "core/new_object"
        version = "1.0.0"

In a real-life scenario, we'd need to actually support (de)serialization in some way, but those details are beyond the scope of this document.

Support the backwards-compatible tag
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Since our updated_object-1.1.0 is backwards-compatible, we can share the same AsdfType subclass between it and the previous version. Presumably there exists an AsdfType that looks something like this:

.. code-block:: python

    class UpdatedObjectType(AsdfType):
        name = "core/updated_object"
        version = "1.0.0"

We'll need to update the version, and list 1.0.0 as a supported version, so that this class can continue to handle it:

.. code-block:: python

    class UpdatedObjectType(AsdfType):
        name = "core/updated_object"
        version = "1.1.0"
        supported_versions = {"1.0.0", "1.1.0"}

Support the breaking tag
~~~~~~~~~~~~~~~~~~~~~~~~

The tag with breaking changes, core/breaking_object-2.0.0, may not be easily supported by the same AsdfType as the previous version. In that case, we can create a new AsdfType for 2.0.0, and as long as the two subclasses have distinct ``version`` values and non-overlapping ``supported_versions`` sets, they should coexist peaceably. If this is the existing AsdfType:

.. code-block:: python

    class BreakingObjectType(AsdfType):
        name = "core/breaking_object"
        version = "1.0.0"

The new AsdfType might look something like this:

.. code-block:: python

    class BreakingObjectType2(AsdfType):
        name = "core/breaking_object"
        version = "2.0.0"

**CAUTION:** We might be tempted here to simply update the original BreakingObjectType, but failing to handle an older version of the tag constitutes dropping support for any ASDF Standard version that relies on that tag. This should only be done after a deprecation period and with a major version release of the library, since files written by an older release will not be readable by the new code.

asdf-2.9.2/docs/asdf/extending/compressors.rst

.. currentmodule:: asdf.extension

.. _extending_compressors:

========================
Binary block compressors
========================

The `Compressor` interface provides an implementation of a compression algorithm that can be used to transform binary blocks in an `~asdf.AsdfFile`. Each Compressor must provide a 4-byte compression code that identifies the algorithm. Once the Compressor is installed as part of an Extension plugin, this code will be available to users as an argument to `~asdf.AsdfFile.set_array_compression` and the ``all_array_compression`` argument to `~asdf.AsdfFile.write_to` and `~asdf.AsdfFile.update`. See :ref:`extending_extensions_compressors` for details on including a Compressor in an extension.

The Compressor interface
========================

Every Compressor implementation must provide one required property and two required methods:

`Compressor.label` - A 4-byte compression code.
This code is used by users to select a compression algorithm and is also stored in the binary block header to identify the algorithm that was applied to the block's data.

`Compressor.compress` - The method that transforms the block's bytes before they are written to an ASDF file. The positional argument is a `memoryview` object which is guaranteed to be 1D and contiguous. Compressors must be prepared to handle `memoryview.itemsize` > 1. Any keyword arguments are passed through from the user and may be used to tune the compression algorithm. ``compress`` methods have no return value and instead are expected to yield bytes-like values until the input data has been fully compressed.

`Compressor.decompress` - The method that transforms the block's bytes after they are read from an ASDF file. The first positional argument is an `~collections.abc.Iterable` of bytes-like objects that each contain a chunk of the compressed input data. The second positional argument is a pre-allocated output array where the decompressed bytes should be written. The method is expected to return the number of bytes written to the output array.
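Pulling these requirements together, a Compressor implementation might be sketched like this (``foo_compression_lib`` and its functions are hypothetical stand-ins for a real compression library):

.. code-block:: python

    from asdf.extension import Compressor


    class FooCompressor(Compressor):
        @property
        def label(self):
            return b"foo!"  # exactly 4 bytes, stored in the block header

        def compress(self, data, **kwargs):
            import foo_compression_lib  # deferred import (see below)

            # Yield compressed chunks; here, the whole buffer at once.
            yield foo_compression_lib.compress(bytes(data), **kwargs)

        def decompress(self, blocks, out, **kwargs):
            import foo_compression_lib  # deferred import (see below)

            result = foo_compression_lib.decompress(b"".join(blocks))
            n = len(result)
            out[:n] = result  # assumes ``out`` accepts buffer slice assignment
            return n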
Entry point performance considerations
======================================

For the good of `asdf` users everywhere, it's important that entry point methods load as quickly as possible. All extensions must be loaded before reading an ASDF file, and therefore all compressors are created as well. Any compressor module or ``__init__`` method that lingers will introduce a delay to the initial call to `asdf.open`. For that reason, we recommend that compressor authors minimize the number of imports that occur in the module containing the Compressor implementation, and defer imports of compression libraries to inside the `Compressor.compress` and `Compressor.decompress` methods. This will prevent the library from ever being imported when reading ASDF files that do not utilize the Compressor's algorithm.

asdf-2.9.2/docs/asdf/extending/converters.rst

.. currentmodule:: asdf.extension

.. _extending_converters:

==========
Converters
==========

The `~asdf.extension.Converter` interface defines a mapping between tagged objects in the ASDF tree and their corresponding Python object(s). Typically a Converter will map one YAML tag to one Python type, but the interface also supports many-to-one and many-to-many mappings. A Converter provides the software support for a tag and is responsible both for converting from parsed YAML to more complex Python objects and vice versa.

The Converter interface
=======================

Every Converter implementation must provide two required properties and two required methods:

`Converter.tags` - a list of tag URIs or URI patterns handled by the converter. Patterns may include the wildcard character ``*``, which matches any sequence of characters up to a ``/``, or ``**``, which matches any sequence of characters. The `~asdf.util.uri_match` method can be used to test URI patterns.

`Converter.types` - a list of Python types or fully-qualified Python type names handled by the converter. Note that a string name must reflect the actual location of the class's implementation and not just a module where it is imported for convenience. For example, if class ``Foo`` is implemented in ``example_package.foo.Foo`` but imported as ``example_package.Foo`` for convenience, it is the former name that must be used. The `~asdf.util.get_class_name` method will return the name that `asdf` expects. The string type name is recommended over a type object for performance reasons; see :ref:`extending_converters_performance`.

`Converter.to_yaml_tree` - a method that accepts a complex Python object and returns a simple node object (typically a `dict`) suitable for serialization to YAML. The node is permitted to contain nested complex objects; these will in turn be passed to other ``to_yaml_tree`` methods in other Converters.

`Converter.from_yaml_tree` - a method that accepts a simple node object from parsed YAML and returns the appropriate complex Python object. Nested nodes in the received node will have already been converted to complex objects by other calls to ``from_yaml_tree`` methods, except where reference cycles are present -- see :ref:`extending_converters_reference_cycles` for information on how to handle that situation.

Additionally, the Converter interface includes a method that must be implemented when some logic is required to select the tag to assign to a ``to_yaml_tree`` result:

`Converter.select_tag` - a method that accepts a complex Python object and a list of candidate tags and returns the tag that should be used to serialize the object.

A simple example
================

Say we have a Python class, ``Rectangle``, that we wish to serialize to an ASDF file. A ``Rectangle`` instance has two attributes, width and height, and a convenient method that computes its area:

.. code-block:: python

    # in module example_package.shapes
    class Rectangle:
        def __init__(self, width, height):
            self.width = width
            self.height = height

        def get_area(self):
            return self.width * self.height

We'll need to designate a tag URI to represent this object's type in the ASDF tree -- let's use ``asdf://example.com/shapes/tags/rectangle-1.0.0``. Here is a simple Converter implementation for this type and tag:

.. code-block:: python

    from asdf.extension import Converter


    class RectangleConverter(Converter):
        tags = ["asdf://example.com/shapes/tags/rectangle-1.0.0"]
        types = ["example_package.shapes.Rectangle"]

        def to_yaml_tree(self, obj, tag, ctx):
            return {
                "width": obj.width,
                "height": obj.height,
            }

        def from_yaml_tree(self, node, tag, ctx):
            from example_package.shapes import Rectangle

            return Rectangle(node["width"], node["height"])

Note that import of the ``Rectangle`` class has been deferred to inside the ``from_yaml_tree`` method. This is a performance consideration that is discussed in :ref:`extending_converters_performance`.

In order to use this Converter, we'll need to create a simple extension around it and install that extension:

.. code-block:: python

    import asdf
    from asdf.extension import Extension


    class ShapesExtension(Extension):
        extension_uri = "asdf://example.com/shapes/extensions/shapes-1.0.0"
        converters = [RectangleConverter()]
        tags = ["asdf://example.com/shapes/tags/rectangle-1.0.0"]


    asdf.get_config().add_extension(ShapesExtension())

Now we can include a Rectangle object in an `~asdf.asdf.AsdfFile` tree and write out a file:

.. code-block:: python

    with asdf.AsdfFile() as af:
        af["rect"] = Rectangle(5, 4)
        af.write_to("test.asdf")

The portion of the ASDF file that represents the rectangle looks like this:

.. code-block:: yaml

    rect: !<asdf://example.com/shapes/tags/rectangle-1.0.0> {height: 4, width: 5}
Multiple tags
=============

Now say we want to map our one Rectangle class to one of two tags, either rectangle-1.0.0 or square-1.0.0. We'll need to add square-1.0.0 to the converter's list of tags and implement a ``select_tag`` method:

.. code-block:: python

    RECTANGLE_TAG = "asdf://example.com/shapes/tags/rectangle-1.0.0"
    SQUARE_TAG = "asdf://example.com/shapes/tags/square-1.0.0"


    class RectangleConverter(Converter):
        tags = [RECTANGLE_TAG, SQUARE_TAG]
        types = ["example_package.shapes.Rectangle"]

        def select_tag(self, obj, tags, ctx):
            if obj.width == obj.height:
                return SQUARE_TAG
            else:
                return RECTANGLE_TAG

        def to_yaml_tree(self, obj, tag, ctx):
            if tag == SQUARE_TAG:
                return {
                    "side_length": obj.width,
                }
            else:
                return {
                    "width": obj.width,
                    "height": obj.height,
                }

        def from_yaml_tree(self, node, tag, ctx):
            from example_package.shapes import Rectangle

            if tag == SQUARE_TAG:
                return Rectangle(node["side_length"], node["side_length"])
            else:
                return Rectangle(node["width"], node["height"])

.. _extending_converters_reference_cycles:

Reference cycles
================

Special considerations must be made when deserializing a tagged object that contains a reference to itself among its descendants. Consider a `fractions.Fraction` subclass that maintains a reference to its multiplicative inverse:

.. code-block:: python

    # in the example_project.fractions module
    class FractionWithInverse(fractions.Fraction):
        def __init__(self, *args, **kwargs):
            self._inverse = None

        @property
        def inverse(self):
            return self._inverse

        @inverse.setter
        def inverse(self, value):
            self._inverse = value

Since the inverse of the inverse of a fraction is the fraction itself, we might wish to construct the objects in the following way:

.. code-block:: python

    f1 = FractionWithInverse(3, 5)
    f2 = FractionWithInverse(5, 3)
    f1.inverse = f2
    f2.inverse = f1

This creates an "infinite loop" between the two fractions. An ordinary Converter wouldn't be able to deserialize this, since each fraction requires that the other be deserialized first! Let's see what happens when we define our ``from_yaml_tree`` method in a naive way:

.. code-block:: python

    class FractionWithInverseConverter(Converter):
        tags = ["asdf://example.com/fractions/tags/fraction-1.0.0"]
        types = ["example_project.fractions.FractionWithInverse"]

        def to_yaml_tree(self, obj, tag, ctx):
            return {
                "numerator": obj.numerator,
                "denominator": obj.denominator,
                "inverse": obj.inverse,
            }

        def from_yaml_tree(self, node, tag, ctx):
            from example_project.fractions import FractionWithInverse

            obj = FractionWithInverse(
                node["numerator"],
                node["denominator"]
            )
            obj.inverse = node["inverse"]
            return obj

After adding this Converter to an Extension and installing it, the fraction will serialize correctly:

.. code-block:: python

    with asdf.AsdfFile({"fraction": f1}) as af:
        af.write_to("with_inverse.asdf")

But upon deserialization, we notice a problem:

.. code-block:: python

    with asdf.open("with_inverse.asdf") as af:
        reconstituted_f1 = af["fraction"]
        assert reconstituted_f1.inverse.inverse is asdf.treeutil.PendingValue

The presence of `~asdf.treeutil.PendingValue` is asdf's way of telling us that the value corresponding to the key ``inverse`` was not fully deserialized at the time that we retrieved it. We can handle this situation by making our ``from_yaml_tree`` a generator function:
.. code-block:: python

    def from_yaml_tree(self, node, tag, ctx):
        from example_project.fractions import FractionWithInverse

        obj = FractionWithInverse(
            node["numerator"],
            node["denominator"]
        )
        yield obj
        obj.inverse = node["inverse"]

The generator version of ``from_yaml_tree`` yields the partially constructed ``FractionWithInverse`` object before setting its inverse property. This allows `asdf` to proceed to constructing the inverse ``FractionWithInverse`` object, and resume the original ``from_yaml_tree`` execution only when the inverse is actually available. With this modification we can successfully deserialize our ASDF file:

.. code-block:: python

    with asdf.open("with_inverse.asdf") as af:
        reconstituted_f1 = af["fraction"]
        assert reconstituted_f1.inverse.inverse is reconstituted_f1

.. _extending_converters_performance:

Entry point performance considerations
======================================

For the good of `asdf` users everywhere, it's important that entry point methods load as quickly as possible. All extensions must be loaded before reading an ASDF file, and therefore all converters are created as well. Any converter module or ``__init__`` method that lingers will introduce a delay to the initial call to `asdf.open`. For that reason, we recommend that converter authors minimize the number of imports that occur in the module containing the Converter implementation, and defer imports of serializable types to within the ``from_yaml_tree`` method. This will prevent the type from ever being imported when reading ASDF files that do not contain the associated tag.

asdf-2.9.2/docs/asdf/extending/extensions.rst

.. currentmodule:: asdf.extension

.. _extending_extensions:

==========
Extensions
==========

An ASDF "extension" is a supplement to the core ASDF specification that describes additional YAML tags or binary block compressors which may be used when writing files. In this library, extensions implement the `Extension` interface and can be installed manually by the user or automatically by a package using Python's entry points mechanism.

Extension features
==================

Basics
------

Every extension to ASDF must be uniquely identified by a URI; this URI is written to the file's metadata when the extension is used and allows software to determine if the necessary extensions are installed when the file is read. An ASDF extension implementation intended for use with this library must, at a minimum, implement the `Extension` interface and provide its URI as a property:

.. code-block:: python

    from asdf.extension import Extension


    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"

Note that this is an "empty" extension that does not extend the library in any meaningful way; other attributes must be implemented to actually support additional tags and/or compressors. Read on for a description of the rest of the Extension interface.

Additional tags
---------------

In order to implement support for additional YAML tags, an Extension subclass must provide both a list of relevant tags and a list of `Converter` instances that translate objects with those tags to and from YAML. These lists are provided in the ``tags`` and ``converters`` properties, respectively:

.. code-block:: python

    from asdf.extension import Extension, Converter


    class FooConverter(Converter):
        # ...

    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"
        tags = ["asdf://example.com/example-project/tags/foo-1.0.0"]
        converters = [FooConverter()]

The implementation of a Converter is a topic unto itself and is discussed in detail in :ref:`extending_converters`.

The Extension implemented above will happily convert between ``foo-1.0.0`` tagged YAML objects and the appropriate Python representation, but it will not perform any schema validation. In order to associate the tag with a schema, we'll need to provide a `TagDefinition` object instead of just a string:

.. code-block:: python

    from asdf.extension import Extension, Converter, TagDefinition


    class FooConverter(Converter):
        # ...


    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"
        tags = [
            TagDefinition(
                "asdf://example.com/example-project/tags/foo-1.0.0",
                schema_uri="asdf://example.com/example-project/schemas/foo-1.0.0",
            )
        ]
        converters = [FooConverter()]

.. _extending_extensions_compressors:

Additional block compressors
----------------------------

Binary block compressors implement the `Compressor` interface and are included in an extension via the ``compressors`` property:

.. code-block:: python

    from asdf.extension import Extension, Compressor


    class FooCompressor(Compressor):
        # ...


    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"
        compressors = [FooCompressor()]

See :ref:`extending_compressors` for details on implementing the Compressor interface.

Additional YAML tag handles
---------------------------

The YAML format permits use of "tag handles" as shorthand prefixes in tags. For example, these two YAML files are equivalent:

.. code-block:: yaml

    %YAML 1.1
    ---
    value: !<asdf://example.com/example-project/tags/foo-1.0.0>
      # etc
    ...

.. code-block:: yaml

    %YAML 1.1
    %TAG !example! asdf://example.com/example-project/tags/
    ---
    value: !example!foo-1.0.0
      # etc
    ...

In both cases the ``value`` object has tag asdf://example.com/example-project/tags/foo-1.0.0, but in the second example the tag is abbreviated as ``!example!foo-1.0.0`` through use of a handle. This has no impact on the interpretation of the file but can make the raw ASDF tree easier to read for humans.

Tag handles can be defined in the ``yaml_tag_handles`` property of an extension:

.. code-block:: python

    from asdf.extension import Extension


    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"
        yaml_tag_handles = {
            "!example!": "asdf://example.com/example-project/tags/"
        }

ASDF Standard version requirement
---------------------------------

Some extensions may only work with specific version(s) of the ASDF Standard -- for example, the schema associated with one of an extension's tags may reference specific versions of ASDF core tags. This requirement can be expressed as a PEP 440 version specifier in an Extension's ``asdf_standard_requirement`` property:

.. code-block:: python

    from asdf.extension import Extension


    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"
        asdf_standard_requirement = ">= 1.2.0, < 1.5.0"

Now the extension will only be used with files whose ASDF Standard version satisfies that requirement (here, versions 1.2.0 through 1.4.0).

Legacy class names
------------------

Previous versions of this library referred to extensions by their Python class names instead of by URI. These class names were written to ASDF file metadata and allowed the library to warn users when an extension used to write the file was not available on read.
Now the extension URI is written to the metadata, but to prevent warnings when
reading older files, extension authors can provide an additional list of class
names that previously identified the extension:

.. code-block:: python

    from asdf.extension import Extension

    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"
        legacy_class_names = [
            "foo_package.extensions.FooExtension",
        ]

.. _exposing_extension_object_internals:

Making converted object's contents visible to `info` and `search`
------------------------------------------------------------------

If the object produced by the extension supports a class method
`.__asdf_traverse__`, then it can be used by those tools to expose the
contents of the object.  That method should accept no arguments and return
either a dict of attributes and their values, or a list if the object itself
is list-like.

.. _extending_extensions_installing:

Installing an extension
=======================

Once an extension is implemented, it must be installed so that the `asdf`
library knows to use it.  There are two options for installing an extension:
manually per session using `~asdf.config.AsdfConfig`, or automatically for
every session using the ``asdf.extensions`` entry point.

.. _extending_extensions_installing_asdf_config:

Installing extensions via AsdfConfig
------------------------------------

The simplest way to install an extension is to add it at runtime using the
`AsdfConfig.add_extension <asdf.config.AsdfConfig.add_extension>` method.
For example, the following code defines and installs a minimal extension:

.. code-block:: python

    import asdf
    from asdf.extension import Extension

    class FooExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/foo-1.0.0"

    asdf.get_config().add_extension(FooExtension())

Now the extension will be available when working with ASDF files, but only
for the duration of the current Python session.

.. _extending_extensions_installing_entry_points:

Installing extensions via entry points
--------------------------------------

The `asdf` package also offers an entry point for installing extensions.
This registers a package's extensions automatically on package install
without requiring calls to the AsdfConfig method.  The entry point is
called ``asdf.extensions`` and expects to receive a method that returns
a list of ``Extension`` instances.

For example, let's say we're creating a package named
``asdf-foo-extension`` that provides the not-particularly-useful
``FooExtension`` from the previous section.  We'll need to define an entry
point method that returns a list containing an instance of ``FooExtension``:

.. code-block:: python

    def get_extensions():
        return [FooExtension()]

We'll assume that method is located in the module
``asdf_foo_extension.integration``.

Next, in the package's ``setup.cfg``, define an ``[options.entry_points]``
section that identifies the method as an ``asdf.extensions`` entry point:

.. code-block:: cfg

    # setup.cfg
    [options.entry_points]
    asdf.extensions =
        asdf_foo_extension = asdf_foo_extension.integration:get_extensions

After installing the package, the extension should be automatically available
in any new Python session.

Entry point performance considerations
--------------------------------------

For the good of `asdf` users everywhere, it's important that entry point
methods load as quickly as possible.  All extensions must be loaded before
reading an ASDF file, so any entry point method that lingers will introduce
a delay to the initial call to `asdf.open`.
For that reason, we recommend that extension authors minimize the number of imports that occur in the module containing the entry point method, particularly imports of modules outside of the Python standard library or `asdf` itself. .. _extending_extensions_manifest: Populating an extension from a manifest ======================================= An "extension manifest" is a language-independent description of an ASDF extension (little 'e') that includes information such as the extension URI, list of tags, ASDF Standard requirement, etc. Instructions on writing a manifest can be found in :ref:`extending_manifests`, but once written, we'll still need a Python Extension (big 'E') whose content mirrors the manifest. Rather than duplicate that information in Python code, we recommend use of the `ManifestExtension` class, which reads a manifest and maps its content to the appropriate Extension interface properties. Assuming the manifest is installed as a resource (see :ref:`extending_resources`), an extension instance can be created using the ``from_uri`` factory method: .. code-block:: python from asdf.extension import ManifestExtension extension = ManifestExtension.from_uri("asdf://example.com/example-project/manifests/foo-1.0.0") Compressors and converters can be included in the extension by adding them as keyword arguments: .. code-block:: python from asdf.extension import ManifestExtension extension = ManifestExtension.from_uri( "asdf://example.com/example-project/manifests/foo-1.0.0", converters=[FooConverter()], compressors=[FooCompressor()], ) The extension may then be installed by one of the two methods described above. Warning on ManifestExtension and entry points --------------------------------------------- When implementing a package that automatically installs a ManifestExtension, we'll need to utilize both the ``asdf.resource_mappings`` entry point (to install the manifest) and the ``asdf.extensions`` entry point (to install the extension). Because the manifest must be installed before the extension can be instantiated, it's easy to end up trapped in an import loop. For example, this seemingly innocuous set of entry point methods cannot be successfully loaded: .. code-block:: python from asdf.extension import ManifestExtension RESOURCES = { "asdf://example.com/example-project/manifests/foo-1.0.0": open("foo-1.0.0.yaml").read() } def get_resource_mappings(): return [RESOURCES] EXTENSION = ManifestExtension.from_uri("asdf://example.com/example-project/manifests/foo-1.0.0") def get_extensions(): return [EXTENSION] When the module is imported, ``ManifestExtension.from_uri`` asks the `asdf` library to load all available resources so that it can retrieve the manifest content. But loading the resources requires importing this module to get at the ``get_resource_mappings`` method, so now we're stuck! The solution is to instantiate the ManifestExtension inside of its entry point method: .. code-block:: python def get_extensions(): return [ ManifestExtension.from_uri("asdf://example.com/example-project/manifests/foo-1.0.0") ] This is not as inefficient as it might seem, since the `asdf` library only calls the method once and reuses a cached result thereafter. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/docs/asdf/extending/legacy.rst0000644000537500020070000010063100000000000021427 0ustar00wjamiesonSTSCI\science.. currentmodule:: asdf.extensions .. 
_extending_legacy: Deprecated extension API ======================== This page documents the original `asdf` extension API, which has been deprecated in favor of :ref:`extending_extensions`. Since support for the deprecated API will be removed in `asdf` 3.0, we recommend that all new extensions be implemented with the new API. Extensions provide a way for ASDF to represent complex types that are not defined by the ASDF standard. Examples of types that require custom extensions include types from third-party libraries, user-defined types, and complex types that are part of the Python standard library but are not handled in the ASDF standard. From ASDF's perspective, these are all considered 'custom' types. Supporting new types in ASDF is easy. Three components are required: 1. A YAML Schema file for each new type. 2. A tag class (inheriting from `asdf.CustomType`) corresponding to each new custom type. The class must override `~asdf.CustomType.to_tree` and `~asdf.CustomType.from_tree` from `asdf.CustomType` in order to define how ASDF serializes and deserializes the custom type. 3. A Python class to define an "extension" to ASDF, which is a set of related types. This class must implement the `asdf.AsdfExtension` abstract base class. In general, a third-party library that defines multiple custom types can group them all in the same extension. .. note:: The mechanisms of tag classes and extension classes are specific to this particular implementation of ASDF. As of this writing, this is the only complete implementation of the ASDF Standard. However, other language implementations may use other mechanisms for processing custom types. All implementations of ASDF, regardless of language, will make use of the same schemas for abstract data type definitions. This allows all ASDF files to be language-agnostic, and also enables interoperability. An Example ---------- As an example, we will write an extension for ASDF that allows us to represent Python's standard `fractions.Fraction` class for representing rational numbers. We will call our new ASDF type ``fraction``. First, the YAML Schema, defining the type as a pair of integers: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fraction-1.0.0" title: An example custom type for handling fractions tag: "tag:nowhere.org:custom/fraction-1.0.0" type: array items: type: integer minItems: 2 maxItems: 2 ... Then, the Python implementation of the tag class and extension class. See the `asdf.CustomType` and `asdf.AsdfExtension` documentation for more information: .. runcode:: hidden import os import asdf # This is a hack in order to get the example below to work properly __file__ = os.path.join(asdf.__path__[0], 'tests', 'data', 'fraction-1.0.0.yaml') .. 
runcode::

    import os

    import asdf
    from asdf import util

    import fractions

    class FractionType(asdf.CustomType):
        name = 'fraction'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [fractions.Fraction]

        @classmethod
        def to_tree(cls, node, ctx):
            return [node.numerator, node.denominator]

        @classmethod
        def from_tree(cls, tree, ctx):
            return fractions.Fraction(tree[0], tree[1])

    class FractionExtension(asdf.AsdfExtension):
        @property
        def types(self):
            return [FractionType]

        @property
        def tag_mapping(self):
            return [('tag:nowhere.org:custom',
                     'http://nowhere.org/schemas/custom{tag_suffix}')]

        @property
        def url_mapping(self):
            return [('http://nowhere.org/schemas/custom/',
                     util.filepath_to_url(os.path.dirname(__file__)) +
                     '/{url_suffix}.yaml')]

Note that the method `~asdf.CustomType.to_tree` of the tag class
``FractionType`` defines how the library converts `fractions.Fraction` into a
tree that can be stored by ASDF.  Conversely, the method
`~asdf.CustomType.from_tree` defines how the library reads a serialized
representation of the object and converts it back into an instance of
`fractions.Fraction`.

Note that the values of the `~asdf.CustomType.name`,
`~asdf.CustomType.organization`, `~asdf.CustomType.standard`, and
`~asdf.CustomType.version` fields are all reflected in the ``id`` and ``tag``
definitions in the schema.  Note also that the base of the ``tag`` value (up
to the `name` and `version` components) is reflected in the
`~asdf.AsdfExtension.tag_mapping` property of the `FractionExtension` type,
which is used to map tags to URLs.  The `~asdf.AsdfExtension.url_mapping` is
used to map URLs (of the same form as the ``id`` field in the schema) to the
actual location of a schema file.

Once these classes and the schema have been defined, we can save an ASDF file
using them:

.. runcode::

    tree = {'fraction': fractions.Fraction(10, 3)}
    with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff:
        ff.write_to("test.asdf")

.. asdf:: test.asdf ignore_unrecognized_tag

Defining custom types
---------------------

In the example above, we showed how to create an extension that is capable of
serializing `fractions.Fraction`.  The custom tag type that we created was
defined as a subclass of `asdf.CustomType`.

Custom type attributes
**********************

We overrode the following attributes of `~asdf.CustomType` in order to define
`FractionType` (each bullet is also a link to the API documentation):

* `~asdf.CustomType.name`
* `~asdf.CustomType.organization`
* `~asdf.CustomType.version`
* `~asdf.CustomType.standard`
* `~asdf.CustomType.types`

Each of these attributes is important, and each is described in more detail in
the linked API documentation.  The choice of `~asdf.CustomType.name` should be
descriptive of the custom type that is being serialized.  The choice of
`~asdf.CustomType.organization` and `~asdf.CustomType.standard` is fairly
arbitrary, but also important.  Custom types that are provided by the same
package should be grouped into the same `~asdf.CustomType.standard` and
`~asdf.CustomType.organization`.

These three values, along with the `~asdf.CustomType.version`, are used to
define the YAML tag that will mark the serialized type in ASDF files.  In our
example, the tag becomes ``tag:nowhere.org:custom/fraction-1.0.0``.  The tag
is important when defining the `asdf.AsdfExtension` subclass.  Critically,
these values must all be reflected in the associated schema.
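To make that correspondence concrete, here is a small illustrative snippet
(not part of the `asdf` API; the variables simply mirror the class attributes
above) showing how the four values combine into the YAML tag:

.. code-block:: python

    # Illustration only: how the class attributes assemble into the tag URI.
    organization = 'nowhere.org'
    standard = 'custom'
    name = 'fraction'
    version = (1, 0, 0)

    tag = 'tag:{}:{}/{}-{}'.format(
        organization, standard, name,
        '.'.join(str(part) for part in version))
    print(tag)  # tag:nowhere.org:custom/fraction-1.0.0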
Custom type methods
*******************

In addition to the attributes mentioned above, we also overrode the following
methods of `~asdf.CustomType` (each bullet is also a link to the API
documentation):

* `~asdf.CustomType.to_tree`
* `~asdf.CustomType.from_tree`

The `~asdf.CustomType.to_tree` method defines how an instance of a custom data
type is converted into data structures that represent a YAML tree that can be
serialized to a file.  The `~asdf.CustomType.from_tree` method defines how a
YAML tree can be converted back into an instance of the original custom data
type.

In the example above, we used a `list` to contain the important attributes of
`fractions.Fraction`.  However, this choice is fairly arbitrary, as long as it
is consistent between the way that `~asdf.CustomType.to_tree` and
`~asdf.CustomType.from_tree` are defined.  For example, we could have also
chosen to use a `dict`:

.. runcode::

    import asdf
    import fractions

    class FractionType(asdf.CustomType):
        name = 'fraction'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [fractions.Fraction]

        @classmethod
        def to_tree(cls, node, ctx):
            return dict(numerator=node.numerator,
                        denominator=node.denominator)

        @classmethod
        def from_tree(cls, tree, ctx):
            return fractions.Fraction(tree['numerator'],
                                      tree['denominator'])

.. runcode:: hidden

    # Redefine the fraction extension for the sake of the example
    FractionExtension.types = [FractionType]

    tree = {'fraction': fractions.Fraction(10, 3)}
    with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff:
        ff.write_to("test.asdf")

In this case, the associated schema would look like the following::

    %YAML 1.1
    ---
    $schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
    id: "http://nowhere.org/schemas/custom/fraction-1.0.0"
    title: An example custom type for handling fractions

    tag: "tag:nowhere.org:custom/fraction-1.0.0"
    type: object
    properties:
      numerator:
        type: integer
      denominator:
        type: integer
    ...

We can compare the output using this representation to the example above:

.. asdf:: test.asdf ignore_unrecognized_tag

Serializing more complex types
******************************

Sometimes the custom types that we wish to represent in ASDF themselves have
attributes which are also custom types.  As a somewhat contrived example,
consider a 2D cartesian coordinate that uses `fractions.Fraction` to represent
each of the components.  We will call this type `Fractional2DCoordinate`.

First we need to define a schema to represent this new type::

    %YAML 1.1
    ---
    $schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
    id: "http://nowhere.org/schemas/custom/fractional_2d_coord-1.0.0"
    title: An example custom type for handling fractional 2D coordinates

    tag: "tag:nowhere.org:custom/fractional_2d_coord-1.0.0"
    type: object
    properties:
      x:
        $ref: fraction-1.0.0
      y:
        $ref: fraction-1.0.0
    ...

Note that in the schema, the ``x`` and ``y`` attributes are expressed as
references to our ``fraction-1.0.0`` schema.  Since both of these schemas are
defined under the same standard and organization, we can simply use the name
and version of the ``fraction-1.0.0`` schema to refer to it.  However, if the
reference type was defined in a different organization and standard, it would
be necessary to use the entire YAML tag in the reference (e.g.
``tag:nowhere.org:custom/fraction-1.0.0``).  Relative tag references are also
allowed where appropriate.

.. runcode:: hidden

    class Fractional2DCoordinate:
        x = None
        y = None

We also need to define the custom tag type that corresponds to our new type:
.. runcode::

    import asdf

    class Fractional2DCoordinateType(asdf.CustomType):
        name = 'fractional_2d_coord'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [Fractional2DCoordinate]

        @classmethod
        def to_tree(cls, node, ctx):
            tree = dict()
            tree['x'] = node.x
            tree['y'] = node.y
            return tree

        @classmethod
        def from_tree(cls, tree, ctx):
            coord = Fractional2DCoordinate()
            coord.x = tree['x']
            coord.y = tree['y']
            return coord

In previous versions of this library, it was necessary for our
`Fractional2DCoordinateType` class to call `~asdf.yamlutil` functions
explicitly to convert the ``x`` and ``y`` components to and from their tree
representations.  Now, the library will automatically convert nested custom
types before calling `~asdf.CustomType.from_tree`, and after receiving the
result from `~asdf.CustomType.to_tree`.

Since `Fractional2DCoordinateType` shares the same
`~asdf.CustomType.organization` and `~asdf.CustomType.standard` as
`FractionType`, it can be added to the same extension class:

.. runcode::

    class FractionExtension(asdf.AsdfExtension):
        @property
        def types(self):
            return [FractionType, Fractional2DCoordinateType]

        @property
        def tag_mapping(self):
            return [('tag:nowhere.org:custom',
                     'http://nowhere.org/schemas/custom{tag_suffix}')]

        @property
        def url_mapping(self):
            return [('http://nowhere.org/schemas/custom/',
                     util.filepath_to_url(os.path.dirname(__file__)) +
                     '/{url_suffix}.yaml')]

Now we can use this extension to create an ASDF file:

.. runcode::

    coord = Fractional2DCoordinate()
    coord.x = fractions.Fraction(22, 7)
    coord.y = fractions.Fraction(355, 113)

    tree = {'coordinate': coord}
    with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff:
        ff.write_to("coord.asdf")

.. asdf:: coord.asdf ignore_unrecognized_tag

Note that in the resulting ASDF file, the ``x`` and ``y`` components of our
new `fractional_2d_coord` type are tagged as `fraction-1.0.0`.

Serializing reference cycles
****************************

Special considerations must be made when deserializing a custom type that
contains a reference to itself among its descendants.  Consider a
`fractions.Fraction` subclass that maintains a reference to its multiplicative
inverse:

.. runcode::

    class FractionWithInverse(fractions.Fraction):
        def __init__(self, *args, **kwargs):
            self._inverse = None

        @property
        def inverse(self):
            return self._inverse

        @inverse.setter
        def inverse(self, value):
            self._inverse = value

The inverse of the inverse of a fraction is the fraction itself, so you might
wish to construct your objects in the following way:

.. runcode::

    f1 = FractionWithInverse(3, 5)
    f2 = FractionWithInverse(5, 3)
    f1.inverse = f2
    f2.inverse = f1

Which creates an "infinite loop" between the two fractions.  An ordinary
`~asdf.CustomType` wouldn't be able to deserialize this, since each object
requires that the other be deserialized first!  Let's see what happens when we
define our `~asdf.CustomType.from_tree` method in a naive way:

.. runcode::

    class FractionWithInverseType(asdf.CustomType):
        name = 'fraction_with_inverse'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [FractionWithInverse]

        @classmethod
        def to_tree(cls, node, ctx):
            return {
                "numerator": node.numerator,
                "denominator": node.denominator,
                "inverse": node.inverse
            }

        @classmethod
        def from_tree(cls, tree, ctx):
            result = FractionWithInverse(
                tree["numerator"],
                tree["denominator"]
            )
            result.inverse = tree["inverse"]
            return result

After adding our type to the extension class, the tree will serialize
correctly:
.. runcode:: hidden

    FractionExtension.types = [FractionType, Fractional2DCoordinateType,
                               FractionWithInverseType]

.. runcode::

    tree = {'fraction': f1}
    with asdf.AsdfFile(tree, extensions=FractionExtension()) as ff:
        ff.write_to("with_inverse.asdf")

But upon deserialization, we notice a problem:

.. runcode::

    with asdf.open("with_inverse.asdf", extensions=FractionExtension()) as ff:
        reconstituted_f1 = ff["fraction"]
        assert reconstituted_f1.inverse.inverse is asdf.treeutil.PendingValue

The presence of `~asdf.treeutil.PendingValue` is `asdf`'s way of telling you
that the value corresponding to the key ``inverse`` was not fully deserialized
at the time that you retrieved it.  We can handle this situation by making our
`~asdf.CustomType.from_tree` a generator function:

.. runcode::

    class FractionWithInverseType(asdf.CustomType):
        name = 'fraction_with_inverse'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'
        types = [FractionWithInverse]

        @classmethod
        def to_tree(cls, node, ctx):
            return {
                "numerator": node.numerator,
                "denominator": node.denominator,
                "inverse": node.inverse
            }

        @classmethod
        def from_tree(cls, tree, ctx):
            result = FractionWithInverse(
                tree["numerator"],
                tree["denominator"]
            )
            yield result
            result.inverse = tree["inverse"]

The generator version of `~asdf.CustomType.from_tree` yields the partially
constructed `FractionWithInverse` object before setting its inverse property.
This allows `asdf` to proceed to constructing the inverse
`FractionWithInverse` object, and resume the original
`~asdf.CustomType.from_tree` execution only when the inverse is actually
available.  With this new version of `~asdf.CustomType.from_tree`, we can
successfully deserialize our ASDF file:

.. runcode:: hidden

    FractionExtension.types = [FractionType, Fractional2DCoordinateType,
                               FractionWithInverseType]

.. runcode::

    with asdf.open("with_inverse.asdf", extensions=FractionExtension()) as ff:
        reconstituted_f1 = ff["fraction"]
        assert reconstituted_f1.inverse.inverse is reconstituted_f1

Assigning schema and tag versions
*********************************

Authors of new tags and schemas should strive to use the conventions described
by `semantic versioning <https://semver.org/>`_.  Tags and schemas for types
that have not been serialized before should begin at ``1.0.0``.  Versions for
a particular tag type need not move in lock-step with other tag types in the
same extension.

The patch version should be bumped for bug fixes and other minor,
backwards-compatible changes.  New features can be indicated with increments
to the minor version, as long as they remain backwards compatible with older
versions of the schema.  Any changes that break backwards compatibility must
be indicated by a major version update.

Since ASDF is intended to be an archival file format, authors of tags and
schemas should work to ensure that ASDF files created with older extensions
can continue to be processed.  This means that every time a schema version is
bumped (with the possible exception of patch updates), a **new** schema file
should be created.

For example, if we currently have a schema for ``xyz-1.0.0``, and we wish to
make changes and bump the version to ``xyz-1.1.0``, we should leave the
original schema intact.  A **new** schema file should be created for
``xyz-1.1.0``, which can exist in parallel with the old file.  The version of
the corresponding tag type should be bumped to ``1.1.0``.

For more details on the behavior of schema and tag versioning from a user
perspective, see :ref:`version_and_compat`, and also
:ref:`custom_type_versions`.
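To make the parallel-versioning workflow concrete, here is a minimal sketch
(the ``XYZType`` class and its attribute values are hypothetical, following
the naming conventions used above) of the only change the tag class needs when
the schema version is bumped:

.. code-block:: python

    import asdf

    class XYZType(asdf.CustomType):
        name = 'xyz'
        organization = 'nowhere.org'
        standard = 'custom'
        # Bumped from (1, 0, 0) to match the new xyz-1.1.0 schema file,
        # which is added alongside (not in place of) xyz-1.0.0.yaml so
        # that files written with the old extension remain readable.
        version = (1, 1, 0)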
Explicit version support ************************ To some extent schemas and tag classes will be closely tied to the custom data types that they represent. This means that in some cases API changes or other changes to the representation of the underlying types will force us to modify our schemas and tag classes. ASDF's schema versioning allows us to handle changes in schemas over time. Let's consider an imaginary custom type called ``Person`` that we want to serialize in ASDF. The first version of ``Person`` was constructed using a first and last name: .. code-block:: python person = Person('James', 'Webb') print(person.first, person.last) Our version 1.0.0 YAML schema for ``Person`` might look like the following: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/person-1.0.0" title: An example custom type for representing a Person tag: "tag:nowhere.org:custom/person-1.0.0" type: array items: type: string minItems: 2 maxItems: 2 ... And our tag implementation would look something like this: .. code-block:: python import asdf from people import Person class PersonType(asdf.CustomType): name = 'person' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [Person] @classmethod def to_tree(cls, node, ctx): return [node.first, node.last] @classmethod def from_tree(cls, tree, ctx): return Person(tree[0], tree[1]) However, a newer version of ``Person`` now requires a middle name in the constructor as well: .. code-block:: python person = Person('James', 'Edwin', 'Webb') print(person.first, person.middle, person.last) James Edwin Webb So we update our YAML schema to version 1.1.0 in order to support newer versions of Person: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/person-1.1.0" title: An example custom type for representing a Person tag: "tag:nowhere.org:custom/person-1.1.0" type: array items: type: string minItems: 3 maxItems: 3 ... We need to update our tag class implementation as well. However, we need to be careful. We still want to be able to read version 1.0.0 of our schema and be able to convert it to the newer version of ``Person`` objects. To accomplish this, we will make use of the `~asdf.CustomType.supported_versions` attribute for our tag class. This will allow us to declare explicit support for the schema versions our tag class implements. Under the hood, `asdf` creates multiple copies of our ``PersonType`` tag class, each with a different `~asdf.CustomType.version` attribute corresponding to one of the supported versions. This means that in our new tag class implementation, we can condition our `~asdf.CustomType.from_tree` implementation on the value of ``version`` to determine which schema version should be used when reading: .. 
code-block:: python

    import asdf
    from people import Person

    class PersonType(asdf.CustomType):
        name = 'person'
        organization = 'nowhere.org'
        version = (1, 1, 0)
        supported_versions = [(1, 0, 0), (1, 1, 0)]
        standard = 'custom'
        types = [Person]

        @classmethod
        def to_tree(cls, node, ctx):
            return [node.first, node.middle, node.last]

        @classmethod
        def from_tree(cls, tree, ctx):
            # Handle the older version of the person schema
            if cls.version == (1, 0, 0):
                # Construct a Person object with an empty middle name field
                return Person(tree[0], '', tree[1])
            else:
                # The newer version of the schema stores the middle name too
                return Person(tree[0], tree[1], tree[2])

Note that the implementation of ``to_tree`` is not conditioned on
``cls.version`` since we do not need to convert new ``Person`` objects back to
the older version of the schema.

Handling subclasses
*******************

By default, if a custom type is serialized by an `asdf` tag class, then all
subclasses of that type can also be serialized.  However, no attributes that
are specific to the subclass will be stored in the file.  When reading the
file, an instance of the base custom type will be returned instead of the
subclass that was written.

To properly handle subclasses of custom types already recognized by `asdf`, it
is necessary to implement a separate tag class that is specific to the
subclass to be serialized.  Previous versions of this library implemented an
experimental feature that allowed ASDF to serialize subclass attributes using
the same tag class, but this feature was dropped as it produced files that
were not portable.

Creating custom schemas
-----------------------

All custom types to be serialized by `asdf` require custom schemas.  The best
resource for creating ASDF schemas can be found in the
`ASDF Standard <https://asdf-standard.readthedocs.io>`_ documentation.

In most cases, ASDF schemas will be included as part of a packaged software
distribution.  In these cases, it is important for the
`~asdf.AsdfExtension.url_mapping` of the corresponding `~asdf.AsdfExtension`
extension class to map the schema URL to an actual location on disk.  However,
it is possible for schemas to be hosted online as well, in which case the URL
mapping can map (perhaps trivially) to an actual network location.  See
:ref:`defining_extensions` for more information.

It is also important for packages that provide custom schemas to test them,
both to make sure that they are valid, and to ensure that any examples they
provide are also valid.  See :ref:`testing_custom_schemas` for more
information.

Adding custom validators
------------------------

A new type may also add new validation keywords to the schema language.  This
can be used to impose type-specific restrictions on the values in an ASDF
file.  This feature is used internally so a schema can specify the required
datatype of an array.

To support custom validation keywords, set the `~asdf.CustomType.validators`
member of a `~asdf.CustomType` subclass to a dictionary where the keys are the
validation keyword name and the values are validation functions.  The
validation functions are of the same form as the validation functions in the
underlying ``jsonschema`` library, and are passed the following arguments:

- ``validator``: A `jsonschema.Validator` instance.
- ``value``: The value of the schema keyword.
- ``instance``: The instance to validate.  This will be made up of basic
  datatypes as represented in the YAML file (list, dict, number, strings),
  and not include any object types.
- ``schema``: The entire schema that applies to instance.  Useful to get
  other related schema keywords.
The validation function should either return ``None`` if the instance is valid
or ``yield`` one or more `asdf.ValidationError` objects if the instance is
invalid.

To continue the example from above, for the ``FractionType`` say we want to
add a validation keyword "``simplified``" that, when ``true``, asserts that
the corresponding fraction is in simplified form:

.. code-block:: python

    import fractions

    from asdf import ValidationError

    def validate_simplified(validator, simplified, instance, schema):
        if simplified:
            reduced = fractions.Fraction(instance[0], instance[1])
            if (reduced.numerator != instance[0] or
                    reduced.denominator != instance[1]):
                yield ValidationError("Fraction is not in simplified form.")

    FractionType.validators = {'simplified': validate_simplified}

.. _defining_extensions:

Defining custom extension classes
---------------------------------

Extension classes are the mechanism that `asdf` uses to register custom tag
types so that they can be used when processing ASDF files.  Packages that
define their own custom tag types must also define extensions in order for
those types to be used.

All extension classes must implement the `asdf.AsdfExtension` abstract base
class.  A custom extension will override each of the following properties of
`AsdfExtension` (the text in each bullet is also a link to the corresponding
documentation):

* `~asdf.AsdfExtension.types`
* `~asdf.AsdfExtension.tag_mapping`
* `~asdf.AsdfExtension.url_mapping`

.. _packaging_extensions:

Overriding built-in extensions
******************************

It is possible for externally defined extensions to override tag types that
are provided by `asdf`'s built-in extension.  For example, maybe an external
package wants to provide a different implementation of
`~asdf.tags.core.NDArrayType`.  In this case, the external package does not
need to provide custom schemas since the schema for the type to be overridden
is already provided as part of the ASDF standard.

Instead, the extension class may inherit from `asdf`'s
`~asdf.extension.BuiltinExtension` and simply override the
`~asdf.AsdfExtension.types` property to indicate the type that is being
overridden.  Doing this preserves the `~asdf.AsdfExtension.tag_mapping` and
`~asdf.AsdfExtension.url_mapping` that is used by the `BuiltinExtension`,
which allows the schemas that are packaged by `asdf` to be located.

`asdf` will give precedence to the type that is provided by the external
extension, effectively overriding the corresponding type in the built-in
extension.  Note that it is currently undefined if multiple external
extensions are provided that override the same built-in type.

Packaging custom extensions
---------------------------

Packaging schemas
*****************

If a package provides custom schemas, the schema files must be installed as
part of that package distribution.  In general, schema files must be installed
into a subdirectory of the package distribution.  The `asdf` extension class
must supply a `~asdf.AsdfExtension.url_mapping` that maps to the installed
location of the schemas.  See :ref:`defining_extensions` for more details.

Registering entry points
************************

Packages that provide their own ASDF extensions can (and should!) install them
so that they are automatically detectable by the `asdf` Python package.  This
is accomplished using Python's `setuptools` entry points.  Entry points are
registered in a package's `setup.py` file.
Consider a package that provides an extension class `MyPackageExtension` in
the submodule `mypackage.asdf.extensions`.  We need to register this class as
an extension entry point that `asdf` will recognize.  First, we create a
dictionary:

.. code:: python

    entry_points = {}
    entry_points['asdf_extensions'] = [
        'mypackage = mypackage.asdf.extensions:MyPackageExtension'
    ]

The key used in the `entry_points` dictionary must be ``'asdf_extensions'``.
The value must be an array of one or more strings, each with the following
format:

``extension_name = fully.specified.submodule:ExtensionClass``

The extension name can be any arbitrary string, but it should be descriptive
of the package and the extension.  In most cases the name of the package
itself will suffice.

Note that depending on individual package requirements, there may be other
entries in the `entry_points` dictionary.  The entry points must be passed to
the call to `setuptools.setup`:

.. code:: python

    from setuptools import setup

    entry_points = {}
    entry_points['asdf_extensions'] = [
        'mypackage = mypackage.asdf.extensions:MyPackageExtension'
    ]

    setup(
        # We omit other package-specific arguments that are not
        # relevant to this example
        entry_points=entry_points,
    )

When running ``python setup.py install`` or ``python setup.py develop`` on
this package, the entry points will be registered automatically.  This allows
the `asdf` package to recognize the extensions without any user intervention.
Users of your package that wish to read ASDF files using types that you have
registered will not need to use any extension explicitly.  Instead, `asdf`
will automatically recognize the types you have registered and will process
them appropriately.  See :ref:`other_packages` for more information on using
extensions.

.. _testing_custom_schemas:

Testing custom schemas
----------------------

Packages that provide their own schemas can test them using `asdf`'s
`pytest <https://docs.pytest.org>`_ plugin for schema testing.  Schemas are
tested for overall validity, and any examples given within the schemas are
also tested.

The schema tester plugin is automatically registered when the `asdf` package
is installed.  In order to enable testing, it is necessary to add the
directory containing your schema files to the pytest section of your project's
`setup.cfg` file.  If you do not already have such a file, creating a
`setup.cfg` with the following should be sufficient:

.. code:: ini

    [tool:pytest]
    asdf_schema_root = path/to/schemas another/path/to/schemas

The schema directory paths should be paths that are relative to the top of the
package directory **when it is installed**.  If this is different from the
path in the source directory, then both paths can be used to facilitate
in-place testing (see `asdf`'s own `setup.cfg` for an example of this).

.. note::

    Older versions of `asdf` (prior to 2.4.0) required the plugin to be
    registered in your project's `conftest.py` file.  As of 2.4.0, the plugin
    is now registered automatically and so this line should be removed from
    your `conftest.py` file, unless you need to retain compatibility with
    older versions of `asdf`.

The ``asdf_schema_skip_names`` configuration variable can be used to skip
schema files that live within one of the ``asdf_schema_root`` directories but
should not be tested.  The names should be given as simple base file names
(without directory paths or extensions).  Again, see `asdf`'s own `setup.cfg`
file for an example.

The schema tests do **not** run by default.
In order to enable the tests by default for your package, add ``asdf_schema_tests_enabled = true`` to the ``[tool:pytest]`` section of your `setup.cfg` file. If you do not wish to enable the schema tests by default, you can add the ``--asdf-tests`` option to the ``pytest`` command line to enable tests on a per-run basis. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/extending/manifests.rst0000644000537500020070000000703000000000000022153 0ustar00wjamiesonSTSCI\science.. _extending_manifests: =================== Extension manifests =================== An extension "manifest" is a YAML document that defines an extension in a language-independent way. Use of a manifest is recommended for ASDF extensions that are intended to be implemented by ASDF libraries in multiple languages, so that other implementers do not need to go spelunking through Python code to discover the tags and schemas that are included in the extension. This library provides support for automatically populating a `~asdf.extension.Extension` object from a manifest; see :ref:`extending_extensions_manifest` for more information. Anatomy of a manifest ===================== Here is an example of a simple manifest that describes an extension with one tag and schema: .. code-block:: yaml :linenos: %YAML 1.1 --- id: asdf://example.com/example-project/manifests/example-1.0.0 extension_uri: asdf://example.com/example-project/extensions/example-1.0.0 title: Example extension 1.0.0 description: Tags for example objects. asdf_standard_requirement: gte: 1.3.0 lt: 1.5.0 tags: - tag_uri: asdf://example.com/example-project/tags/foo-1.0.0 schema_uri: asdf://example.com/example-project/schemas/foo-1.0.0 ... .. code-block:: yaml :lineno-start: 3 id: asdf://example.com/example-project/manifests/example-1.0.0 The ``id`` property contains the URI that uniquely identifies our manifest. This URI is how we'll refer to the manifest document's content when using the `asdf` library. .. code-block:: yaml :lineno-start: 4 extension_uri: asdf://example.com/example-project/extensions/example-1.0.0 The ``extension_uri`` property contains the URI of the extension that the manifest describes. This is the URI written to ASDF file metadata to document that an extension was used when writing the file. .. code-block:: yaml :lineno-start: 5 title: Example extension 1.0.0 description: Tags for example objects. ``title`` and ``description`` are optional documentation properties. .. code-block:: yaml :lineno-start: 7 asdf_standard_requirement: gte: 1.3.0 lt: 1.5.0 The optional ``asdf_standard_requirement`` property describes the ASDF Standard versions that are compatible with this extension. The ``gte`` and ``lt`` properties are used here to restrict ASDF Standard versions to greater-than-or-equal 1.3.0 and less-than 1.5.0, respectively. ``gt`` and ``lte`` properties are also available. .. code-block:: yaml :lineno-start: 10 tags: - tag_uri: asdf://example.com/example-project/tags/foo-1.0.0 schema_uri: asdf://example.com/example-project/schemas/foo-1.0.0 The ``tags`` property contains a list of objects, each representing a new tag that the extension brings to ASDF. The ``tag_uri`` property contains the tag itself, while the (optional, but recommended) ``schema_uri`` property contains the URI of a schema that can be used to validate objects with that tag. Tag objects may also include ``title`` and ``description`` documentation properties. 
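Since a manifest is plain YAML, it can be loaded and inspected like any other
YAML document.  As a minimal sketch (assuming the example manifest above has
been saved as ``example-1.0.0.yaml``):

.. code-block:: python

    import yaml

    with open("example-1.0.0.yaml") as f:
        manifest = yaml.safe_load(f)

    # The extension URI and tag list are ordinary mapping entries.
    print(manifest["extension_uri"])
    print([tag["tag_uri"] for tag in manifest["tags"]])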
Validating a manifest
=====================

This library includes a schema,
``asdf://asdf-format.org/core/schemas/extension_manifest-1.0.0``, that can be
used to validate a manifest document:

.. code-block:: python

    import asdf
    import yaml

    schema = asdf.schema.load_schema("asdf://asdf-format.org/core/schemas/extension_manifest-1.0.0")
    manifest = yaml.safe_load(open("path/to/manifests/example-1.0.0.yaml").read())
    asdf.schema.validate(manifest, schema=schema)

././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/extending/resources.rst0000644000537500020070000001765100000000000022176 0ustar00wjamiesonSTSCI\science.. currentmodule:: asdf.resource

.. _extending_resources:

===============================
Resources and resource mappings
===============================

In the terminology of this library, a "resource" is a sequence of bytes
associated with a URI.  Currently the two types of resources recognized by
`asdf` are schemas and extension manifests.  Both of these are YAML documents
whose associated URI is expected to match the ``id`` property of the document.

A "resource mapping" is an `asdf` plugin that provides access to the content
for a URI.  These plugins must implement the `~collections.abc.Mapping`
interface (a simple `dict` qualifies) and map `str` URI keys to `bytes`
values.  Resource mappings are installed into the `asdf` library via one of
two routes: the
`AsdfConfig.add_resource_mapping <asdf.config.AsdfConfig.add_resource_mapping>`
method or the ``asdf.resource_mappings`` entry point.

Installing resources via AsdfConfig
===================================

The simplest way to install a resource into `asdf` is to add it at runtime
using the
`AsdfConfig.add_resource_mapping <asdf.config.AsdfConfig.add_resource_mapping>`
method.  For example, the following code installs a schema for use with the
`asdf.AsdfFile` custom_schema argument:

.. code-block:: python

    import asdf

    content = b"""
    %YAML 1.1
    ---
    $schema: http://stsci.edu/schemas/yaml-schema/draft-01
    id: asdf://example.com/example-project/schemas/foo-1.0.0
    type: object
    properties:
      foo:
        type: string
    required: [foo]
    ...
    """

    asdf.get_config().add_resource_mapping({
        "asdf://example.com/example-project/schemas/foo-1.0.0": content
    })

The schema will now be available for validating files:

.. code-block:: python

    af = asdf.AsdfFile(custom_schema="asdf://example.com/example-project/schemas/foo-1.0.0")
    af.validate()  # Error, "foo" is missing

The DirectoryResourceMapping class
==================================

But what if we don't want to store our schemas in variables in the code?
Storing resources in a directory tree is a common use case, so `asdf` provides
a `~collections.abc.Mapping` implementation that reads schema content from a
filesystem.  This is the `DirectoryResourceMapping` class.

Consider these three schemas:

.. code-block:: yaml

    # foo-1.0.0.yaml
    id: asdf://example.com/example-project/schemas/foo-1.0.0
    # ...

    # bar-2.3.4.yaml
    id: asdf://example.com/example-project/schemas/nested/bar-2.3.4
    # ...

    # baz-8.1.1.yaml
    id: asdf://example.com/example-project/schemas/nested/baz-8.1.1
    # ...

which are arranged in the following directory structure::

    schemas
    ├─ foo-1.0.0.yaml
    ├─ README
    └─ nested
       ├─ bar-2.3.4.yaml
       └─ baz-8.1.1.yaml

Our goal is to install all schemas in the directory tree so that they are
available for use with `asdf`.  The `DirectoryResourceMapping` class can do
that for us, but we need to show it how to construct the schema URIs from the
file paths *without reading the id property from the files*.
This requirement is a performance consideration; not all resources are used in
every session, and if `asdf` were to read and parse all available files when
plugins are loaded, the first call to `asdf.open` would be intolerably slow.

We should configure `DirectoryResourceMapping` like this:

.. code-block:: python

    import asdf
    from asdf.resource import DirectoryResourceMapping

    mapping = DirectoryResourceMapping(
        "/path/to/schemas",
        "asdf://example.com/example-project/schemas/",
        recursive=True,
        filename_pattern="*.yaml",
        stem_filename=True
    )
    asdf.get_config().add_resource_mapping(mapping)

The first argument is the path to the schemas directory on the filesystem.
The second argument is the prefix that should be prepended to file paths
relative to that root when constructing the schema URIs.  The ``recursive``
argument tells the class to descend into the ``nested`` directory when
searching for schemas, ``filename_pattern`` is a glob pattern chosen to
exclude our README file, and ``stem_filename`` causes the class to drop the
``.yaml`` suffix when constructing URIs.

We can test that our configuration is correct by asking `asdf` to read and
parse one of the schemas:

.. code-block:: python

    from asdf.schema import load_schema

    uri = "asdf://example.com/example-project/schemas/nested/bar-2.3.4"
    schema = load_schema(uri)
    assert schema["id"] == uri

.. _extending_resources_entry_points:

Installing resources via entry points
=====================================

The `asdf` package also offers an entry point for installing resource mapping
plugins.  This installs a package's resources automatically without requiring
calls to the AsdfConfig method.  The entry point is called
``asdf.resource_mappings`` and expects to receive a method that returns a list
of `~collections.abc.Mapping` instances.

For example, let's say we're creating a package named ``asdf-foo-schemas``
that provides the same schemas described in the previous section.  Our
directory structure might look something like this::

    asdf-foo-schemas
    ├─ setup.cfg
    ├─ setup.py
    └─ src
       └─ asdf_foo_schemas
          ├─ __init__.py
          ├─ integration.py
          └─ schemas
             ├─ __init__.py
             ├─ foo-1.0.0.yaml
             ├─ README
             └─ nested
                ├─ __init__.py
                ├─ bar-2.3.4.yaml
                └─ baz-8.1.1.yaml

In ``integration.py``, we'll define the entry point method and have it return
a list with a single element, our `DirectoryResourceMapping` instance:

.. code-block:: python

    # integration.py
    from pathlib import Path

    from asdf.resource import DirectoryResourceMapping

    def get_resource_mappings():
        # Get path to schemas directory relative to this file
        schemas_path = Path(__file__).parent / "schemas"
        mapping = DirectoryResourceMapping(
            schemas_path,
            "asdf://example.com/example-project/schemas/",
            recursive=True,
            filename_pattern="*.yaml",
            stem_filename=True
        )
        return [mapping]

Then in ``setup.cfg``, define an ``[options.entry_points]`` section that
identifies the method as an ``asdf.resource_mappings`` entry point:

.. code-block:: cfg

    # setup.cfg
    [options.entry_points]
    asdf.resource_mappings =
        asdf_foo_schemas = asdf_foo_schemas.integration:get_resource_mappings

After installing the package, it should be possible to load one of our schemas
in a new session without any additional setup:

.. code-block:: python

    from asdf.schema import load_schema

    uri = "asdf://example.com/example-project/schemas/nested/bar-2.3.4"
    schema = load_schema(uri)
    assert schema["id"] == uri

Note that the package will need to be configured to include the YAML files.
There are multiple ways to accomplish this, but one easy option is to add an
``[options.package_data]`` section to ``setup.cfg`` requesting that all files
with a ``.yaml`` extension be installed:

.. code-block:: cfg

    # setup.cfg
    [options.package_data]
    * = *.yaml

Entry point performance considerations
--------------------------------------

For the good of `asdf` users everywhere, it's important that entry point
methods load as quickly as possible.  All resource URIs must be loaded before
reading an ASDF file, so any entry point method that lingers will introduce a
delay to the initial call to `asdf.open`.  For that reason, we recommend
minimizing the number of imports that occur in the module containing the entry
point method, particularly imports of modules outside of the Python standard
library or `asdf` itself.

When resources are stored in a filesystem, it's also helpful to delay reading
a file until its URI is actually requested, which may not occur in a given
session.  The `DirectoryResourceMapping` class implements this behavior.

././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/extending/schemas.rst0000644000537500020070000002007600000000000021612 0ustar00wjamiesonSTSCI\science

.. _extending_schemas:

============
ASDF schemas
============

ASDF schemas are YAML documents that describe validations to be performed on
tagged objects nested within the ASDF tree or on the tree itself.  Schemas can
validate the presence, datatype, and value of objects and their properties,
and can be combined in different ways to facilitate reuse.

These schemas, though expressed in YAML, are structured according to the
`JSON Schema Draft 4`_ specification.  The excellent
`Understanding JSON Schema`_ book is a great place to start for users not
already familiar with JSON Schema.  Just keep in mind that the book includes
coverage of later drafts of the JSON Schema spec, so certain features
(constant values, conditional subschemas, etc) will not be available when
writing schemas for ASDF.  The book makes clear which features were introduced
after Draft 4.

Anatomy of a schema
===================

Here is an example of an ASDF schema that validates an object with a numeric
value and corresponding unit:

.. code-block:: yaml
    :linenos:

    %YAML 1.1
    ---
    $schema: http://stsci.edu/schemas/yaml-schema/draft-01
    id: asdf://asdf-format.org/core/schemas/quantity-2.0.0

    title: Quantity object containing numeric value and unit
    description: >-
      An object with a numeric value, which may be a scalar
      or an array, and associated unit.

    type: object
    properties:
      value:
        description: A vector of one or more values
        anyOf:
          - type: number
          - tag: tag:stsci.edu:asdf/core/ndarray-1.0.0
      unit:
        description: The unit corresponding to the values
        tag: tag:stsci.edu:asdf/unit/unit-1.0.0
    required: [value, unit]
    ...

This is similar to the `quantity schema`_ in the ASDF Standard, but has been
updated to reflect current recommendations regarding schemas.  Let's walk
through this schema line by line.

.. code-block:: yaml
    :linenos:

    %YAML 1.1
    ---

These first two lines form the header of the file.  The ``%YAML 1.1``
indicates that we're following version 1.1 of the YAML spec.  The ``---``
marks the start of a new YAML document.

.. code-block:: yaml
    :lineno-start: 3

    $schema: http://stsci.edu/schemas/yaml-schema/draft-01

The ``$schema`` property contains the URI of the schema that validates this
document.  Since our document is itself a schema, the URI refers to a
*metaschema*.
ASDF comes with three built-in metaschemas: - ``http://json-schema.org/draft-04/schema`` - The JSON Schema Draft 4 metaschema. Includes basic validators and combiners. - ``http://stsci.edu/schemas/yaml-schema/draft-01`` - The YAML Schema metaschema. Includes everything in JSON Schema Draft 4, plus additional YAML-specific validators including ``tag`` and ``propertyOrder``. - ``http://stsci.edu/schemas/asdf/asdf-schema-1.0.0`` - The ASDF Schema metaschema. Includes everything in YAML Schema, plus additional ASDF-specific validators that check ndarray properties. Our schema makes use of the ``tag`` validator, so we're specifying the YAML Schema URI here. .. code-block:: yaml :lineno-start: 4 id: asdf://asdf-format.org/core/schemas/quantity-2.0.0 The ``id`` property contains the URI that uniquely identifies our schema. This URI is how we'll refer to the schema when using the asdf library. .. code-block:: yaml :lineno-start: 6 title: Quantity object containing numeric value and unit description: >- An object with a numeric value, which may be a scalar or an array, and associated unit. Title and description are optional (but recommended) documentation properties. These properties can be placed multiple times at any level of the schema and do not have an impact on the validation process. .. code-block:: yaml :lineno-start: 11 type: object This line invokes the ``type`` validator to check the data type of the top-level value. We're asserting that the type must be a YAML mapping, which in Python is represented as a `dict`. .. code-block:: yaml :lineno-start: 12 properties: The ``properties`` validator announces that we'd like to validate certain named properties of mapping. If a property is listed here and is present in the ASDF, it will be validated accordingly. .. code-block:: yaml :lineno-start: 13 value: description: A vector of one or more values Here we're identifying a property named ``value`` that we'd like to validate. The ``description`` is used to add some additional documentation. .. code-block:: yaml :lineno-start: 15 anyOf: The ``anyOf`` validator is one of JSON Schema's combiners. The ``value`` property will be validated against each of the following subschemas, and if any validates successfully, the entire ``anyOf`` will be considered valid. Other available combiners are ``allOf``, which requires that all subschemas validate successfully, ``oneOf``, which requires that one and only one of the subschemas validates, and ``not``, which requires that a single subschema does *not* validate. .. code-block:: yaml :lineno-start: 16 - type: number The first subschema in the list contains a ``type`` validator that succeeds if the entity assigned to ``value`` is a numeric literal. .. code-block:: yaml :lineno-start: 17 - tag: tag:stsci.edu:asdf/core/ndarray-1.0.0 The second subschema contains a ``tag`` validator, which makes an assertion regarding the YAML tag URI of the object assigned to ``value``. In this subschema we're requiring the tag of an ndarray-1.0.0 object, which is how n-dimensional arrays are represented in an ASDF tree. The net effect of the ``anyOf`` combiner and its two subschemas is: validate successfully if the ``value`` object is either a numeric literal or an n-dimensional array. .. code-block:: yaml :lineno-start: 18 unit: description: The unit corresponding to the values tag: tag:stsci.edu:asdf/unit/unit-1.0.0 The ``unit`` property has another bit of documentation and a ``tag`` validator that requires it to be a unit-1.0.0 object. .. 
code-block:: yaml
    :lineno-start: 21

    required: [value, unit]

Since the ``properties`` validator does not require the presence of its listed
properties, we need another validator to do that.  The ``required`` validator
defines a list of properties that need to be present if validation is to
succeed.

.. code-block:: yaml
    :lineno-start: 22

    ...

Finally, the YAML document end indicator marks the end of the schema.

Checking schema syntax
======================

The `~asdf.schema.check_schema` function performs basic syntax checks on a
schema and will raise an error if it discovers a problem.  It does not
currently accept URIs and requires that the schema already be loaded into
Python objects.  If the schema is already registered with the asdf library as
a resource (see :ref:`extending_resources`), it can be loaded and checked like
this:

.. code-block:: python

    from asdf.schema import load_schema, check_schema

    schema = load_schema("asdf://example.com/example-project/schemas/foo-1.0.0")
    check_schema(schema)

Otherwise, the schema can be loaded using pyyaml directly:

.. code-block:: python

    from asdf.schema import check_schema
    import yaml

    schema = yaml.safe_load(open("/path/to/foo-1.0.0.yaml").read())
    check_schema(schema)

Testing validation
==================

Getting a schema to validate as intended can be a tricky business, so it's
helpful to test validation against some example objects as you go along.  The
`~asdf.schema.validate` function will validate a Python object against a
schema:

.. code-block:: python

    from asdf.schema import validate
    import yaml

    schema = yaml.safe_load(open("/path/to/foo-1.0.0.yaml").read())
    obj = {"foo": "bar"}
    validate(obj, schema=schema)

The validate function will return successfully if the object is valid, or
raise an error if not.

.. _JSON Schema Draft 4: https://json-schema.org/specification-links.html#draft-4
.. _Understanding JSON Schema: https://json-schema.org/understanding-json-schema/
.. _quantity schema: https://asdf-standard.readthedocs.io/en/latest/generated/stsci.edu/asdf/unit/quantity-1.1.0.html

././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/extending/uris.rst0000644000537500020070000001124000000000000021142 0ustar00wjamiesonSTSCI\science

.. _extending_uris:

============
URIs in ASDF
============

The ASDF format uses **U**\ niform **R**\ esource **I**\ dentifiers to refer
to various entities such as schemas or tags.  These are string identifiers
that often resemble web addresses that uniquely identify the associated
entity.  Here are some examples of URIs that might be encountered in an ASDF
file:

- ``http://stsci.edu/schemas/asdf/core/ndarray-1.0.0`` (URI of the
  ndarray-1.0.0 schema)
- ``tag:stsci.edu:asdf/core/asdf-1.1.0`` (URI of the asdf-1.1.0 YAML tag)
- ``asdf://example.com/schemas/foo-1.0.0`` (URI of the foo-1.0.0 schema)

Each of these uses a different URI *scheme*, and each is a valid URI format in
ASDF.

URI vs URL
==========

One common point of confusion is the distinction between a URI and a URL.  The
two are easily conflated -- for example, consider the URI of the ndarray-1.0.0
schema above.

``http://stsci.edu/schemas/asdf/core/ndarray-1.0.0``

This string looks just like the URL of a web page, but if we were to attempt
to visit that location in a browser, we'd get a 404 Not Found from stsci.edu.
And yet it is still a valid URI!  The similarity arises from the need for URIs
to be globally unique.
.. _JSON Schema Draft 4: https://json-schema.org/specification-links.html#draft-4
.. _Understanding JSON Schema: https://json-schema.org/understanding-json-schema/
.. _quantity schema: https://asdf-standard.readthedocs.io/en/latest/generated/stsci.edu/asdf/unit/quantity-1.1.0.html

asdf-2.9.2/docs/asdf/extending/uris.rst

.. _extending_uris:

============
URIs in ASDF
============

The ASDF format uses **U**\ niform **R**\ esource **I**\ dentifiers to refer to various entities such as schemas or tags. These are string identifiers that often resemble web addresses and that uniquely identify the associated entity. Here are some examples of URIs that might be encountered in an ASDF file:

- ``http://stsci.edu/schemas/asdf/core/ndarray-1.0.0`` (URI of the ndarray-1.0.0 schema)
- ``tag:stsci.edu:asdf/core/asdf-1.1.0`` (URI of the asdf-1.1.0 YAML tag)
- ``asdf://example.com/schemas/foo-1.0.0`` (URI of the foo-1.0.0 schema)

Each of these uses a different URI *scheme*, and each is a valid URI format in ASDF.

URI vs URL
==========

One common point of confusion is the distinction between a URI and a URL. The two are easily conflated -- for example, consider the URI of the ndarray-1.0.0 schema above:

``http://stsci.edu/schemas/asdf/core/ndarray-1.0.0``

This string looks just like the URL of a web page, but if we were to attempt to visit that location in a browser, we'd get a 404 Not Found from stsci.edu. And yet it is still a valid URI! The similarity arises from the need for URIs to be globally unique. Since web domains are already controlled by a single organization or individual, they offer a convenient way to define URIs -- just reserve some path prefix off a domain you control and dole out strings with that prefix where unique identifiers are needed. But using ``http://`` as a URI scheme has the downside that users expect to be able to retrieve the document contents from that address.

The asdf:// URI scheme
======================

To address this confusion between URIs and URLs, `asdf` 2.8 introduced support for the ``asdf://`` URI scheme. These URIs are constructed just like ``http://`` or ``https://`` URIs, but the ASDF-specific scheme makes clear that the content cannot be fetched from a webserver.

Entities identified by URI
==========================

The following is a complete list of entity types that are identified by URI in ASDF:

.. _extending_uris_entities_schemas:

Schemas
-------

Schemas are expected to include an ``id`` property that contains the URI that identifies them. That URI is used when referring to the schema in calls to `asdf` library functions. We recommend the following pattern for schema URIs:

``asdf://<domain>/<project>/schemas/<name>-<version>``

Where ``<domain>`` is some domain that you control, ``<project>`` collects all entities for a particular ASDF project, ``<name>`` is the name of the schema, and ``<version>`` is the schema's version number. For example:

``asdf://example.com/example-project/schemas/foo-1.2.3``

.. _extending_uris_entities_tags:

Tags
----

Tags, which annotate typed objects in an ASDF file's YAML tree, are represented as URIs. Unlike schemas, there is no resource associated with a tag; no blob of bytes exists that corresponds to the URI. Instead, the URI alone communicates the type of a YAML object. We recommend the following pattern for tag URIs:

``asdf://<domain>/<project>/tags/<name>-<version>``

Where ``<domain>`` is some domain that you control, ``<project>`` collects all entities for a particular ASDF project, ``<name>`` is the name of the tag, and ``<version>`` is the tag's version number. For example:

``asdf://example.com/example-project/tags/foo-1.2.3``

Manifests
---------

Manifest documents are language-independent definitions of extensions to ASDF and include an ``id`` property that contains the URI that identifies them. That URI is used when referring to the manifest in calls to `asdf` library functions. We recommend the following pattern for manifest URIs:

``asdf://<domain>/<project>/manifests/<name>-<version>``

Where ``<domain>`` is some domain that you control, ``<project>`` collects all entities for a particular ASDF project, ``<name>`` is the name of the manifest, and ``<version>`` is the manifest's version number. For example:

``asdf://example.com/example-project/manifests/foo-1.2.3``

Extensions
----------

Finally, extension URIs identify extensions to the ASDF format. These URIs are included in an ASDF file's metadata to advertise the fact that additional software support (beyond a core ASDF library) is needed to properly interpret the file. Like tags, these URIs are not associated with a particular resource. We recommend the following pattern for extension URIs:

``asdf://<domain>/<project>/extensions/<name>-<version>``

Where ``<domain>`` is some domain that you control, ``<project>`` collects all entities for a particular ASDF project, ``<name>`` is the name of the extension, and ``<version>`` is the extension's version number. For example:

``asdf://example.com/example-project/extensions/foo-1.2.3``

asdf-2.9.2/docs/asdf/extending/use_cases.rst
.. _extending_use_cases:

================
Common use cases
================

This section is intended as a kind of index to the rest of the "Extending ASDF" documentation. Here we list common use cases and link to the relevant documentation sections that are needed to get the job done.

Validate an ASDF tree against a schema
======================================

The `asdf` library already validates individual tagged objects within the tree, but what if we want to validate the structure of the tree itself? Such "document schemas" can be associated with an `~asdf.AsdfFile` using the ``custom_schema`` argument, but this argument accepts a URI and the asdf library needs to know how to access the schema content associated with that URI.

1. Designate a URI for the schema. See :ref:`extending_uris_entities_schemas` for recommendations on schema URI structure.

2. Write the schema. See :ref:`extending_schemas` if you're new to authoring schemas.

3. Install the schema as an `asdf` library resource. See :ref:`extending_resources` for an overview of resources in `asdf` and options for installing them.

Serialize a new type
====================

This section summarizes the steps needed to serialize a new type to an ASDF file. We'll describe three options, starting with the most expedient and growing progressively more formal.

Quick and dirty, for personal use
---------------------------------

In this scenario, we want to serialize a new Python type to an ASDF file, but we're not planning on widely sharing the file, so we want to cut as many corners as possible. Here are the minimal steps needed to get instances of that type into the file and back again:

1. Identify the Python type to serialize. We'll need to know the fully-qualified name of the type (module path + class name).

2. Select a tag URI that will signify the type in YAML. See :ref:`extending_uris_entities_tags` for recommendations on tag URI structure.

3. Implement a `~asdf.extension.Converter` class that converts the type to YAML-serializable objects and back again. See :ref:`extending_converters` for a discussion of the Converter interface (a minimal sketch appears after this list).

4. Implement an `~asdf.extension.Extension` class which is the vehicle for plugging our converter into the asdf library. See :ref:`extending_extensions` for a discussion of the Extension interface.

5. Install the extension. There are multiple ways to do this, but the path of least resistance is to install the extension at runtime using `~asdf.config.AsdfConfig`. See :ref:`extending_extensions_installing_asdf_config`.

Now instances of our type can be added to an `~asdf.AsdfFile`'s tree and serialized to an ASDF file.
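For illustration, here is a minimal sketch of those steps for a hypothetical ``Rectangle`` class. The class, tag URI, and extension URI are all invented for this example; the `~asdf.extension.Converter` and `~asdf.extension.Extension` interfaces and `~asdf.config.AsdfConfig.add_extension` are the pieces of the actual library API being exercised:

.. code-block:: python

    import asdf
    from asdf.extension import Converter, Extension


    class Rectangle:
        def __init__(self, width, height):
            self.width = width
            self.height = height


    class RectangleConverter(Converter):
        # Invented tag URI; the type is identified by its
        # fully-qualified name (here, defined in __main__).
        tags = ["asdf://example.com/example-project/tags/rectangle-1.0.0"]
        types = ["__main__.Rectangle"]

        def to_yaml_tree(self, obj, tag, ctx):
            # Convert the custom object into YAML-serializable primitives.
            return {"width": obj.width, "height": obj.height}

        def from_yaml_tree(self, node, tag, ctx):
            # Reconstruct the custom object from the YAML node.
            return Rectangle(node["width"], node["height"])


    class RectangleExtension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/rectangle-1.0.0"
        converters = [RectangleConverter()]
        tags = ["asdf://example.com/example-project/tags/rectangle-1.0.0"]


    # Install the extension for the current session only:
    asdf.get_config().add_extension(RectangleExtension())

    af = asdf.AsdfFile({"rect": Rectangle(3.0, 4.0)})
    af.write_to("rectangle.asdf")

With the extension installed, ``Rectangle`` instances are written as tagged YAML mappings and reconstructed on read.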
For sharing with other Python users
-----------------------------------

Now say our files are getting out into the world and into the hands of other Python users. We'll want to build an installable package around our code and use the `asdf` library's entry points to make our extension more convenient to use. We should also think about adding a schema that validates our tagged objects, so if someone manually edits a file and makes a mistake, we get a clear error when `asdf` opens the file.

1. Identify the Python type to serialize. We'll need to know the fully-qualified name of the type (module path + class name).

2. Select a tag URI that will signify the type in YAML. See :ref:`extending_uris_entities_tags` for recommendations on tag URI structure.

3. Designate a URI for the schema. See :ref:`extending_uris_entities_schemas` for recommendations on schema URI structure.

4. Write the schema that will validate the tagged object. See :ref:`extending_schemas` if you're new to authoring schemas.

5. Make the schema installable as an `asdf` library resource. See :ref:`extending_resources` for an overview of resources in `asdf` and :ref:`extending_resources_entry_points` for information on installing resources via an entry point.

6. Implement a `~asdf.extension.Converter` class that converts the type to YAML-serializable objects and back again. See :ref:`extending_converters` for a discussion of the Converter interface. Refer to the schema to ensure that the Converter is writing YAML objects correctly.

7. Implement an `~asdf.extension.Extension` class which is the vehicle for plugging our converter into the `asdf` library. See :ref:`extending_extensions` for a discussion of the Extension interface. We'll need to associate the schema URI with the tag URI in our tag's `~asdf.extension.TagDefinition` object.

8. Install the extension via an entry point. See :ref:`extending_extensions_installing_entry_points`.

Now anyone who installs the package containing the entry points will be able to read, write, and validate ASDF files containing our new tag!

For sharing with users of other languages
-----------------------------------------

Finally, let's consider the case where we want to serialize instances of our type to an ASDF file that will be read using ASDF libraries written in other languages. The problem with our previous efforts is that the extension definition exists only as Python code, so here we'll want to create an additional YAML document called an extension manifest that defines the extension in a language-independent way.

1. Identify the Python type to serialize. We'll need to know the fully-qualified name of the type (module path + class name).

2. Select a tag URI that will signify the type in YAML. See :ref:`extending_uris_entities_tags` for recommendations on tag URI structure.

3. Designate a URI for the schema. See :ref:`extending_uris_entities_schemas` for recommendations on schema URI structure.

4. Write the schema that will validate the tagged object. See :ref:`extending_schemas` if you're new to authoring schemas.

5. Write an extension manifest document that describes the tag and schema that we're including in our extension. See :ref:`extending_manifests` for information on the manifest format.

6. Make the schema and manifest installable as `asdf` library resources. See :ref:`extending_resources` for an overview of resources in `asdf` and :ref:`extending_resources_entry_points` for information on installing resources via an entry point.

7. Implement a `~asdf.extension.Converter` class that converts the type to YAML-serializable objects and back again. See :ref:`extending_converters` for a discussion of the Converter interface. Refer to the schema to ensure that the Converter is writing YAML objects correctly.

8. Use `asdf.extension.ManifestExtension.from_uri` to populate an extension with the Converter and information from the manifest document (sketched below). See :ref:`extending_extensions_manifest` for instructions on using ManifestExtension.

9. Install the extension via an entry point. See :ref:`extending_extensions_installing_entry_points`.

That's it! Python users should experience the same convenience, but now the manifest document is available as a reference for developers who wish to implement support for reading our tagged objects in their language of choice.
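As a rough sketch of step 8, the extension can be assembled from the manifest at import time. The manifest URI and converter below are the invented examples from earlier; `~asdf.extension.ManifestExtension.from_uri` is the library call that ties them together:

.. code-block:: python

    from asdf.extension import ManifestExtension

    # The manifest document must already be installed as a resource
    # under this (example) URI, alongside the schema.
    extension = ManifestExtension.from_uri(
        "asdf://example.com/example-project/manifests/example-1.0.0",
        converters=[RectangleConverter()],
    )

An entry point method in the package would then return a list containing this extension (see :ref:`extending_extensions_installing_entry_points`).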
Support a new block compressor
==============================

In order to support a new compression algorithm for ASDF binary blocks, we need to implement the `~asdf.extension.Compressor` interface and install it in an extension.

1. Select a 4-byte compression code that will signify the compression algorithm.

2. Implement a `~asdf.extension.Compressor` class that associates the 4-byte code with compression and decompression methods. See :ref:`extending_compressors` for a discussion of the Compressor interface (see the sketch below).

3. Implement an `~asdf.extension.Extension` class which is the vehicle for plugging our compressor into the `asdf` library. See :ref:`extending_extensions` for a discussion of the Extension interface.

4. Install the extension via one of the two available methods. See :ref:`extending_extensions_installing` for instructions.

Now the compression algorithm will be available for both reading and writing ASDF files. Users writing files will simply need to specify the new 4-byte compression code when making calls to `asdf.AsdfFile.set_array_compression`.
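To make the steps concrete, here is a rough sketch of what such a compressor might look like, wrapping Python's built-in :mod:`bz2` module. The 4-byte label and extension URI are invented for this example, and the method signatures follow the Compressor interface described in :ref:`extending_compressors`:

.. code-block:: python

    import bz2

    from asdf.extension import Compressor, Extension


    class Bz2Compressor(Compressor):
        # Invented 4-byte code; it must not collide with codes already
        # claimed by other compressors.
        label = b"bzp2"

        def compress(self, data, **kwargs):
            # ``data`` is the raw block contents; yield one or more
            # chunks of compressed bytes.
            yield bz2.compress(data, **kwargs)

        def decompress(self, blocks, out, **kwargs):
            # ``blocks`` iterates over the compressed chunks and ``out``
            # is a pre-allocated output buffer; return the number of
            # bytes written.
            decompressor = bz2.BZ2Decompressor()
            position = 0
            for block in blocks:
                chunk = decompressor.decompress(block)
                out[position:position + len(chunk)] = chunk
                position += len(chunk)
            return position


    class Bz2Extension(Extension):
        extension_uri = "asdf://example.com/example-project/extensions/bz2-1.0.0"
        compressors = [Bz2Compressor()]

With the extension installed, a user would select the algorithm for a given array by passing the ``bzp2`` code to `asdf.AsdfFile.set_array_compression`.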
asdf-2.9.2/docs/asdf/features.rst

.. currentmodule:: asdf

*************
Core Features
*************

This section discusses the core features of the ASDF data format, and provides examples and use cases that are specific to the Python implementation.

Data Model
==========

The fundamental data object in ASDF is the ``tree``, which is a nested combination of basic data structures: dictionaries, lists, strings and numbers. In Python, these types correspond to :class:`dict`, :class:`list`, :class:`str`, and :class:`int`, :class:`float`, and :class:`complex`, respectively. The top-level tree object behaves like a Python dictionary and supports arbitrary nesting of data structures. For simple examples of creating and reading trees, see :ref:`overview`.

.. note::

    The ASDF Standard imposes a maximum size of 64 bits for signed integer literals in the tree (see `the docs `_ for details and justification). Attempting to store a larger value as a YAML literal will result in a validation error.

    For arbitrary precision integer support, see `IntegerType`.

    Integers and floats of up to 64 bits can be stored inside of :mod:`numpy` arrays (see below).

One of the key features of `asdf` is its ability to serialize :mod:`numpy` arrays. This is discussed in detail in :ref:`array-data`.

While the core `asdf` package supports serialization of basic data types and Numpy arrays, its true power comes from its ability to be extended to support serialization of a wide range of custom data types. Details on using ASDF extensions can be found in :ref:`using_extensions`. Details on creating custom ASDF extensions to support custom data types can be found in :ref:`extending`.

.. _array-data:

Array Data
==========

Much of ASDF's power and convenience comes from its ability to represent multidimensional array data. The :mod:`asdf` Python package provides native support for :mod:`numpy` arrays.

.. toctree::
    :maxdepth: 2

    arrays

.. _using_extensions:

Using extensions
================

According to Wikipedia, serialization "is the process of translating data structures or object state into a format that can be stored...and reconstructed later" [#wiki]_.

The power of ASDF is that it provides the ability to store, or serialize, the state of Python objects into a *human-readable* data format. The state of those objects can later be restored by another program in a process called deserialization.

While ASDF is capable of serializing basic Python types and Numpy arrays out of the box, it can also be extended to serialize arbitrary custom data types. This section discusses the extension mechanism from a user's perspective. For documentation on creating extensions, see :ref:`extending_extensions`.

Even though this particular implementation of ASDF necessarily serializes Python data types, in theory an ASDF implementation in another language could read the resulting file and reconstruct an analogous type in that language. Conversely, this implementation can read ASDF files that were written by other implementations of ASDF as long as the proper extensions are available.

.. toctree::
    :maxdepth: 2

    using_extensions

.. _schema_validation:

Schema validation
=================

Schema validation is used to determine whether an ASDF file is well formed. All ASDF files must conform to the schemas defined by the `ASDF Standard `_. Schema validation occurs when reading ASDF files (using `asdf.open`), and also when writing them out (using `AsdfFile.write_to` or `AsdfFile.update`).

Schema validation also plays a role when using custom extensions (see :ref:`using_extensions` and :ref:`extending_extensions`). Extensions must provide schemas for the types that they serialize. When writing a file with custom types, the output is validated against the schemas corresponding to those types. If the appropriate extension is installed when reading a file with custom types, then the types will be validated against the schemas provided by the corresponding extension.

.. _custom-schemas:

Custom schemas
--------------

Every ASDF file is validated against the ASDF Standard, and also against any schemas provided by custom extensions. However, it is sometimes useful for particular applications to impose additional restrictions when deciding whether a given file is valid or not.

For example, consider an application that processes digital image data. The application expects the file to contain an image, and also some metadata about how the image was created. The following example schema reflects these expectations:

.. code:: yaml

    %YAML 1.1
    ---
    id: "http://example.com/schemas/your-custom-schema"
    $schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
    type: object
    properties:
      image:
        description: An ndarray containing image data.
        $ref: "ndarray-1.0.0"
      metadata:
        type: object
        description: Metadata about the image
        properties:
          time:
            description: |
              A timestamp for when the image was created, in UTC.
            type: string
            format: date-time
          resolution:
            description: |
              A two-element array giving the resolution of the image (N x M).
            type: array
            items:
              type: integer
            minItems: 2
            maxItems: 2
    required: [image, metadata]
    additionalProperties: true

This schema restricts the kinds of files that will be accepted as valid to those that contain a top-level ``image`` property that is an ``ndarray``, and a top-level ``metadata`` property that contains information about the time the image was taken and the resolution of the image.

In order to use this schema for a secondary validation pass, we pass the `custom_schema` argument to either `asdf.open` or the `AsdfFile` constructor. Assume that the schema file lives in ``image_schema.yaml``, and we wish to open a file called ``image.asdf``. We would open the file with the following code:
.. code::

    import asdf

    af = asdf.open('image.asdf', custom_schema='image_schema.yaml')

Similarly, if we wished to use this schema when creating new files:

.. code::

    new_af = asdf.AsdfFile(custom_schema='image_schema.yaml')
    ...

If your custom schema is registered with ASDF in an extension, you may pass the schema URI (``http://example.com/schemas/your-custom-schema``, in this case) instead of a file path.

.. _top-level core schema: https://github.com/asdf-format/asdf-standard/blob/master/schemas/stsci.edu/asdf/core/asdf-1.1.0.yaml

.. _version_and_compat:

Versioning and Compatibility
============================

There are several different versions to keep in mind when discussing ASDF:

* The software package version
* The ASDF Standard version
* The ASDF file format version
* Individual tag, schema, and extension versions

Each ASDF file contains information about the various versions that were used to create the file. The most important of these are the ASDF Standard version and the ASDF file format version. A particular version of the ASDF software package will explicitly provide support for specific combinations of these versions.

Tag, schema, and extension versions are also important for serializing and deserializing data types that are stored in ASDF files. A detailed discussion of these versions from a user perspective can be found in :ref:`custom_type_versions`.

Since ASDF is designed to serve as an archival format, this library is careful to maintain backwards compatibility with older versions of the ASDF Standard, ASDF file format, and core tags. However, since deserializing custom tags requires other software packages, backwards compatibility is often contingent on the available versions of such software packages.

In general, forward compatibility with newer versions of the ASDF Standard and ASDF file format is not supported by the software.

When creating new ASDF files, it is possible to control the version of the file format that is used. This can be specified by passing the `version` argument to either the `AsdfFile` constructor when the file object is created, or to the `AsdfFile.write_to` method when it is written. By default, the latest version of the file format will be used. Note that this option has no effect on the versions of tags from custom extensions.

External References
===================

Tree References
---------------

ASDF files may reference items in the tree in other ASDF files. The syntax used in the file for this is called "JSON Pointer", but users of `asdf` can largely ignore that.

First, we'll create an ASDF file with a couple of arrays in it:

.. runcode::

    import asdf
    from asdf import AsdfFile
    import numpy as np

    tree = {
        'a': np.arange(0, 10),
        'b': np.arange(10, 20)
    }

    target = AsdfFile(tree)
    target.write_to('target.asdf')

.. asdf:: target.asdf

Then we will reference those arrays in a couple of different ways. First, we'll load the source file in Python and use the `make_reference` method to generate a reference to array ``a``. Second, we'll work at the lower level by manually writing a JSON Pointer to array ``b``, which doesn't require loading or having access to the target file.

.. runcode::

    ff = AsdfFile()

    with asdf.open('target.asdf') as target:
        ff.tree['my_ref_a'] = target.make_reference(['a'])

    ff.tree['my_ref_b'] = {'$ref': 'target.asdf#b'}

    ff.write_to('source.asdf')

.. asdf:: source.asdf

Calling `~asdf.AsdfFile.find_references` will look up all of the references so they can be used as if they were local to the tree.
It doesn't actually move any of the data, and keeps the references as references.

.. runcode::

    with asdf.open('source.asdf') as ff:
        ff.find_references()
        assert ff.tree['my_ref_b'].shape == (10,)

On the other hand, calling `~asdf.AsdfFile.resolve_references` places all of the referenced content directly in the tree, so when we write it out again, all of the external references are gone, with the literal content in its place.

.. runcode::

    with asdf.open('source.asdf') as ff:
        ff.resolve_references()
        ff.write_to('resolved.asdf')

.. asdf:: resolved.asdf

A similar feature provided by YAML, anchors and aliases, also provides a way to support references within the same file. These are supported by `asdf`; however, the JSON Pointer approach is generally favored because:

- It is possible to reference elements in another file
- Elements are referenced by location in the tree, not by an identifier; therefore, anything can be referenced

Anchors and aliases are handled automatically by `asdf` when the data structure is recursive. For example, here is a dictionary that is included twice in the same tree:

.. runcode::

    d = {'foo': 'bar'}
    d['baz'] = d

    tree = {'d': d}

    ff = AsdfFile(tree)
    ff.write_to('anchors.asdf')

.. asdf:: anchors.asdf

.. _array-references:

Array References
----------------

ASDF files can refer to array data that is stored in other files using the `ExternalArrayReference` type. External files need not be ASDF files: ASDF is completely agnostic as to the format of the external file.

The ASDF external array reference does not define how the external data file will be resolved; in fact it does not even check for the existence of the external file. It simply provides a way for ASDF files to refer to arrays that exist in external files.

Creating an external array reference is simple. Only four pieces of information are required:

* The name of the external file. Since ASDF does not itself resolve the file or check for its existence, the format of the name is not important. In most cases the name will be a path relative to the ASDF file itself, or a URI for a network resource.

* The data type of the array data. This is a string representing any valid `numpy.dtype`.

* The shape of the data array. This is a tuple representing the dimensions of the array data.

* The array data ``target``. This is either an integer or a string that indicates to the user something about how the data array should be accessed in the external file. For example, if there are multiple data arrays in the external file, the ``target`` might be an integer index. Or if the external file is an ASDF file, the ``target`` might be a string indicating the key to use in the external file's tree. The value and format of the ``target`` field is completely arbitrary since ASDF will not use it itself.

As an example, we will create a reference to an external CSV file. We will assume that one of the rows of the CSV file contains the array data we care about:

.. runcode::

    import asdf

    csv_data_row = 10  # The row of the CSV file containing the data we want
    csv_row_size = 100  # The size of the array

    extref = asdf.ExternalArrayReference('data.csv', csv_data_row, "int64", (csv_row_size,))

    tree = {'csv_data': extref}

    af = asdf.AsdfFile(tree)
    af.write_to('external_array.asdf')

.. asdf:: external_array.asdf

When reading a file containing external references, the user is responsible for using the information in the `ExternalArrayReference` type to open the external file and retrieve the associated array data.
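Continuing the CSV example, the consuming code might resolve the reference along the following lines. This is only a sketch: the row-index convention is our own, and it assumes the reference exposes its four fields as ``fileuri``, ``target``, ``dtype``, and ``shape`` attributes:

.. code:: python

    import csv

    import asdf
    import numpy as np

    with asdf.open('external_array.asdf') as af:
        ref = af.tree['csv_data']

        # By our convention, ``target`` is the index of the CSV row
        # that holds the array values.
        with open(ref.fileuri) as fd:
            row = list(csv.reader(fd))[ref.target]

        data = np.array(row, dtype=ref.dtype).reshape(ref.shape)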
Saving history entries
======================

`asdf` has a convenience method for recording the history of transformations that have been performed on a file. Given a `~asdf.AsdfFile` object, call `~asdf.AsdfFile.add_history_entry` with a description of the change and, optionally, a description of the software (i.e. your software, not `asdf`) that performed the operation.

.. runcode::

    from asdf import AsdfFile
    import numpy as np

    tree = {
        'a': np.random.rand(32, 32)
    }

    ff = AsdfFile(tree)
    ff.add_history_entry(
        "Initial random numbers",
        {'name': 'asdf examples',
         'author': 'John Q. Public',
         'homepage': 'http://github.com/asdf-format/asdf',
         'version': '0.1'})
    ff.write_to('example.asdf')

.. asdf:: example.asdf

`asdf` automatically saves history metadata about the extensions that were used to create the file. This information is used when opening files to determine if the proper extensions are installed (see :ref:`extension_checking` for more details).

.. _asdf-in-fits:

Saving ASDF in FITS
===================

.. note::

    This section is about packaging entire ASDF files inside of `FITS data format `_ files. This is probably only of interest to astronomers. Making use of this feature requires the `astropy` package to be installed.

Sometimes you may need to store the structured data supported by ASDF inside of a FITS file in order to be compatible with legacy tools that support only FITS.

First, create an `~astropy.io.fits.HDUList` object using `astropy.io.fits`. Here, we are building an `~astropy.io.fits.HDUList` from scratch, but it could also have been loaded from an existing file. We will create a FITS file that has two image extensions, SCI and DQ respectively.

.. runcode::

    from astropy.io import fits

    hdulist = fits.HDUList()
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=float), name='SCI'))
    hdulist.append(fits.ImageHDU(np.arange(512, dtype=float), name='DQ'))

Next we make a tree structure out of the data in the FITS file. Importantly, we use the *same* array references in the FITS `~astropy.io.fits.HDUList` and store them in the tree. By doing this, ASDF will automatically refer to the data in the regular FITS extensions.

.. runcode::

    tree = {
        'model': {
            'sci': {
                'data': hdulist['SCI'].data,
            },
            'dq': {
                'data': hdulist['DQ'].data,
            }
        }
    }

Now we take both the FITS `~astropy.io.fits.HDUList` and the ASDF tree and create an `AsdfInFits` object.

.. runcode::

    from asdf import fits_embed

    ff = fits_embed.AsdfInFits(hdulist, tree)
    ff.write_to('embedded_asdf.fits')

.. runcode:: hidden

    from astropy.io import fits

    with fits.open('embedded_asdf.fits') as new_hdulist:
        with open('content.asdf', 'wb') as fd:
            fd.write(new_hdulist['ASDF'].data.tobytes())

The special ASDF extension in the resulting FITS file contains the following data. Note that the data source of the arrays uses the ``fits:`` prefix to indicate that the data comes from a FITS extension:

.. asdf:: content.asdf

To load an ASDF-in-FITS file, simply open it using `asdf.open`. The returned value will be an `AsdfInFits` object, which can be used in the same way as any other `AsdfFile` object.

.. runcode::

    with asdf.open('embedded_asdf.fits') as asdf_in_fits:
        science = asdf_in_fits.tree['model']['sci']

.. rubric:: Footnotes

.. [#wiki] https://en.wikipedia.org/wiki/Serialization

Rendering ASDF trees
====================

The `asdf.info` function prints a representation of an ASDF tree to stdout. For example:

.. 
code:: python >>> asdf.info('path/to/some/file.asdf') # doctest: +SKIP root (AsdfObject) ├─asdf_library (Software) │ ├─author (str): The ASDF Developers │ ├─homepage (str): http://github.com/asdf-format/asdf │ ├─name (str): asdf │ └─version (str): 2.5.1 ├─history (dict) │ └─extensions (list) ... └─data (dict) └─example_key (str): example value The first argument may be a ``str`` or ``pathlib.Path`` filesystem path, or an `AsdfFile` or sub-node of an ASDF tree. By default, `asdf.info` limits the number of lines, and line length, of the displayed tree. The ``max_rows`` parameter controls the number of lines, and ``max_cols`` controls the line length. Set either to ``None`` to disable that limit. An integer ``max_rows`` will be interpreted as an overall limit on the number of displayed lines. If ``max_rows`` is a tuple, then each member limits lines per node at the depth corresponding to its tuple index. For example, to show all top-level nodes and 5 of each's children: .. code:: python >>> asdf.info('file.asdf', max_rows=(None, 5)) # doctest: +SKIP ... The `AsdfFile.info` method behaves similarly to `asdf.info`, rendering the tree of the associated `AsdfFile`. Normally `asdf.info` will not show the contents of asdf nodes turned into Python custom objects, but if that object supports a special method, you may see the contents of such objects. See :ref:`exposing_extension_object_internals` for how to implement such support for `asdf.info` and `asdf.search`. Searching the ASDF tree ======================= The `AsdfFile` search interface provides a way to interactively discover the locations and values of nodes within the ASDF tree. We can search for nodes by key/index, type, or value. Basic usage ----------- Initiate a search by calling `AsdfFile.search` on an open file: .. code:: python >>> af.search() # doctest: +SKIP root (AsdfObject) ├─asdf_library (Software) │ ├─author (str): The ASDF Developers │ ├─homepage (str): http://github.com/asdf-format/asdf │ ├─name (str): asdf │ └─version (str): 2.5.1 ├─history (dict) │ └─extensions (list) ... └─data (dict) └─example_key (str): example value >>> af.search('example') # doctest: +SKIP root (AsdfObject) └─data (dict) └─example_key (str): example value .. currentmodule:: asdf.search The search returns an `AsdfSearchResult` object that displays in the Python console as a rendered tree. For single-node search results, the `AsdfSearchResult.path` property contains the Python code required to reference that node directly: .. code:: python >>> af.search('example').path # doctest: +SKIP "root['data']['example_key']" While the `AsdfSearchResult.node` property contains the actual value of the node: .. code:: python >>> af.search('example').node # doctest: +SKIP 'example value' For searches with multiple matching nodes, use the `AsdfSearchResult.paths` and `AsdfSearchResult.nodes` properties instead: .. code:: python >>> af.search('duplicate_key').paths # doctest: +SKIP ["root['data']['duplicate_key']", "root['other_data']['duplicate_key']"] >>> af.search('duplicate_key').nodes # doctest: +SKIP ["value 1", "value 2"] To replace matching nodes with a new value, use the `AsdfSearchResult.replace` method: .. code:: python >>> af.search('example').replace('replacement value') # doctest: +SKIP >>> af.search('example').node # doctest: +SKIP 'replacement value' .. currentmodule:: asdf The first argument to `AsdfFile.search` searches by dict key or list/tuple index. We can also search by type, value, or any combination thereof: .. 
code:: python

    >>> af.search('foo')  # Find nodes with key containing the string 'foo'  # doctest: +SKIP
    ...
    >>> af.search(type=int)  # Find nodes that are instances of int  # doctest: +SKIP
    ...
    >>> af.search(value=10)  # Find nodes whose value is equal to 10  # doctest: +SKIP
    ...
    >>> af.search('foo', type=int, value=10)  # Find the intersection of the above  # doctest: +SKIP

Chaining searches
-----------------

The return value of `AsdfFile.search`, `asdf.search.AsdfSearchResult`, has its own search method, so it's possible to chain searches together. This is useful when you need to see intermediate results before deciding how to further narrow the search.

.. code:: python

    >>> af.search()  # See an overview of the entire ASDF tree  # doctest: +SKIP
    ...
    >>> af.search().search(type='NDArrayType')  # Find only ndarrays  # doctest: +SKIP
    ...
    >>> af.search().search(type='NDArrayType').search('err')  # Only ndarrays with 'err' in the key  # doctest: +SKIP

Descending into child nodes
---------------------------

Another way to narrow the search is to use the index operator to descend into a child node of the current tree root:

.. code:: python

    >>> af.search()['data']  # Restrict search to the 'data' child  # doctest: +SKIP
    ...
    >>> af.search()['data'].search(type=int)  # Find integer descendants of 'data'  # doctest: +SKIP

Regular expression searches
---------------------------

Any string argument to search is interpreted as a regular expression. For example, we can search for nodes whose keys start with a particular string:

.. code:: python

    >>> af.search('foo')  # Find nodes with 'foo' anywhere in the key  # doctest: +SKIP
    ...
    >>> af.search('^foo')  # Find only nodes whose keys start with 'foo'  # doctest: +SKIP
    ...

Note that all node keys (even list indices) will be converted to strings before the regular expression is matched:

.. code:: python

    >>> af.search('^7$')  # Returns all nodes with key '7' or index 7  # doctest: +SKIP
    ...

When the ``type`` argument is a string, the search compares against the fully-qualified class name of each node:

.. code:: python

    >>> af.search(type='asdf.tags.core.Software')  # Find instances of ASDF's Software type  # doctest: +SKIP
    ...
    >>> af.search(type='^asdf\.')  # Find all ASDF objects  # doctest: +SKIP
    ...

When the ``value`` argument is a string, the search compares against the string representation of each node's value.

.. code:: python

    >>> af.search(value='^[0-9]{4}-[0-9]{2}-[0-9]{2}$')  # Find values that look like dates  # doctest: +SKIP
    ...

Arbitrary search criteria
-------------------------

If ``key``, ``type``, and ``value`` aren't sufficient, we can also provide a callback function to search by arbitrary criteria. The ``filter`` parameter accepts a callable that receives the node under consideration, and returns ``True`` to keep it or ``False`` to reject it from the search results. For example, to search for NDArrayType with a particular shape:

.. code:: python

    >>> af.search(type='NDArrayType', filter=lambda n: n.shape[0] == 1024)  # doctest: +SKIP
    ...

Formatting search results
-------------------------

.. currentmodule:: asdf.search

The `AsdfSearchResult` object displays its content as a rendered tree with reasonable defaults for the maximum number of lines and columns displayed. To change those values, we call `AsdfSearchResult.format`:

.. code:: python

    >>> af.search(type=float)  # Displays limited rows  # doctest: +SKIP
    ...
    >>> af.search(type=float).format(max_rows=None)  # Show all matching rows  # doctest: +SKIP
    ...

Like `AsdfSearchResult.search`, calls to format may be chained:

.. 
code:: python >>> af.search('time').format(max_rows=10).search(type=str).format(max_rows=None) # doctest: +SKIP ... ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/install.rst0000644000537500020070000000322000000000000017640 0ustar00wjamiesonSTSCI\science.. _installation: ************ Installation ************ There are several different ways to install the `asdf` package. Each is described in detail below. Requirements ============ The `asdf` package has several dependencies which are listed in the project's setup.cfg file. All dependencies are available on pypi and will be automatically installed along with `asdf`. Support for units, time, and transform tags requires an implementation of these types. One recommended option is the `astropy `__ package. Optional support for `lz4 `__ compression is provided by the `lz4 `__ package. Installing with pip =================== .. include:: ../../README.rst :start-after: begin-pip-install-text: :end-before: begin-source-install-text: Installing with conda ===================== `asdf` is also distributed as a `conda `__ package via the `conda-forge `__ channel. It is also available through the `astroconda `__ channel. To install `asdf` within an existing conda environment:: $ conda install -c conda-forge asdf To create a new conda environment and install `asdf`:: $ conda create -n new-env-name -c conda-forge python asdf Building from source ==================== .. include:: ../../README.rst :start-after: begin-source-install-text: :end-before: end-source-install-text: Running the tests ================= .. include:: ../../README.rst :start-after: begin-testing-text: :end-before: end-testing-text: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/overview.rst0000644000537500020070000000225500000000000020047 0ustar00wjamiesonSTSCI\science.. currentmodule:: asdf .. _overview: ******** Overview ******** Let's start by taking a look at a few basic ASDF use cases. This will introduce you to some of the core features of ASDF and will show you how to get started with using ASDF in your own projects. To follow along with this tutorial, you will need to install the :mod:`asdf` package. See :ref:`installation` for details. Hello World =========== At its core, ASDF is a way of saving nested data structures to YAML. Here we save a :class:`dict` with the key/value pair ``'hello': 'world'``. .. runcode:: from asdf import AsdfFile # Make the tree structure, and create a AsdfFile from it. tree = {'hello': 'world'} ff = AsdfFile(tree) ff.write_to("test.asdf") # You can also make the AsdfFile first, and modify its tree directly: ff = AsdfFile() ff.tree['hello'] = 'world' ff.write_to("test.asdf") .. asdf:: test.asdf Creating Files ============== .. include:: ../../README.rst :start-after: begin-create-file-text: :end-before: end-create-file-text: Reading Files ============= .. include:: ../../README.rst :start-after: begin-read-file-text: :end-before: end-read-file-text: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/asdf/user_api.rst0000644000537500020070000000032100000000000020000 0ustar00wjamiesonSTSCI\science******** User API ******** .. automodapi:: asdf :include-all-objects: :inherited-members: :skip: ValidationError :skip: AsdfExtension .. automodapi:: asdf.search .. 
automodapi:: asdf.config

asdf-2.9.2/docs/asdf/using_extensions.rst

.. currentmodule:: asdf

The built-in extension
----------------------

The ability to serialize the following types is provided by `asdf`'s built-in extension:

* `dict`
* `list`
* `str`
* `int`
* `float`
* `complex`
* `numpy.ndarray`

The built-in extension is packaged with `asdf` and is automatically used when reading and writing files. Users cannot control the use of the built-in extension and in general they need not concern themselves with the details of its implementation. However, it is useful to be aware that the built-in extension is always in effect when reading and writing ASDF files.

Custom types
------------

For the purposes of this documentation, a "custom type" is any data type that cannot be serialized by the built-in extension. In order for a particular custom type to be serialized, a special class called a "converter" must be implemented. Each converter defines how the corresponding custom type will be serialized and deserialized. More details on how converters are implemented can be found in :ref:`extending_converters`. Users should never have to refer to converter implementations directly; they simply enable `asdf` to recognize and process custom types.

In addition to converters, each custom type may have a corresponding schema, which is used for validation. The definition of the schema, if present, is closely tied to the definition of the converter. More details on schema validation can be found in :ref:`schema_validation`. Schemas are generally versioned and change in sync with their associated converters. The version number will increase whenever a schema (and therefore the converter implementation) changes.

Extensions
----------

In order for the converters and schemas to be used by `asdf`, they must be packaged into an **extension** class. In general, the details of extensions are irrelevant to users of `asdf`. However, users need to be aware of extensions in the following two scenarios:

* when storing custom data types to files to be written
* when reading files that contain custom data types

These scenarios require the use of custom extensions (the built-in extension is always used). There are two ways to use custom extensions, which are detailed below in :ref:`other_packages` and :ref:`explicit_extensions`.

Writing custom types to files
*****************************

`asdf` is not capable of serializing any custom type unless an extension is provided that defines how to serialize that type. Attempting to do so will cause an error when trying to write the file. For details on developing support for custom types and extensions, see :ref:`extending_extensions`.

.. _reading_custom_types:

Reading files with custom types
*******************************

The `asdf` software is capable of reading files that contain custom data types even if the extension that was used to create the file is not present. However, the extension is required in order to properly deserialize the original type. If the necessary extension is **not** present, the custom data types will simply appear in the tree as a nested combination of basic data types. The structure of this data will mirror the structure of the YAML objects in the ASDF file. In this case, a warning will occur by default to indicate to the user that the custom type in the file was not recognized and cannot be deserialized. To suppress these warnings, users should pass ``ignore_unrecognized_tag=True`` to `asdf.open`.
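For example, to open such a file quietly and work with the raw tree (the file name here is hypothetical):

.. code-block:: python

    import asdf

    # Unrecognized tagged objects are presented as plain dicts and
    # lists instead of their custom types; no warning is emitted.
    with asdf.open("observation.asdf", ignore_unrecognized_tag=True) as af:
        tree = af.tree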
Even if an extension for the custom type is present, it does not guarantee that the type can be deserialized successfully. Instantiating the custom type may involve additional software dependencies, which, if not present, will cause an error when the type is deserialized. Users should be aware of the dependencies that are required for instantiating custom types when reading ASDF files.

.. _custom_type_versions:

Custom types, extensions, and versioning
----------------------------------------

Tags and schemas that follow best practices are versioned. This allows changes to tags and schemas to be recorded, and it allows `asdf` to define behavior with respect to version compatibility.

Tag and schema versions may change for several reasons. One common reason is to reflect a change to the API of the custom type that a tag represents. This typically corresponds to an update to the version of the software that defines that custom type.

Since ASDF is designed to be an archival file format, extension authors are encouraged to maintain backwards compatibility with all older tag versions.

Reading files
*************

When `asdf` encounters a tagged object in a file, it will compare the URI of the tag in the file with the list of tags handled by available converters. The first matching converter will be selected to deserialize the object. If no such converter exists, the library will emit a warning and the object will be presented to the user in its primitive form.

If multiple converters that handle the same tag are present, the first found by the library will be used. Users may disable a converter by removing its extension with the `~asdf.config.AsdfConfig.remove_extension` method.

Writing files
*************

When writing an object to a file, `asdf` compares the object's type to the list of types handled by available converters. The first matching converter will be selected to serialize the object. If no such converter exists, the library will raise an error.

If multiple converters that handle the same type are present, the first found by the library will be used. Users may disable a converter by removing its extension with the `~asdf.config.AsdfConfig.remove_extension` method.

.. _other_packages:

Extensions from other packages
------------------------------

Some external packages may define extensions that allow `asdf` to recognize some or all of the types that are defined by that package. Such packages may install the extension class as part of the package itself (details for developers can be found in :ref:`extending_extensions_installing_entry_points`).

If the package installs its extension, then `asdf` will automatically detect the extension and use it when processing any files. No specific action is required by the user in order to successfully read and write custom types defined by the extension for that particular package.

Users can use the ``extensions`` command of the ``asdftool`` command line tool in order to determine which packages in the current Python environment have installed ASDF extensions:

.. 
code-block:: none $ asdftool extensions -s Extension Name: 'bizbaz' (from bizbaz 1.2.3) Class: bizbaz.io.asdf.extension.BizbazExtension Extension Name: 'builtin' (from asdf 2.0.0) Class: asdf.extension.BuiltinExtension The output will always include the built-in extension, but may also display other extensions from other packages, depending on what is installed. .. _explicit_extensions: Explicit use of extensions -------------------------- Sometimes no packaged extensions are provided for the types you wish to serialize. In this case, it is necessary to explicitly install any necessary extension classes when reading and writing files that contain custom types. The config object returned from `asdf.get_config` offers an `~asdf.config.AsdfConfig.add_extension` method that can be used to install an extension for the remainder of the current Python session. Consider the following example where there exists a custom type ``MyCustomType`` that needs to be written to a file. An extension is defined ``MyCustomExtension`` that contains a converter that can serialize and deserialize ``MyCustomType``. Since ``MyCustomExtension`` is not installed by any package, we will need to manually install it: .. code-block:: python import asdf ... asdf.get_config().add_extension(MyCustomExtension()) af = asdf.AsdfFile() af.tree = {'thing': MyCustomType('foo') } # This call would cause an error if the proper extension was not # provided to the constructor af.write_to('custom.asdf') Note that the extension class must actually be instantiated when it is passed to `~asdf.config.AsdfConfig.add_extension`. To read the file (in a new session) we again need to install the extension first: .. code-block:: python import asdf asdf.get_config().add_extension(MyCustomExtension()) af = asdf.open('custom.asdf') .. _extension_checking: Extension checking ------------------ When writing ASDF files using this software, metadata about the extensions that were used to create the file will be added to the file itself. This includes the extension's URI, which uniquely identifies a particular version of the extension. When reading files with extension metadata, `asdf` can check whether the required extensions are present before processing the file. If a required extension is not present, `asdf` will issue a warning. It is possible to turn these warnings into errors by using the `strict_extension_check` parameter of `asdf.open`. If this parameter is set to `True`, then opening the file will fail if any of the required extensions are missing. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/conf.py0000644000537500020070000001334600000000000016034 0ustar00wjamiesonSTSCI\science# Astropy documentation build configuration file. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this file. # # All configuration values have a default. Some values are defined in # the global Astropy configuration which is loaded here before anything else. # See astropy.sphinx.conf for which values are set there. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# sys.path.insert(0, os.path.abspath('..')) # IMPORTANT: the above commented section was generated by sphinx-quickstart, but # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left # commented out with this explanation to make it clear why this should not be # done. If the sys.path entry above is added, when the astropy.sphinx.conf # import occurs, it will import the *source* version of astropy instead of the # version installed (if invoked as "make html" or directly with sphinx), or the # version in the build directory (if "python setup.py build_sphinx" is used). # Thus, any C-extensions that are needed to build the documentation will *not* # be accessible, and the documentation will not build correctly. import os import sys import datetime # Ensure documentation examples are determinstically random. import numpy try: numpy.random.seed(int(os.environ['SOURCE_DATE_EPOCH'])) except KeyError: pass try: from sphinx_astropy.conf.v1 import * except ImportError: print('ERROR: the documentation requires the sphinx-astropy package to be installed') sys.exit(1) # Get configuration information from setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')]) setup_cfg = dict(conf.items('metadata')) # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.2' # To perform a Sphinx version check that needs to be more specific than # major.minor, call `check_sphinx_version("x.y.z")` here. # check_sphinx_version("1.2.1") # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns.append('_templates') # This is added to the end of RST files - a good place to put substitutions to # be used globally. rst_epilog += """ """ # -- Project information ------------------------------------------------------ # This does not *have* to match the package name, but typically does project = setup_cfg['name'] author = setup_cfg['author'] copyright = '{0}, {1}'.format( datetime.datetime.now().year, setup_cfg['author']) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. from pkg_resources import get_distribution release = get_distribution(setup_cfg['name']).version # for example take major/minor version = '.'.join(release.split('.')[:2]) # -- Options for HTML output --------------------------------------------------- # A NOTE ON HTML THEMES # The global astropy configuration uses a custom theme, 'bootstrap-astropy', # which is installed along with astropy. A different theme can be used or # the options for this theme can be modified by overriding some of the # variables set in the global configuration. The variables set in the # global configuration are listed below, commented out. # Add any paths that contain custom themes here, relative to this directory. # To use a different custom theme, add the directory containing the theme. #html_theme_path = [] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. To override the custom theme, set this to the # name of a builtin theme or the name of a custom theme in html_theme_path. 
html_theme = 'alabaster' html_theme_options = { 'github_user': 'asdf-format', 'github_repo': 'asdf', 'github_button': 'true', 'fixed_sidebar': 'true', 'page_width': '90%', } html_static_path = ['_static'] # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = '' # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '' # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = '{0} v{1}'.format(project, release) # Output file base name for HTML help builder. htmlhelp_basename = project + 'doc' # -- Options for LaTeX output -------------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [('index', project + '.tex', project + u' Documentation', author, 'manual')] # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [('index', project.lower(), project + u' Documentation', [author], 1)] sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname('__file__')), 'sphinxext')) extensions += ['example'] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/docs/index.rst0000644000537500020070000000415100000000000016370 0ustar00wjamiesonSTSCI\science************************************** ASDF - Advanced Scientific Data Format ************************************** `asdf` is a tool for reading and writing Advanced Scientific Data Format (ASDF) files. .. include:: ../README.rst :start-after: begin-summary-text: :end-before: end-summary-text: .. note:: This is the **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat - if you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, go here: http://seismic-data.org/ Getting Started =============== .. toctree:: :maxdepth: 2 asdf/install asdf/overview asdf/features asdf/config asdf/asdf_tool asdf/changes .. _extending: Extending ASDF ============== .. toctree:: :maxdepth: 2 asdf/extending/use_cases asdf/extending/uris asdf/extending/schemas asdf/extending/resources asdf/extending/converters asdf/extending/extensions asdf/extending/manifests asdf/extending/compressors asdf/extending/legacy API Documentation ================= .. toctree:: :maxdepth: 1 asdf/user_api asdf/developer_api Developer Overview ================== Currently a work in progress. Intended to give an overview of how the various parts of ASDF interact and which modules do what and how. .. toctree:: :maxdepth: 1 asdf/developer_overview asdf/developer_versioning Contributing and reporting issues ================================= We welcome feedback and contributions of all kinds. Contributions of code, documentation, or general feedback are all appreciated. Feature requests and bug reports for the Python implementation can be posted at `asdf's github page `_. The ASDF Standard itself also has a repository on github. Suggestions for improvements to the ASDF Standard can be reported `here `_. 
See also ======== - The `Advanced Scientific Data Format (ASDF) standard `__ - `asdf` Python package distribution on `pypi `_ Index ===== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/make.bat0000644000537500020070000001064100000000000016135 0ustar00wjamiesonSTSCI\science@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. 
goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9842284 asdf-2.9.2/docs/sphinxext/0000755000537500020070000000000000000000000016560 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/docs/sphinxext/__init__.py0000644000537500020070000000000000000000000020657 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644265882.0 asdf-2.9.2/docs/sphinxext/example.py0000644000537500020070000001121500000000000020565 0ustar00wjamiesonSTSCI\scienceimport atexit import io import os import shutil import tempfile import textwrap import codecs from docutils.parsers.rst import Directive from docutils import nodes from sphinx.util.nodes import set_source_info import asdf from asdf import AsdfFile from asdf.constants import ASDF_MAGIC, BLOCK_FLAG_STREAMED from asdf import versioning version_string = str(versioning.default_version) TMPDIR = tempfile.mkdtemp() def delete_tmpdir(): shutil.rmtree(TMPDIR) GLOBALS = {} FLAGS = { BLOCK_FLAG_STREAMED: "BLOCK_FLAG_STREAMED" } class RunCodeDirective(Directive): has_content = True optional_arguments = 1 def run(self): code = textwrap.dedent('\n'.join(self.content)) cwd = os.getcwd() os.chdir(TMPDIR) try: try: exec(code, GLOBALS) except Exception: print(code) raise literal = nodes.literal_block(code, code) literal['language'] = 'python' set_source_info(self, literal) finally: os.chdir(cwd) if 'hidden' not in self.arguments: return [literal] else: return [] class AsdfDirective(Directive): required_arguments = 1 optional_arguments = 1 def run(self): filename = self.arguments[0] cwd = os.getcwd() os.chdir(TMPDIR) parts = [] try: ff = AsdfFile() code = AsdfFile._open_impl(ff, filename, _get_yaml_content=True) code = '{0} {1}\n'.format(ASDF_MAGIC, version_string) + code.strip().decode('utf-8') literal = nodes.literal_block(code, code) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) kwargs = dict() # Use the ignore_unrecognized_tag parameter as a proxy for both options kwargs['ignore_unrecognized_tag'] = 'ignore_unrecognized_tag' in self.arguments kwargs['ignore_missing_extensions'] = 'ignore_unrecognized_tag' in self.arguments with asdf.open(filename, **kwargs) as ff: for i, block in 
enumerate(ff.blocks.internal_blocks): data = codecs.encode(block.data.tobytes(), 'hex') if len(data) > 40: data = data[:40] + '...'.encode() allocated = block._allocated size = block._size data_size = block._data_size flags = block._flags if flags & BLOCK_FLAG_STREAMED: allocated = size = data_size = 0 lines = [] lines.append('BLOCK {0}:'.format(i)) human_flags = [] for key, val in FLAGS.items(): if flags & key: human_flags.append(val) if len(human_flags): lines.append(' flags: {0}'.format(' | '.join(human_flags))) if block.input_compression: lines.append(' compression: {0}'.format(block.input_compression)) lines.append(' allocated_size: {0}'.format(allocated)) lines.append(' used_size: {0}'.format(size)) lines.append(' data_size: {0}'.format(data_size)) lines.append(' data: {0}'.format(data)) code = '\n'.join(lines) literal = nodes.literal_block(code, code) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) internal_blocks = list(ff.blocks.internal_blocks) if (len(internal_blocks) and internal_blocks[-1].array_storage != 'streamed'): buff = io.BytesIO() ff.blocks.write_block_index(buff, ff) block_index = buff.getvalue().decode('utf-8') literal = nodes.literal_block(block_index, block_index) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) finally: os.chdir(cwd) result = nodes.literal_block() textnodes, messages = self.state.inline_text(filename, self.lineno) title = nodes.title(filename, '', *textnodes) result += title result += parts return [result] def setup(app): app.add_directive('runcode', RunCodeDirective) app.add_directive('asdf', AsdfDirective) atexit.register(delete_tmpdir) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/pyproject.toml0000644000537500020070000000011400000000000016506 0ustar00wjamiesonSTSCI\science[build-system] requires = ["setuptools>=30.3.0", "setuptools_scm", "wheel"] ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9847572 asdf-2.9.2/pytest_asdf/0000755000537500020070000000000000000000000016123 5ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1643407967.0 asdf-2.9.2/pytest_asdf/__init__.py0000644000537500020070000000000000000000000020222 0ustar00wjamiesonSTSCI\science././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266522.0 asdf-2.9.2/pytest_asdf/plugin.py0000644000537500020070000002372700000000000020002 0ustar00wjamiesonSTSCI\scienceimport io import os import pathlib import warnings import yaml import pytest import numpy as np # Avoid all imports of asdf at this level in order to avoid circular imports def pytest_addoption(parser): parser.addini( "asdf_schema_root", "Root path indicating where schemas are stored") parser.addini( "asdf_schema_skip_names", "Base names of files to skip in schema tests") parser.addini( "asdf_schema_skip_tests", "List of tests to skip, one per line, in format <schema path suffix>::<test name>") parser.addini( "asdf_schema_xfail_tests", "List of tests to xfail, one per line, in format <schema path suffix>::<test name>") parser.addini( "asdf_schema_skip_examples", "Base names of schemas whose examples should not be tested") parser.addini( "asdf_schema_tests_enabled", "Controls whether schema tests are enabled by default", type="bool", default=False, ) parser.addini( "asdf_schema_validate_default", "Set to true to enable validation of the schema 'default' property", type="bool", default=True, ) parser.addini(
"asdf_schema_ignore_unrecognized_tag", "Set to true to disable warnings when tag serializers are missing", type="bool", default=False, ) parser.addini( "asdf_schema_ignore_version_mismatch", "Set to true to disable warnings when missing explicit support for a tag", type="bool", default=True ) parser.addoption('--asdf-tests', action='store_true', help='Enable ASDF schema tests') class AsdfSchemaFile(pytest.File): @classmethod def from_parent(cls, parent, *, fspath, skip_examples=False, validate_default=True, ignore_unrecognized_tag=False, ignore_version_mismatch=False, skip_tests=[], xfail_tests=[], **kwargs): # Fix for depreciation of fspath in pytest 7+ from asdf.util import minversion if minversion("pytest", "7.0.0"): path = pathlib.Path(fspath) kwargs["path"] = path else: path = fspath kwargs["fspath"] = path if hasattr(super(), "from_parent"): result = super().from_parent(parent, **kwargs) else: result = AsdfSchemaFile(path, parent) result.skip_examples = skip_examples result.validate_default = validate_default result.ignore_unrecognized_tag = ignore_unrecognized_tag result.ignore_version_mismatch = ignore_version_mismatch result.skip_tests = skip_tests result.xfail_tests = xfail_tests return result def _set_markers(self, item): if item.name in self.skip_tests or "*" in self.skip_tests: item.add_marker(pytest.mark.skip) if item.name in self.xfail_tests or "*" in self.xfail_tests: item.add_marker(pytest.mark.xfail) def collect(self): item = AsdfSchemaItem.from_parent(self, self.fspath, validate_default=self.validate_default, name="test_schema") self._set_markers(item) yield item if not self.skip_examples: for index, example in enumerate(self.find_examples_in_schema()): name = f"test_example_{index}" item = AsdfSchemaExampleItem.from_parent( self, self.fspath, example, index, ignore_unrecognized_tag=self.ignore_unrecognized_tag, ignore_version_mismatch=self.ignore_version_mismatch, name=name, ) self._set_markers(item) yield item def find_examples_in_schema(self): """Returns generator for all examples in schema at given path""" from asdf import treeutil with open(str(self.fspath), 'rb') as fd: schema_tree = yaml.safe_load(fd) for node in treeutil.iter_tree(schema_tree): if (isinstance(node, dict) and 'examples' in node and isinstance(node['examples'], list)): for desc, example in node['examples']: yield example class AsdfSchemaItem(pytest.Item): @classmethod def from_parent(cls, parent, schema_path, validate_default=True, **kwargs): if hasattr(super(), "from_parent"): result = super().from_parent(parent, **kwargs) else: name = kwargs.pop("name") result = AsdfSchemaItem(name, parent, **kwargs) result.schema_path = schema_path result.validate_default = validate_default return result def runtest(self): from asdf import schema from asdf.extension import default_extensions # Make sure that each schema itself is valid. 
schema_tree = schema.load_schema( self.schema_path, resolver=default_extensions.resolver, resolve_references=True) schema.check_schema(schema_tree, validate_default=self.validate_default) def reportinfo(self): return self.fspath, 0, "" class AsdfSchemaExampleItem(pytest.Item): @classmethod def from_parent(cls, parent, schema_path, example, example_index, ignore_unrecognized_tag=False, ignore_version_mismatch=False, **kwargs): if hasattr(super(), "from_parent"): result = super().from_parent(parent, **kwargs) else: name = kwargs.pop("name") result = AsdfSchemaExampleItem(name, parent, **kwargs) result.filename = str(schema_path) result.example = example result.ignore_unrecognized_tag = ignore_unrecognized_tag result.ignore_version_mismatch = ignore_version_mismatch return result def runtest(self): from asdf import AsdfFile, block, util from asdf.tests import helpers # Make sure that the examples in the schema files (and thus the # ASDF standard document) are valid. buff = helpers.yaml_to_asdf('example: ' + self.example.strip()) ff = AsdfFile( uri=util.filepath_to_url(os.path.abspath(self.filename)), ignore_unrecognized_tag=self.ignore_unrecognized_tag, ignore_version_mismatch=self.ignore_version_mismatch, ) # Fake an external file ff2 = AsdfFile({'data': np.empty((1024*1024*8), dtype=np.uint8)}) ff._external_asdf_by_uri[ util.filepath_to_url( os.path.abspath( os.path.join( os.path.dirname(self.filename), 'external.asdf')))] = ff2 # Add some dummy blocks so that the ndarray examples work for i in range(3): b = block.Block(np.zeros((1024*1024*8), dtype=np.uint8)) b._used = True ff.blocks.add(b) b._array_storage = "streamed" try: # Do not tolerate any warnings that occur during schema validation with warnings.catch_warnings(): warnings.simplefilter("error") ff._open_impl(ff, buff, mode='rw') except Exception: print("From file:", self.filename) raise # Just test we can write it out. A roundtrip test # wouldn't always yield the correct result, so those have # to be covered by "real" unit tests. 
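        # Examples referencing external.asdf are exercised for validation
        # only; re-serializing them would require block data from the
        # stand-in external file, so the write-out step is skipped for them.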
if b'external.asdf' not in buff.getvalue(): buff = io.BytesIO() ff.write_to(buff) def reportinfo(self): return self.fspath, 0, "" def _parse_test_list(content): result = {} for line in content.split("\n"): line = line.strip() if len(line) > 0: parts = line.split("::", 1) path_suffix = pathlib.Path(parts[0]).as_posix() if len(parts) == 1: name = "*" else: name = parts[-1] if path_suffix not in result: result[path_suffix] = [] result[path_suffix].append(name) return result def pytest_collect_file(path, parent): if not (parent.config.getini('asdf_schema_tests_enabled') or parent.config.getoption('asdf_tests')): return schema_roots = parent.config.getini('asdf_schema_root').split() if not schema_roots: return skip_names = parent.config.getini('asdf_schema_skip_names') skip_examples = parent.config.getini('asdf_schema_skip_examples') validate_default = parent.config.getini('asdf_schema_validate_default') ignore_unrecognized_tag = parent.config.getini('asdf_schema_ignore_unrecognized_tag') ignore_version_mismatch = parent.config.getini('asdf_schema_ignore_version_mismatch') skip_tests = _parse_test_list(parent.config.getini('asdf_schema_skip_tests')) xfail_tests = _parse_test_list(parent.config.getini('asdf_schema_xfail_tests')) schema_roots = [os.path.join(str(parent.config.rootdir), os.path.normpath(root)) for root in schema_roots] if path.ext != '.yaml': return None for root in schema_roots: if str(path).startswith(root) and path.purebasename not in skip_names: posix_path = pathlib.Path(path).as_posix() schema_skip_tests = [] for suffix, names in skip_tests.items(): if posix_path.endswith(suffix): schema_skip_tests.extend(names) schema_xfail_tests = [] for suffix, names in xfail_tests.items(): if posix_path.endswith(suffix): schema_xfail_tests.extend(names) return AsdfSchemaFile.from_parent( parent, fspath=path, skip_examples=(path.purebasename in skip_examples), validate_default=validate_default, ignore_unrecognized_tag=ignore_unrecognized_tag, ignore_version_mismatch=ignore_version_mismatch, skip_tests=schema_skip_tests, xfail_tests=schema_xfail_tests, ) return None ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1644282536.9866881 asdf-2.9.2/setup.cfg0000644000537500020070000000562400000000000015426 0ustar00wjamiesonSTSCI\science[metadata] name = asdf description = Python implementation of the ASDF Standard long_description = file: README.rst long_description_content_type = text/x-rst author = The ASDF Developers author_email = help@stsci.edu license = BSD-3-Clause license_file = LICENSE url = http://github.com/asdf-format/asdf project_urls = Bug Tracker = https://github.com/asdf-format/asdf/issues Documentation = https://asdf.readthedocs.io/en/stable Source Code = https://github.com/asdf-format/asdf classifiers = Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Development Status :: 5 - Production/Stable [options] python_requires = >=3.7 setup_requires = setuptools_scm install_requires = importlib_resources>=3;python_version<"3.9" jmespath>=0.6.2 jsonschema>=3.0.2,<4 numpy>=1.10 packaging>=16.0 pyyaml>=3.10 semantic_version>=2.8 [options.extras_require] all = lz4>=0.10 docs = sphinx sphinx-astropy astropy graphviz matplotlib docutils tests = pytest astropy gwcs pytest-doctestplus pytest-remotedata pytest-openfiles psutil [options.entry_points] console_scripts = asdftool = 
asdf.commands.main:main asdf_extensions = builtin = asdf.extension:BuiltinExtension pytest11 = asdf_schema_tester = pytest_asdf.plugin asdf.resource_mappings = asdf = asdf.resource:get_core_resource_mappings [build_sphinx] source-dir = docs build-dir = docs/_build all_files = 1 [upload_docs] upload-dir = docs/_build/html show-response = 1 [tool:pytest] testpaths = asdf docs asdf-standard/schemas minversion = 4.6 norecursedirs = build docs/_build docs/sphinxext doctest_plus = enabled remote_data_strict = True open_files_ignore = test.fits asdf.fits filterwarnings = ignore::asdf.exceptions.AsdfDeprecationWarning:asdf.asdftypes ignore:numpy.ndarray size changed:astropy.utils.exceptions.AstropyWarning ignore:numpy.ndarray size changed:RuntimeWarning text_file_format = rst asdf_schema_root = asdf-standard/schemas asdf/schemas asdf_schema_skip_tests = stsci.edu/asdf/asdf-schema-1.0.0.yaml stsci.edu/asdf/transform/domain-1.0.0.yaml stsci.edu/asdf/wcs/celestial_frame-1.0.0.yaml stsci.edu/asdf/wcs/celestial_frame-1.1.0.yaml stsci.edu/asdf/wcs/frame-1.0.0.yaml stsci.edu/asdf/wcs/frame-1.1.0.yaml stsci.edu/asdf/wcs/spectral_frame-1.1.0.yaml stsci.edu/asdf/wcs/step-1.1.0.yaml stsci.edu/asdf/wcs/step-1.2.0.yaml stsci.edu/asdf/wcs/wcs-1.1.0.yaml stsci.edu/asdf/wcs/wcs-1.2.0.yaml stsci.edu/yaml-schema/draft-01.yaml asdf_schema_xfail_tests = stsci.edu/asdf/core/ndarray-1.0.0.yaml::test_example_2 asdf_schema_tests_enabled = true asdf_schema_ignore_unrecognized_tag = true addopts = --doctest-rst [flake8] exclude = extern, docs/conf.py, .tox, .eggs select = F,W,E101,E111,E502,E722,E901,E902 ignore = W503,W504 [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266446.0 asdf-2.9.2/setup.py0000755000537500020070000000241300000000000015313 0ustar00wjamiesonSTSCI\science#!/usr/bin/env python import os from pathlib import Path from setuptools import setup, find_packages if not any((Path(__file__).parent / "asdf-standard").iterdir()): from setuptools.errors import SetupError raise SetupError("asdf-standard is empty. 
Need to run `git submodule update --init` and try again!") packages = find_packages() packages.append('asdf.schemas') packages.append('asdf.reference_files') packages.append('asdf.resources') package_dir = { 'asdf.schemas': 'asdf-standard/schemas', 'asdf.reference_files': 'asdf-standard/reference_files', 'asdf.resources': 'asdf-standard/resources', } def package_yaml_files(directory): paths = sorted(Path(directory).rglob("*.yaml")) return [str(p.relative_to(directory)) for p in paths] package_data = { 'asdf.commands.tests.data': ['*'], 'asdf.tags.core.tests.data': ['*'], 'asdf.tests.data': ['*'], 'asdf.reference_files': ['*', '**/*'], 'asdf.schemas': package_yaml_files("asdf-standard/schemas"), 'asdf.resources': package_yaml_files("asdf-standard/resources"), } setup( use_scm_version={"write_to": os.path.join("asdf", "version.py"), "write_to_template": 'version = "{version}"\n'}, packages=packages, package_dir=package_dir, package_data=package_data, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1644266491.0 asdf-2.9.2/tox.ini0000644000537500020070000000602100000000000015110 0ustar00wjamiesonSTSCI\science[tox] envlist= py38,style [testenv] deps= pytest-sugar astropydev: git+https://github.com/astropy/astropy gwcsdev: git+https://github.com/spacetelescope/gwcs numpydev: git+https://github.com/numpy/numpy asdfastropydev: git+https://github.com/astropy/asdf-astropy asdftransformschemasdev: git+https://github.com/asdf-format/asdf-transform-schemas asdfwcsschemasdev: git+https://github.com/asdf-format/asdf-wcs-schemas asdfcoordinatesschemasdev: git+https://github.com/asdf-format/asdf-coordinates-schemas # Newer versions of gwcs require astropy 4.x, which # isn't compatible with the older versions of numpy # that we test with. legacy: gwcs==0.9.1 legacy: semantic_version==2.8 legacy: pyyaml==3.13 legacy: jsonschema==3.0.2 legacy: numpy~=1.14.6 legacy: pytest~=4.6.11 legacy: astropy~=3.0.0 numpydev,s390x: cython extras= all,tests # astropy will complain if the home directory is missing passenv= HOME usedevelop= true commands= pytest --remote-data [testenv:s390x] # As of 2020-01-23, the s390x container on Travis has a bug where # /home/travis/.cache/pip/wheels is owned by root, which prevents # us from installing packages unless we disable caching. install_command= python -m pip install --no-cache-dir {opts} {packages} [testenv:prerelease] pip_pre= true [testenv:warnings] commands= pytest --remote-data -W error \ -p no:unraisableexception \ -W ignore::asdf.exceptions.AsdfDeprecationWarning:asdf.asdftypes \ -W 'ignore:numpy.ndarray size changed:astropy.utils.exceptions.AstropyWarning' \ -W 'ignore:numpy.ndarray size changed:RuntimeWarning' [testenv:packaged] # The default tox working directory is in .tox in the source directory. If we # execute pytest from there, it will discover tox.ini in the source directory # and load the asdf module from the unpackaged source, which is not what we # want. The home directory does not have a tox.ini in any of its ancestors, # so this will allow us to test the installed package.
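# Usage sketch: running `tox -e packaged` builds and installs the package
# into a fresh environment, then invokes pytest against the installed copy
# from the home directory, per the settings below.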
usedevelop= false changedir= {homedir} commands= pytest --pyargs asdf --remote-data [testenv:egg_info] deps= commands= python setup.py egg_info [testenv:twine] usedevelop= false deps= twine commands= twine check {distdir}/* [testenv:docbuild] extras= docs commands= sphinx-build -W docs build/docs [testenv:checkdocs] deps= collective.checkdocs pygments commands= python setup.py checkdocs [testenv:style] deps= flake8 commands= flake8 --count [testenv:coverage] deps= codecov coverage commands= coverage run --source=asdf --rcfile={toxinidir}/asdf/tests/coveragerc \ -m pytest --remote-data --open-files coverage report -m codecov -e TOXENV passenv= TOXENV CI TRAVIS TRAVIS_* CODECOV_* DISPLAY HOME [testenv:compatibility] deps= virtualenv extras= all,tests commands= pytest compatibility_tests/ --remote-data [testenv:bandit] deps= bandit commands= bandit -r -x asdf/tests,asdf/commands/tests,asdf/tags/core/tests,asdf/extern asdf
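# Each environment above can be invoked individually, e.g.:
#   tox -e style      # flake8 checks
#   tox -e bandit     # security scan
#   tox -e docbuild   # strict Sphinx build (warnings are errors)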