asdf-1.3.3/0000755000175000017500000000000013246031665011767 5ustar dandan00000000000000asdf-1.3.3/asdf/0000755000175000017500000000000013246031665012704 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/0000755000175000017500000000000013246031665013642 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/wcs/0000755000175000017500000000000013246031665014436 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/wcs/wcs.py0000644000175000017500000003124513246003441015600 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six from ...asdftypes import AsdfType from ... import yamlutil _REQUIRES = ['gwcs', 'astropy'] class WCSType(AsdfType): name = "wcs/wcs" requires = _REQUIRES types = ['gwcs.WCS'] @classmethod def from_tree(cls, node, ctx): import gwcs steps = [(x['frame'], x.get('transform')) for x in node['steps']] name = node['name'] return gwcs.WCS(steps, name=name) @classmethod def to_tree(cls, gwcs, ctx): def get_frame(frame_name): frame = getattr(gwcs, frame_name) if frame is None: return frame_name return frame frames = gwcs.available_frames steps = [] for i in range(len(frames) - 1): frame_name = frames[i] frame = get_frame(frame_name) transform = gwcs.get_transform(frames[i], frames[i + 1]) steps.append(StepType({'frame': frame, 'transform': transform})) frame_name = frames[-1] frame = get_frame(frame_name) steps.append(StepType({'frame': frame})) return {'name': gwcs.name, 'steps': yamlutil.custom_tree_to_tagged_tree(steps, ctx)} @classmethod def assert_equal(cls, old, new): from ...tests import helpers assert old.name == new.name assert len(old.available_frames) == len(new.available_frames) for (old_frame, old_transform), (new_frame, new_transform) in zip( old.pipeline, new.pipeline): helpers.assert_tree_match(old_frame, new_frame) helpers.assert_tree_match(old_transform, new_transform) class StepType(dict, AsdfType): name = "wcs/step" requires = _REQUIRES class FrameType(AsdfType): name = "wcs/frame" version = '1.1.0' requires = ['gwcs', 'astropy-1.3.3'] types = ['gwcs.Frame2D'] import astropy _astropy_version = astropy.__version__ # This indicates that Cartesian Differential is not available _old_astropy = astropy.__version__ <= '1.3.3' @classmethod def _get_reference_frame_mapping(cls): if hasattr(cls, '_reference_frame_mapping'): return cls._reference_frame_mapping from astropy.coordinates import builtin_frames cls._reference_frame_mapping = { 'ICRS': builtin_frames.ICRS, 'FK5': builtin_frames.FK5, 'FK4': builtin_frames.FK4, 'FK4_noeterms': builtin_frames.FK4NoETerms, 'galactic': builtin_frames.Galactic, 'galactocentric': builtin_frames.Galactocentric, 'GCRS': builtin_frames.GCRS, 'CIRS': builtin_frames.CIRS, 'ITRS': builtin_frames.ITRS, 'precessed_geocentric': builtin_frames.PrecessedGeocentric } return cls._reference_frame_mapping @classmethod def _get_inverse_reference_frame_mapping(cls): if hasattr(cls, '_inverse_reference_frame_mapping'): return cls._inverse_reference_frame_mapping reference_frame_mapping = cls._get_reference_frame_mapping() cls._inverse_reference_frame_mapping = {} for key, val in six.iteritems(reference_frame_mapping): cls._inverse_reference_frame_mapping[val] = key return cls._inverse_reference_frame_mapping @classmethod def _reference_frame_from_tree(cls, node, ctx): from ..unit import QuantityType from astropy.units import Quantity from astropy.coordinates import ICRS, CartesianRepresentation version = 
cls.version reference_frame = node['reference_frame'] reference_frame_name = reference_frame['type'] frame_cls = cls._get_reference_frame_mapping()[reference_frame_name] frame_kwargs = {} for name in frame_cls.get_frame_attr_names().keys(): val = reference_frame.get(name) if val is not None: # These are deprecated fields that must be handled as a special # case for older versions of the schema if name in ['galcen_ra', 'galcen_dec']: continue # There was no schema for quantities in v1.0.0 if name in ['galcen_distance', 'roll', 'z_sun'] and version == '1.0.0': val = Quantity(val[0], unit=val[1]) # These fields are known to be CartesianRepresentations if name in ['obsgeoloc', 'obsgeovel']: if version == '1.0.0': unit = val[1] x = Quantity(val[0][0], unit=unit) y = Quantity(val[0][1], unit=unit) z = Quantity(val[0][2], unit=unit) else: x = QuantityType.from_tree(val[0], ctx) y = QuantityType.from_tree(val[1], ctx) z = QuantityType.from_tree(val[2], ctx) val = CartesianRepresentation(x, y, z) elif not cls._old_astropy and name == 'galcen_v_sun': from astropy.coordinates import CartesianDifferential # This field only exists since v1.1.0, and it only uses # CartesianDifferential after v1.3.3 d_x = QuantityType.from_tree(val[0], ctx) d_y = QuantityType.from_tree(val[1], ctx) d_z = QuantityType.from_tree(val[2], ctx) val = CartesianDifferential(d_x, d_y, d_z) else: val = yamlutil.tagged_tree_to_custom_tree(val, ctx) frame_kwargs[name] = val has_ra_and_dec = reference_frame.get('galcen_dec') and \ reference_frame.get('galcen_ra') if version == '1.0.0' and has_ra_and_dec: # Convert deprecated ra and dec fields into galcen_coord galcen_dec = reference_frame['galcen_dec'] galcen_ra = reference_frame['galcen_ra'] dec = Quantity(galcen_dec[0], unit=galcen_dec[1]) ra = Quantity(galcen_ra[0], unit=galcen_ra[1]) frame_kwargs['galcen_coord'] = ICRS(dec=dec, ra=ra) return frame_cls(**frame_kwargs) @classmethod def _from_tree(cls, node, ctx): kwargs = {'name': node['name']} if 'axes_names' in node: kwargs['axes_names'] = node['axes_names'] if 'reference_frame' in node: kwargs['reference_frame'] = \ cls._reference_frame_from_tree(node, ctx) if 'axes_order' in node: kwargs['axes_order'] = tuple(node['axes_order']) if 'unit' in node: kwargs['unit'] = tuple( yamlutil.tagged_tree_to_custom_tree(node['unit'], ctx)) return kwargs @classmethod def _to_tree(cls, frame, ctx): import numpy as np from ..unit import QuantityType from astropy.coordinates import CartesianRepresentation if not cls._old_astropy: from astropy.coordinates import CartesianDifferential node = {} node['name'] = frame.name if frame.axes_order != (0, 1): node['axes_order'] = list(frame.axes_order) if frame.axes_names is not None: node['axes_names'] = list(frame.axes_names) if frame.reference_frame is not None: reference_frame = {} reference_frame['type'] = cls._get_inverse_reference_frame_mapping()[ type(frame.reference_frame)] for name in frame.reference_frame.get_frame_attr_names().keys(): frameval = getattr(frame.reference_frame, name) # CartesianRepresentation becomes a flat list of x,y,z # coordinates with associated units if isinstance(frameval, CartesianRepresentation): value = [frameval.x, frameval.y, frameval.z] frameval = value elif not cls._old_astropy and isinstance(frameval, CartesianDifferential): value = [frameval.d_x, frameval.d_y, frameval.d_z] frameval = value yamlval = yamlutil.custom_tree_to_tagged_tree(frameval, ctx) reference_frame[name] = yamlval node['reference_frame'] = reference_frame if frame.unit is not None: node['unit'] 
= yamlutil.custom_tree_to_tagged_tree( list(frame.unit), ctx) return node @classmethod def _assert_equal(cls, old, new): from ...tests import helpers assert old.name == new.name assert old.axes_order == new.axes_order assert old.axes_names == new.axes_names assert type(old.reference_frame) == type(new.reference_frame) assert old.unit == new.unit if old.reference_frame is not None: for name in old.reference_frame.get_frame_attr_names().keys(): helpers.assert_tree_match( getattr(old.reference_frame, name), getattr(new.reference_frame, name)) @classmethod def assert_equal(cls, old, new): cls._assert_equal(old, new) @classmethod def from_tree(cls, node, ctx): import gwcs node = cls._from_tree(node, ctx) return gwcs.Frame2D(**node) @classmethod def to_tree(cls, frame, ctx): return cls._to_tree(frame, ctx) class CelestialFrameType(FrameType): name = "wcs/celestial_frame" types = ['gwcs.CelestialFrame'] supported_versions = [(1,0,0), (1,1,0)] @classmethod def from_tree(cls, node, ctx): import gwcs node = cls._from_tree(node, ctx) return gwcs.CelestialFrame(**node) @classmethod def to_tree(cls, frame, ctx): return cls._to_tree(frame, ctx) @classmethod def assert_equal(cls, old, new): cls._assert_equal(old, new) assert old.reference_position == new.reference_position class SpectralFrame(FrameType): name = "wcs/spectral_frame" types = ['gwcs.SpectralFrame'] @classmethod def from_tree(cls, node, ctx): import gwcs node = cls._from_tree(node, ctx) if 'reference_position' in node: node['reference_position'] = node['reference_position'].upper() return gwcs.SpectralFrame(**node) @classmethod def to_tree(cls, frame, ctx): node = cls._to_tree(frame, ctx) if frame.reference_position is not None: node['reference_position'] = frame.reference_position.lower() return node class CompositeFrame(FrameType): name = "wcs/composite_frame" types = ['gwcs.CompositeFrame'] @classmethod def from_tree(cls, node, ctx): import gwcs if len(node) != 2: raise ValueError("CompositeFrame has extra properties") name = node['name'] frames = node['frames'] return gwcs.CompositeFrame(frames, name) @classmethod def to_tree(cls, frame, ctx): return { 'name': frame.name, 'frames': yamlutil.custom_tree_to_tagged_tree(frame.frames, ctx) } @classmethod def assert_equal(cls, old, new): from ...tests import helpers assert old.name == new.name for old_frame, new_frame in zip(old.frames, new.frames): helpers.assert_tree_match(old_frame, new_frame) class ICRSCoord(AsdfType): name = "wcs/icrs_coord" types = ['astropy.coordinates.ICRS'] requires = ['astropy'] version = "1.1.0" @classmethod def from_tree(cls, node, ctx): from ..unit import QuantityType from astropy.coordinates import ICRS, Longitude, Latitude, Angle angle = QuantityType.from_tree(node['ra']['wrap_angle'], ctx) wrap_angle = Angle(angle.value, unit=angle.unit) ra = Longitude( node['ra']['value'], unit=node['ra']['unit'], wrap_angle=wrap_angle) dec = Latitude(node['dec']['value'], unit=node['dec']['unit']) return ICRS(ra=ra, dec=dec) @classmethod def to_tree(cls, frame, ctx): from ..unit import QuantityType from astropy.units import Quantity from astropy.coordinates import ICRS node = {} wrap_angle = Quantity( frame.ra.wrap_angle.value, unit=frame.ra.wrap_angle.unit) node['ra'] = { 'value': frame.ra.value, 'unit': frame.ra.unit.to_string(), 'wrap_angle': yamlutil.custom_tree_to_tagged_tree(wrap_angle, ctx) } node['dec'] = { 'value': frame.dec.value, 'unit': frame.dec.unit.to_string() } return node asdf-1.3.3/asdf/tags/wcs/tests/0000755000175000017500000000000013246031665015600 5ustar 
dandan00000000000000asdf-1.3.3/asdf/tags/wcs/tests/test_wcs.py0000644000175000017500000002121513246003441017775 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import pytest import warnings gwcs = pytest.importorskip('gwcs') astropy = pytest.importorskip('astropy', minversion='1.3.3') from astropy.modeling import models from astropy import coordinates as coord from astropy import units as u from astropy import time from gwcs import coordinate_frames as cf from gwcs import wcs from .... import AsdfFile from ....tests import helpers def test_create_wcs(tmpdir): m1 = models.Shift(12.4) & models.Shift(-2) m2 = models.Scale(2) & models.Scale(-2) icrs = cf.CelestialFrame(name='icrs', reference_frame=coord.ICRS()) det = cf.Frame2D(name='detector', axes_order=(0,1)) gw1 = wcs.WCS(output_frame='icrs', input_frame='detector', forward_transform=m1) gw2 = wcs.WCS(output_frame='icrs', forward_transform=m1) gw3 = wcs.WCS(output_frame=icrs, input_frame=det, forward_transform=m1) tree = { 'gw1': gw1, 'gw2': gw2, 'gw3': gw3 } helpers.assert_roundtrip_tree(tree, tmpdir) def test_composite_frame(tmpdir): icrs = coord.ICRS() fk5 = coord.FK5() cel1 = cf.CelestialFrame(reference_frame=icrs) cel2 = cf.CelestialFrame(reference_frame=fk5) spec1 = cf.SpectralFrame(name='freq', unit=[u.Hz,], axes_order=(2,)) spec2 = cf.SpectralFrame(name='wave', unit=[u.m,], axes_order=(2,)) comp1 = cf.CompositeFrame([cel1, spec1]) comp2 = cf.CompositeFrame([cel2, spec2]) comp = cf.CompositeFrame([comp1, cf.SpectralFrame(axes_order=(3,), unit=(u.m,))]) tree = { 'comp1': comp1, 'comp2': comp2, 'comp': comp } helpers.assert_roundtrip_tree(tree, tmpdir) def create_test_frames(): """Creates an array of frames to be used for testing.""" # Suppress warnings from astropy that are caused by having 'dubious' dates # that are too far in the future. It's not a concern for the purposes of # unit tests. See issue #5809 on the astropy GitHub for discussion. 
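# Note that the filter applied below only ignores ErfaWarning; any other
# astropy warnings raised while building these frames are still reported.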
from astropy._erfa import ErfaWarning warnings.simplefilter("ignore", ErfaWarning) frames = [ cf.CelestialFrame(reference_frame=coord.ICRS()), cf.CelestialFrame( reference_frame=coord.FK5(equinox=time.Time('2010-01-01'))), cf.CelestialFrame( reference_frame=coord.FK4( equinox=time.Time('2010-01-01'), obstime=time.Time('2015-01-01')) ), cf.CelestialFrame( reference_frame=coord.FK4NoETerms( equinox=time.Time('2010-01-01'), obstime=time.Time('2015-01-01')) ), cf.CelestialFrame( reference_frame=coord.Galactic()), cf.CelestialFrame( reference_frame=coord.Galactocentric( # A default galcen_coord is used since none is provided here galcen_distance=5.0*u.m, z_sun=3*u.pc, roll=3*u.deg) ), cf.CelestialFrame( reference_frame=coord.GCRS( obstime=time.Time('2010-01-01'), obsgeoloc=[1, 3, 2000] * u.pc, obsgeovel=[2, 1, 8] * (u.m/u.s))), cf.CelestialFrame( reference_frame=coord.CIRS( obstime=time.Time('2010-01-01'))), cf.CelestialFrame( reference_frame=coord.ITRS( obstime=time.Time('2022-01-03'))), cf.CelestialFrame( reference_frame=coord.PrecessedGeocentric( obstime=time.Time('2010-01-01'), obsgeoloc=[1, 3, 2000] * u.pc, obsgeovel=[2, 1, 8] * (u.m/u.s))) ] return frames def test_frames(tmpdir): tree = { 'frames': create_test_frames() } helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif(astropy.__version__ <= '1.3.3', reason="It does not make sense to test backwards compatibility when using " "earlier versions of astropy") def test_backwards_compat_galcen(): # Hold these fields constant so that we can compare them declination = 1.0208 # in degrees right_ascension = 45.729 # in degrees galcen_distance = 3.14 roll = 4.0 z_sun = 0.2084 old_frame_yaml = """ frames: - !wcs/celestial_frame-1.0.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: type: galactocentric galcen_dec: - %f - deg galcen_ra: - %f - deg galcen_distance: - %f - m roll: - %f - deg z_sun: - %f - pc unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] """ % (declination, right_ascension, galcen_distance, roll, z_sun) new_frame_yaml = """ frames: - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: type: galactocentric galcen_coord: !wcs/icrs_coord-1.1.0 dec: {value: %f} ra: value: %f wrap_angle: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 360.0} galcen_distance: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: %f} galcen_v_sun: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 11.1} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 232.24} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 7.25} roll: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: %f} z_sun: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pc, value: %f} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] """ % (declination, right_ascension, galcen_distance, roll, z_sun) old_buff = helpers.yaml_to_asdf(old_frame_yaml) old_asdf = AsdfFile.open(old_buff) old_frame = old_asdf.tree['frames'][0] new_buff = helpers.yaml_to_asdf(new_frame_yaml) new_asdf = AsdfFile.open(new_buff) new_frame = new_asdf.tree['frames'][0] # Poor man's frame comparison since it's not implemented by astropy assert old_frame.axes_names == new_frame.axes_names assert old_frame.axes_order == new_frame.axes_order assert old_frame.unit == new_frame.unit old_refframe = old_frame.reference_frame new_refframe = new_frame.reference_frame # v1.0.0 frames have no representation of galcen_v_center, so do not compare 
assert old_refframe.galcen_distance == new_refframe.galcen_distance assert old_refframe.galcen_coord.dec == new_refframe.galcen_coord.dec assert old_refframe.galcen_coord.ra == new_refframe.galcen_coord.ra def test_backwards_compat_gcrs(): obsgeoloc = ( 3.0856775814671916e+16, 9.257032744401574e+16, 6.1713551629343834e+19 ) obsgeovel = (2.0, 1.0, 8.0) old_frame_yaml = """ frames: - !wcs/celestial_frame-1.0.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: type: GCRS obsgeoloc: - [%f, %f, %f] - !unit/unit-1.0.0 m obsgeovel: - [%f, %f, %f] - !unit/unit-1.0.0 m s-1 obstime: !time/time-1.0.0 2010-01-01 00:00:00.000 unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] """ % (obsgeovel + obsgeoloc) new_frame_yaml = """ frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: type: GCRS obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: %f} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: %f} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: %f} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: %f} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: %f} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: %f} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] """ % (obsgeovel + obsgeoloc) old_buff = helpers.yaml_to_asdf(old_frame_yaml) old_asdf = AsdfFile.open(old_buff) old_frame = old_asdf.tree['frames'][0] old_loc = old_frame.reference_frame.obsgeoloc old_vel = old_frame.reference_frame.obsgeovel new_buff = helpers.yaml_to_asdf(new_frame_yaml) new_asdf = AsdfFile.open(new_buff) new_frame = new_asdf.tree['frames'][0] new_loc = new_frame.reference_frame.obsgeoloc new_vel = new_frame.reference_frame.obsgeovel assert (old_loc.x == new_loc.x and old_loc.y == new_loc.y and old_loc.z == new_loc.z) assert (old_vel.x == new_vel.x and old_vel.y == new_vel.y and old_vel.z == new_vel.z) asdf-1.3.3/asdf/tags/wcs/tests/__init__.py0000644000175000017500000000000113243547254017703 0ustar dandan00000000000000 asdf-1.3.3/asdf/tags/wcs/__init__.py0000644000175000017500000000030013246003441016527 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .wcs import * asdf-1.3.3/asdf/tags/core/0000755000175000017500000000000013246031665014572 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/core/table.py0000644000175000017500000000504213246003441016223 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np from ...asdftypes import AsdfType from ... 
import yamlutil class TableType(AsdfType): name = 'core/table' types = ['astropy.table.Table'] requires = ['astropy'] @classmethod def from_tree(cls, node, ctx): from astropy import table columns = [ yamlutil.tagged_tree_to_custom_tree(c, ctx) for c in node['columns'] ] return table.Table(columns, meta=node.get('meta', {})) @classmethod def to_tree(cls, data, ctx): columns = [] for name in data.colnames: column = yamlutil.custom_tree_to_tagged_tree( data.columns[name], ctx) columns.append(column) node = {'columns': columns} if data.meta: node['meta'] = data.meta return node @classmethod def assert_equal(cls, old, new): from .ndarray import NDArrayType assert old.meta == new.meta NDArrayType.assert_equal(np.array(old), np.array(new)) class ColumnType(AsdfType): name = 'core/column' types = ['astropy.table.Column', 'astropy.table.MaskedColumn'] requires = ['astropy'] handle_dynamic_subclasses = True @classmethod def from_tree(cls, node, ctx): from astropy import table data = yamlutil.tagged_tree_to_custom_tree( node['data'], ctx) name = node['name'] description = node.get('description') unit = node.get('unit') meta = node.get('meta', None) return table.Column( data=data._make_array(), name=name, description=description, unit=unit, meta=meta) @classmethod def to_tree(cls, data, ctx): node = { 'data': yamlutil.custom_tree_to_tagged_tree( data.data, ctx), 'name': data.name } if data.description: node['description'] = data.description if data.unit: node['unit'] = yamlutil.custom_tree_to_tagged_tree( data.unit, ctx) if data.meta: node['meta'] = data.meta return node @classmethod def assert_equal(cls, old, new): from .ndarray import NDArrayType assert old.meta == new.meta assert old.description == new.description assert old.unit == new.unit NDArrayType.assert_equal(np.array(old), np.array(new)) asdf-1.3.3/asdf/tags/core/complex.py0000644000175000017500000000130413246003441016600 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six import numpy as np from ...asdftypes import AsdfType from ... 
import util class ComplexType(AsdfType): name = 'core/complex' types = list(util.iter_subclasses(np.complexfloating)) + [complex] @classmethod def to_tree(cls, node, ctx): return six.text_type(node) @classmethod def from_tree(cls, tree, ctx): tree = tree.replace( 'inf', 'INF').replace( 'i', 'j').replace( 'INF', 'inf').replace( 'I', 'J') return complex(tree) asdf-1.3.3/asdf/tags/core/constant.py0000644000175000017500000000112713246003441016765 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from ...asdftypes import AsdfType class Constant(object): def __init__(self, value): self._value = value @property def value(self): return self._value class ConstantType(AsdfType): name = 'core/constant' types = [Constant] @classmethod def from_tree(self, node, ctx): return Constant(node) @classmethod def to_tree(self, data, ctx): return data.value asdf-1.3.3/asdf/tags/core/tests/0000755000175000017500000000000013246031665015734 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/core/tests/setup_package.py0000644000175000017500000000045213246003441021111 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function def get_package_data(): # pragma: no cover return { str(_PACKAGE_NAME_ + '.tags.core.tests'): ['data/*.yaml']} asdf-1.3.3/asdf/tags/core/tests/test_table.py0000644000175000017500000001115313246003441020424 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np try: import astropy except ImportError: HAS_ASTROPY = False else: HAS_ASTROPY = True import pytest from .... import asdf from ....tests import helpers @pytest.mark.skipif('not HAS_ASTROPY') def test_table(tmpdir): from astropy import table data_rows = [(1, 2.0, 'x'), (4, 5.0, 'y'), (5, 8.2, 'z')] t = table.Table(rows=data_rows, names=('a', 'b', 'c'), dtype=('i4', 'f8', 'S1')) t.columns['a'].description = 'RA' t.columns['a'].unit = 'degree' t.columns['a'].meta = {'foo': 'bar'} t.columns['c'].description = 'Some description of some sort' def asdf_check(ff): assert len(ff.blocks) == 3 helpers.assert_roundtrip_tree({'table': t}, tmpdir, asdf_check) @pytest.mark.skipif('not HAS_ASTROPY') def test_array_columns(tmpdir): from astropy import table a = np.array([([[1, 2], [3, 4]], 2.0, 'x'), ([[5, 6], [7, 8]], 5.0, 'y'), ([[9, 10], [11, 12]], 8.2, 'z')], dtype=[(str('a'), str('f8')), 'little': np.arange(0, 10, dtype=str('' assert tree['little'].dtype.byteorder == '=' else: assert tree['bigendian'].dtype.byteorder == '=' assert tree['little'].dtype.byteorder == '<' def check_raw_yaml(content): assert b'byteorder: little' in content assert b'byteorder: big' in content helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf, check_raw_yaml) def test_all_dtypes(tmpdir): tree = {} for byteorder in ('>', '<'): for dtype in ndarray._datatype_names.values(): # Python 3 can't expose these dtypes in non-native byte # order, because it's using the new Python buffer # interface. 
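# ('c32' and 'f16' are the extended-precision complex and float dtype codes,
# hence the skip below when running on Python 3.)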
if six.PY3 and dtype in ('c32', 'f16'): continue if dtype == 'b1': arr = np.array([True, False]) else: arr = np.arange(0, 10, dtype=str(byteorder + dtype)) tree[byteorder + dtype] = arr helpers.assert_roundtrip_tree(tree, tmpdir) def test_dont_load_data(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2] } ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: ff.run_hook('reserve_blocks') # repr and str shouldn't load data str(ff.tree['science_data']) repr(ff.tree) for block in ff.blocks.internal_blocks: assert block._data is None def test_table_inline(tmpdir): table = np.array( [(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[(str('MINE'), np.int8), (str(''), np.float64), (str('arr'), '>i4', (2,))]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'datatype': 'int8', 'name': 'MINE'}, {'datatype': 'float64', 'name': 'f1'}, {'datatype': 'int32', 'name': 'arr', 'shape': [2]} ], 'data': [[0, 1.0, [2, 3]], [4, 5.0, [6, 7]]], 'shape': [2] } helpers.assert_roundtrip_tree( tree, tmpdir, None, check_raw_yaml, {'auto_inline': 64}) @pytest.mark.skipif('not HAS_ASTROPY') def test_auto_inline_recursive(tmpdir): from astropy.modeling import models aff = models.AffineTransformation2D(matrix=[[1, 2], [3, 4]]) tree = {'test': aff} def check_asdf(asdf): assert len(list(asdf.blocks.internal_blocks)) == 0 helpers.assert_roundtrip_tree( tree, tmpdir, check_asdf, None, {'auto_inline': 64}) def test_copy_inline(): yaml = """ x0: !core/ndarray-1.0.0 data: [-1.0, 1.0] """ buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open(buff) as infile: with asdf.AsdfFile() as f: f.tree['a'] = infile.tree['x0'] f.tree['b'] = f.tree['a'] f.write_to(io.BytesIO()) def test_table(tmpdir): table = np.array( [(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[(str('MINE'), np.int8), (str(''), np.float64), (str('arr'), '>i4', (2,))]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'byteorder': 'big', 'datatype': 'int8', 'name': 'MINE'}, {'byteorder': 'little', 'datatype': 'float64', 'name': 'f1'}, {'byteorder': 'big', 'datatype': 'int32', 'name': 'arr', 'shape': [2]} ], 'shape': [2], 'source': 0, 'byteorder': 'big' } helpers.assert_roundtrip_tree(tree, tmpdir, None, check_raw_yaml) def test_table_nested_fields(tmpdir): table = np.array( [(0, (1, 2)), (4, (5, 6)), (7, (8, 9))], dtype=[(str('A'), np.int64), (str('B'), [(str('C'), np.int64), (str('D'), np.int64)])]) tree = {'table_data': table} def check_raw_yaml(content): tree = yaml.load( re.sub(br'!core/\S+', b'', content)) assert tree['table_data'] == { 'datatype': [ {'datatype': 'int64', 'name': 'A', 'byteorder': 'little'}, {'datatype': [ {'datatype': 'int64', 'name': 'C', 'byteorder': 'little'}, {'datatype': 'int64', 'name': 'D', 'byteorder': 'little'} ], 'name': 'B', 'byteorder': 'big'}], 'shape': [3], 'source': 0, 'byteorder': 'big' } helpers.assert_roundtrip_tree(tree, tmpdir, None, check_raw_yaml) def test_inline(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2] } buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.blocks.set_array_storage(ff.blocks[tree['science_data']], 'inline') ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff, mode='rw') as ff: helpers.assert_tree_match(tree, ff.tree) assert 
len(list(ff.blocks.internal_blocks)) == 0 buff = io.BytesIO() ff.write_to(buff) assert b'[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]' in buff.getvalue() def test_inline_bare(): content = "arr: !core/ndarray-1.0.0 [[1, 2, 3, 4], [5, 6, 7, 8]]" buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff) as ff: assert_array_equal(ff.tree['arr'], [[1, 2, 3, 4], [5, 6, 7, 8]]) def test_mask_roundtrip(tmpdir): x = np.arange(0, 10, dtype=np.float) m = ma.array(x, mask=x > 5) tree = { 'masked_array': m, 'unmasked_array': x } def check_asdf(asdf): tree = asdf.tree m = tree['masked_array'] x = tree['unmasked_array'] print(m) print(m.mask) assert np.all(m.mask[6:]) assert len(asdf.blocks) == 2 helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf) def test_mask_arbitrary(): content = """ arr: !core/ndarray-1.0.0 data: [[1, 2, 3, 1234], [5, 6, 7, 8]] mask: 1234 """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) def test_mask_nan(): content = """ arr: !core/ndarray-1.0.0 data: [[1, 2, 3, .NaN], [5, 6, 7, 8]] mask: .NaN """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff) as ff: assert_array_equal( ff.tree['arr'].mask, [[False, False, False, True], [False, False, False, False]]) def test_string(tmpdir): tree = { 'ascii': np.array([b'foo', b'bar', b'baz']), 'unicode': np.array(['სამეცნიერო', 'данные', 'வடிவம்']) } helpers.assert_roundtrip_tree(tree, tmpdir) def test_string_table(tmpdir): tree = { 'table': np.array([(b'foo', 'სამეცნიერო', 42, 53.0)]) } helpers.assert_roundtrip_tree(tree, tmpdir) def test_inline_string(): content = "arr: !core/ndarray-1.0.0 ['a', 'b', 'c']" buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff) as ff: assert_array_equal(ff.tree['arr']._make_array(), ['a', 'b', 'c']) def test_inline_structured(): content = """ arr: !core/ndarray-1.0.0 datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]] data: [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]]""" buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff) as ff: assert ff.tree['arr']['f1'].dtype.char == 'H' def test_simple_table(): table = np.array( [(10.683262825012207, 41.2674560546875, 0.13, 0.12, 213.916), (10.682777404785156, 41.270111083984375, 0.1, 0.09, 306.825), (10.684737205505371, 41.26903533935547, 0.08, 0.07, 96.656), (10.682382583618164, 41.26792526245117, 0.1, 0.09, 237.145), (10.686025619506836, 41.26922607421875, 0.13, 0.12, 79.581), (10.685656547546387, 41.26955032348633, 0.13, 0.12, 55.219), (10.684028625488281, 41.27090072631836, 0.13, 0.12, 345.269), (10.687610626220703, 41.270301818847656, 0.18, 0.14, 60.192)], dtype=[ (str('ra'), str(' a: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! a: !core/ndarray-1.0.0 data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! a: !core/ndarray-1.0.0 shape: [1, 3] data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! b: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! 
b: !core/ndarray-1.0.0 data: [[1, 2, 3]] """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! b: !core/ndarray-1.0.0 data: [[[1, 2, 3]]] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass def test_datatype_validation(tmpdir): content = """ obj: ! a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: float32 """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: float64 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! a: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: int16 """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! b: !core/ndarray-1.0.0 data: [1, 2, 3] datatype: int16 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! a: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass def test_structured_datatype_validation(tmpdir): content = """ obj: ! c: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! c: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int64 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! c: !core/ndarray-1.0.0 data: [[1, 'a', 0], [2, 'b', 1], [3, 'c', 2]] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] - name: c datatype: float64 """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! c: !core/ndarray-1.0.0 data: [1, 2, 3] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! d: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int8 - name: b datatype: ['ascii', 8] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(jsonschema.ValidationError): with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass content = """ obj: ! 
d: !core/ndarray-1.0.0 data: [[1, 'a'], [2, 'b'], [3, 'c']] datatype: - name: a datatype: int16 - name: b datatype: ['ascii', 16] """ buff = helpers.yaml_to_asdf(content) with asdf.AsdfFile.open(buff, extensions=CustomExtension()) as ff: pass def test_string_inline(): x = np.array([b'a', b'b', b'c']) l = ndarray.numpy_array_to_list(x) for entry in l: assert isinstance(entry, six.text_type) def test_inline_shape_mismatch(): content = """ arr: !core/ndarray-1.0.0 data: [1, 2, 3] shape: [2] """ buff = helpers.yaml_to_asdf(content) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff) as ff: pass asdf-1.3.3/asdf/tags/core/tests/test_history.py0000644000175000017500000000206413246003441021037 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import datetime import pytest from jsonschema import ValidationError from .... import asdf def test_history(): ff = asdf.AsdfFile() assert 'history' not in ff.tree ff.add_history_entry('This happened', {'name': 'my_tool', 'homepage': 'http://nowhere.com', 'author': 'John Doe', 'version': '2.0'}) assert len(ff.tree['history']) == 1 with pytest.raises(ValidationError): ff.add_history_entry('That happened', {'name': 'my_tool', 'author': 'John Doe', 'version': '2.0'}) assert len(ff.tree['history']) == 1 ff.add_history_entry('This other thing happened') assert len(ff.tree['history']) == 2 assert isinstance(ff.tree['history'][0]['time'], datetime.datetime) asdf-1.3.3/asdf/tags/core/__init__.py0000644000175000017500000000103513246003441016671 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from ...asdftypes import AsdfType class AsdfObject(dict, AsdfType): name = 'core/asdf' class Software(dict, AsdfType): name = 'core/software' class HistoryEntry(dict, AsdfType): name = 'core/history_entry' from .constant import ConstantType from .ndarray import NDArrayType from .complex import ComplexType from .table import TableType, ColumnType asdf-1.3.3/asdf/tags/core/ndarray.py0000644000175000017500000005176013246003441016604 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import sys import numpy as np from numpy import ma from jsonschema import ValidationError import six from ...asdftypes import AsdfType from ... import schema from ... import util from ... 
import yamlutil _datatype_names = { 'int8' : 'i1', 'int16' : 'i2', 'int32' : 'i4', 'int64' : 'i8', 'uint8' : 'u1', 'uint16' : 'u2', 'uint32' : 'u4', 'uint64' : 'u8', 'float32' : 'f4', 'float64' : 'f8', 'complex64' : 'c8', 'complex128' : 'c16', 'bool8' : 'b1' } _string_datatype_names = { 'ascii' : 'S', 'ucs4' : 'U' } def asdf_byteorder_to_numpy_byteorder(byteorder): if byteorder == 'big': return '>' elif byteorder == 'little': return '<' raise ValueError("Invalid ASDF byteorder '{0}'".format(byteorder)) def asdf_datatype_to_numpy_dtype(datatype, byteorder=None): if byteorder is None: byteorder = sys.byteorder if isinstance(datatype, six.string_types) and datatype in _datatype_names: datatype = _datatype_names[datatype] byteorder = asdf_byteorder_to_numpy_byteorder(byteorder) return np.dtype(str(byteorder + datatype)) elif (isinstance(datatype, list) and len(datatype) == 2 and isinstance(datatype[0], six.text_type) and isinstance(datatype[1], six.integer_types) and datatype[0] in _string_datatype_names): length = datatype[1] byteorder = asdf_byteorder_to_numpy_byteorder(byteorder) datatype = str(byteorder) + str(_string_datatype_names[datatype[0]]) + str(length) return np.dtype(datatype) elif isinstance(datatype, dict): if 'datatype' not in datatype: raise ValueError("Field entry has no datatype: '{0}'".format(datatype)) name = datatype.get('name', '') byteorder = datatype.get('byteorder', byteorder) shape = datatype.get('shape') datatype = asdf_datatype_to_numpy_dtype(datatype['datatype'], byteorder) if shape is None: return (str(name), datatype) else: return (str(name), datatype, tuple(shape)) elif isinstance(datatype, list): datatype_list = [] for i, subdatatype in enumerate(datatype): np_dtype = asdf_datatype_to_numpy_dtype(subdatatype, byteorder) if isinstance(np_dtype, tuple): datatype_list.append(np_dtype) elif isinstance(np_dtype, np.dtype): datatype_list.append((str(''), np_dtype)) else: raise RuntimeError("Error parsing asdf datatype") return np.dtype(datatype_list) raise ValueError("Unknown datatype {0}".format(datatype)) def numpy_byteorder_to_asdf_byteorder(byteorder): if byteorder == '=': return sys.byteorder elif byteorder == '<': return 'little' else: return 'big' def numpy_dtype_to_asdf_datatype(dtype, include_byteorder=True): dtype = np.dtype(dtype) if dtype.names is not None: fields = [] for name in dtype.names: field = dtype.fields[name][0] d = {} d['name'] = name field_dtype, byteorder = numpy_dtype_to_asdf_datatype(field) d['datatype'] = field_dtype if include_byteorder: d['byteorder'] = byteorder if field.shape: d['shape'] = list(field.shape) fields.append(d) return fields, numpy_byteorder_to_asdf_byteorder(dtype.byteorder) elif dtype.subdtype is not None: return numpy_dtype_to_asdf_datatype(dtype.subdtype[0]) elif dtype.name in _datatype_names: return dtype.name, numpy_byteorder_to_asdf_byteorder(dtype.byteorder) elif dtype.name == 'bool': return 'bool8', numpy_byteorder_to_asdf_byteorder(dtype.byteorder) elif dtype.name.startswith('string') or dtype.name.startswith('bytes'): return ['ascii', dtype.itemsize], 'big' elif dtype.name.startswith('unicode') or dtype.name.startswith('str'): return (['ucs4', int(dtype.itemsize / 4)], numpy_byteorder_to_asdf_byteorder(dtype.byteorder)) raise ValueError("Unknown dtype {0}".format(dtype)) def inline_data_asarray(inline, dtype=None): # np.asarray doesn't handle structured arrays unless the innermost # elements are tuples. 
To do that, we drill down the first # element of each level until we find a single item that # successfully converts to a scalar of the expected structured # dtype. Then we go through and convert everything at that level # to a tuple. This probably breaks for nested structured dtypes, # but it's probably good enough for now. It also won't work with # object dtypes, but ASDF explicitly excludes those, so we're ok # there. if dtype is not None and dtype.fields is not None: def find_innermost_match(l, depth=0): if not isinstance(l, list) or not len(l): raise ValueError( "data can not be converted to structured array") try: np.asarray(tuple(l), dtype=dtype) except ValueError: return find_innermost_match(l[0], depth + 1) else: return depth depth = find_innermost_match(inline) def convert_to_tuples(l, data_depth, depth=0): if data_depth == depth: return tuple(l) else: return [convert_to_tuples(x, data_depth, depth+1) for x in l] inline = convert_to_tuples(inline, depth) return np.asarray(inline, dtype=dtype) else: def handle_mask(inline): if isinstance(inline, list): if None in inline: inline_array = np.asarray(inline) nones = np.equal(inline_array, None) return np.ma.array(np.where(nones, 0, inline), mask=nones) else: return [handle_mask(x) for x in inline] return inline inline = handle_mask(inline) inline = np.ma.asarray(inline, dtype=dtype) if not ma.is_masked(inline): return inline.data else: return inline def numpy_array_to_list(array): def tolist(x): if isinstance(x, (np.ndarray, NDArrayType)): if x.dtype.char == 'S': x = x.astype('U').tolist() else: x = x.tolist() if isinstance(x, (list, tuple)): return [tolist(y) for y in x] else: return x def ascii_to_unicode(x): # Convert byte string arrays to unicode string arrays, since YAML # doesn't handle the former. if isinstance(x, list): return [ascii_to_unicode(y) for y in x] elif isinstance(x, bytes): return x.decode('ascii') else: return x result = ascii_to_unicode(tolist(array)) schema.validate_large_literals(result) return result class NDArrayType(AsdfType): name = 'core/ndarray' types = [np.ndarray, ma.MaskedArray] def __init__(self, source, shape, dtype, offset, strides, order, mask, asdffile): self._asdffile = asdffile self._source = source self._block = None self._array = None self._mask = mask if isinstance(source, list): self._array = inline_data_asarray(source, dtype) self._array = self._apply_mask(self._array, self._mask) self._block = asdffile.blocks.add_inline(self._array) if shape is not None: if ((shape[0] == '*' and self._array.shape[1:] != tuple(shape[1:])) or (self._array.shape != tuple(shape))): raise ValueError( "inline data doesn't match the given shape") self._shape = shape self._dtype = dtype self._offset = offset self._strides = strides self._order = order def _make_array(self): if self._array is None: block = self.block shape = self.get_actual_shape( self._shape, self._strides, self._dtype, len(block)) self._array = np.ndarray( shape, self._dtype, block.data, self._offset, self._strides, self._order) self._array = self._apply_mask(self._array, self._mask) return self._array def _apply_mask(self, array, mask): if isinstance(mask, (np.ndarray, NDArrayType)): # Use "mask.view()" here so the underlying possibly # memmapped mask array is freed properly when the masked # array goes away. 
array = ma.array(array, mask=mask.view()) # assert util.get_array_base(array.mask) is util.get_array_base(mask) return array elif np.isscalar(mask): if np.isnan(mask): return ma.array(array, mask=np.isnan(array)) else: return ma.masked_values(array, mask) return array def __array__(self): return self._make_array() def __repr__(self): # repr alone should not force loading of the data if self._array is None: return "<{0} (unloaded) shape: {1} dtype: {2}>".format( 'array' if self._mask is None else 'masked array', self._shape, self._dtype) return repr(self._array) def __str__(self): # str alone should not force loading of the data if self._array is None: return "<{0} (unloaded) shape: {1} dtype: {2}>".format( 'array' if self._mask is None else 'masked array', self._shape, self._dtype) return str(self._array) def get_actual_shape(self, shape, strides, dtype, block_size): """ Get the actual shape of an array, by computing it against the block_size if it contains a ``*``. """ num_stars = shape.count('*') if num_stars == 0: return shape elif num_stars == 1: if shape[0] != '*': raise ValueError("'*' may only be in first entry of shape") if strides is not None: stride = strides[0] else: stride = np.product(shape[1:]) * dtype.itemsize missing = int(block_size / stride) return [missing] + shape[1:] raise ValueError("Invalid shape '{0}'".format(shape)) @property def block(self): if self._block is None: self._block = self._asdffile.blocks.get_block(self._source) return self._block @property def shape(self): if self._shape is None: return self.__array__().shape if '*' in self._shape: return tuple(self.get_actual_shape( self._shape, self._strides, self._dtype, len(self.block))) return tuple(self._shape) @property def dtype(self): if self._array is None: return self._dtype else: return self._array.dtype @property def __len__(self): if self._array is None: return self._shape[0] else: return len(self._array) def __getattr__(self, attr): # We need to ignore __array_struct__, or unicode arrays end up # getting "double casted" and upsized. This also reduces the # number of array creations in the general case. 
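# Raising AttributeError below makes this attribute appear to be missing,
# which prevents the "double casting" described above.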
if attr == '__array_struct__': raise AttributeError() return getattr(self._make_array(), attr) @classmethod def from_tree(cls, node, ctx): if isinstance(node, list): return cls(node, None, None, None, None, None, None, ctx) elif isinstance(node, dict): source = node.get('source') data = node.get('data') if source and data: raise ValueError( "Both source and data may not be provided " "at the same time") if data: source = data shape = node.get('shape', None) if data is not None: byteorder = sys.byteorder else: byteorder = node['byteorder'] if 'datatype' in node: dtype = asdf_datatype_to_numpy_dtype( node['datatype'], byteorder) else: dtype = None offset = node.get('offset', 0) strides = node.get('strides', None) mask = node.get('mask', None) return cls(source, shape, dtype, offset, strides, 'C', mask, ctx) raise TypeError("Invalid ndarray description.") @classmethod def reserve_blocks(cls, data, ctx): # Find all of the used data buffers so we can add or rearrange # them if necessary if isinstance(data, np.ndarray): yield ctx.blocks.find_or_create_block_for_array(data, ctx) elif isinstance(data, NDArrayType): yield data.block @classmethod def to_tree(cls, data, ctx): base = util.get_array_base(data) block = ctx.blocks.find_or_create_block_for_array(data, ctx) shape = data.shape dtype = data.dtype offset = data.ctypes.data - base.ctypes.data if data.flags[b'C_CONTIGUOUS']: strides = None else: strides = data.strides result = {} result['shape'] = list(shape) if block.array_storage == 'streamed': result['shape'][0] = '*' dtype, byteorder = numpy_dtype_to_asdf_datatype( dtype, include_byteorder=(block.array_storage != 'inline')) byteorder = block.override_byteorder(byteorder) if block.array_storage == 'inline': listdata = numpy_array_to_list(data) result['data'] = yamlutil.custom_tree_to_tagged_tree( listdata, ctx) result['datatype'] = dtype else: result['shape'] = list(shape) if block.array_storage == 'streamed': result['shape'][0] = '*' result['source'] = ctx.blocks.get_source(block) result['datatype'] = dtype result['byteorder'] = byteorder if offset > 0: result['offset'] = offset if strides is not None: result['strides'] = list(strides) if isinstance(data, ma.MaskedArray): if np.any(data.mask): if block.array_storage == 'inline': ctx.blocks.set_array_storage(ctx.blocks[data.mask], 'inline') result['mask'] = yamlutil.custom_tree_to_tagged_tree( data.mask, ctx) return result @classmethod def _assert_equality(cls, old, new, func): if old.dtype.fields: if not new.dtype.fields: assert False, "arrays not equal" for a, b in zip(old, new): cls._assert_equality(a, b, func) else: old = old.__array__() new = new.__array__() if old.dtype.char in 'SU': if old.dtype.char == 'S': old = old.astype('U') if new.dtype.char == 'S': new = new.astype('U') old = old.tolist() new = new.tolist() assert old == new else: func(old, new) @classmethod def assert_equal(cls, old, new): from numpy.testing import assert_array_equal cls._assert_equality(old, new, assert_array_equal) @classmethod def assert_allclose(cls, old, new): from numpy.testing import assert_allclose, assert_array_equal if (old.dtype.kind in 'iu' and new.dtype.kind in 'iu'): cls._assert_equality(old, new, assert_array_equal) else: cls._assert_equality(old, new, assert_allclose) @classmethod def copy_to_new_asdf(cls, node, asdffile): if isinstance(node, NDArrayType): array = node._make_array() asdffile.blocks.set_array_storage(asdffile.blocks[array], node.block.array_storage) return node._make_array() return node def _make_operation(name): def 
__operation__(self, *args): return getattr(self._make_array(), name)(*args) return __operation__ for op in [ '__neg__', '__pos__', '__abs__', '__invert__', '__complex__', '__int__', '__long__', '__float__', '__oct__', '__hex__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', '__cmp__', '__rcmp__', '__add__', '__sub__', '__mul__', '__floordiv__', '__mod__', '__divmod__', '__pow__', '__lshift__', '__rshift__', '__and__', '__xor__', '__or__', '__div__', '__truediv__', '__radd__', '__rsub__', '__rmul__', '__rdiv__', '__rtruediv__', '__rfloordiv__', '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__', '__iadd__', '__isub__', '__imul__', '__idiv__', '__itruediv__', '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__', '__irshift__', '__iand__', '__ixor__', '__ior__', '__getitem__', '__delitem__', '__contains__', '__setitem__']: setattr(NDArrayType, op, _make_operation(op)) def _get_ndim(instance): if isinstance(instance, list): array = inline_data_asarray(instance) return array.ndim elif isinstance(instance, dict): if 'shape' in instance: return len(instance['shape']) elif 'data' in instance: array = inline_data_asarray(instance['data']) return array.ndim elif isinstance(instance, (np.ndarray, NDArrayType)): return len(instance.shape) def validate_ndim(validator, ndim, instance, schema): in_ndim = _get_ndim(instance) if in_ndim != ndim: yield ValidationError( "Wrong number of dimensions: Expected {0}, got {1}".format( ndim, in_ndim), instance=repr(instance)) def validate_max_ndim(validator, max_ndim, instance, schema): in_ndim = _get_ndim(instance) if in_ndim > max_ndim: yield ValidationError( "Wrong number of dimensions: Expected max of {0}, got {1}".format( max_ndim, in_ndim), instance=repr(instance)) def validate_datatype(validator, datatype, instance, schema): if isinstance(instance, list): array = inline_data_asarray(instance) in_datatype, _ = numpy_dtype_to_asdf_datatype(array.dtype) elif isinstance(instance, dict): if 'datatype' in instance: in_datatype = instance['datatype'] elif 'data' in instance: array = inline_data_asarray(instance['data']) in_datatype, _ = numpy_dtype_to_asdf_datatype(array.dtype) else: raise ValidationError("Not an array") elif isinstance(instance, (np.ndarray, NDArrayType)): in_datatype, _ = numpy_dtype_to_asdf_datatype(instance.dtype) else: raise ValidationError("Not an array") if datatype == in_datatype: return if schema.get('exact_datatype', False): yield ValidationError( "Expected datatype '{0}', got '{1}'".format( datatype, in_datatype)) np_datatype = asdf_datatype_to_numpy_dtype(datatype) np_in_datatype = asdf_datatype_to_numpy_dtype(in_datatype) if not np_datatype.fields: if np_in_datatype.fields: yield ValidationError( "Expected scalar datatype '{0}', got '{1}'".format( datatype, in_datatype)) if not np.can_cast(np_in_datatype, np_datatype, 'safe'): yield ValidationError( "Can not safely cast from '{0}' to '{1}' ".format( in_datatype, datatype)) else: if not np_in_datatype.fields: yield ValidationError( "Expected structured datatype '{0}', got '{1}'".format( datatype, in_datatype)) if len(np_in_datatype.fields) != len(np_datatype.fields): yield ValidationError( "Mismatch in number of columns: " "Expected {0}, got {1}".format( len(datatype), len(in_datatype))) for i in range(len(np_datatype.fields)): in_type = np_in_datatype[i] out_type = np_datatype[i] if not np.can_cast(in_type, out_type, 'safe'): yield ValidationError( "Can not safely cast to expected datatype: " "Expected {0}, got 
{1}".format( numpy_dtype_to_asdf_datatype(out_type)[0], numpy_dtype_to_asdf_datatype(in_type)[0])) NDArrayType.validators = { 'ndim': validate_ndim, 'max_ndim': validate_max_ndim, 'datatype': validate_datatype } asdf-1.3.3/asdf/tags/transform/0000755000175000017500000000000013246031665015655 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/transform/basic.py0000644000175000017500000001246613246003441017310 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function try: import astropy except ImportError: HAS_ASTROPY = False else: HAS_ASTROPY = True from astropy.modeling import mappings from astropy.utils import minversion ASTROPY_12 = minversion(astropy, "1.2") from ...asdftypes import AsdfType from ... import tagged from ... import yamlutil __all__ = ['TransformType', 'IdentityType', 'ConstantType', 'DomainType'] class TransformType(AsdfType): version = '1.1.0' requires = ['astropy'] @classmethod def _from_tree_base_transform_members(cls, model, node, ctx): if 'inverse' in node: model.inverse = yamlutil.tagged_tree_to_custom_tree( node['inverse'], ctx) if 'name' in node: model = model.rename(node['name']) # TODO: Remove domain in a later version. if 'domain' in node: model.bounding_box = cls._domain_to_bounding_box(node['domain']) elif 'bounding_box' in node: model.bounding_box = node['bounding_box'] return model @classmethod def _domain_to_bounding_box(cls, domain): bb = tuple([(item['lower'], item['upper']) for item in domain]) if len(bb) == 1: bb = bb[0] return bb @classmethod def from_tree_transform(cls, node, ctx): raise NotImplementedError( "Must be implemented in TransformType subclasses") @classmethod def from_tree(cls, node, ctx): model = cls.from_tree_transform(node, ctx) model = cls._from_tree_base_transform_members(model, node, ctx) return model @classmethod def _to_tree_base_transform_members(cls, model, node, ctx): if ASTROPY_12: if getattr(model, '_user_inverse', None) is not None: node['inverse'] = yamlutil.custom_tree_to_tagged_tree( model._user_inverse, ctx) else: if getattr(model, '_custom_inverse', None) is not None: node['inverse'] = yamlutil.custom_tree_to_tagged_tree( model._custom_inverse, ctx) if model.name is not None: node['name'] = model.name try: bb = model.bounding_box except NotImplementedError: bb = None if bb is not None: if model.n_inputs == 1: bb = list(bb) else: bb = [list(item) for item in model.bounding_box] node['bounding_box'] = bb @classmethod def to_tree_transform(cls, model, ctx): raise NotImplementedError("Must be implemented in TransformType subclasses") @classmethod def to_tree(cls, model, ctx): node = cls.to_tree_transform(model, ctx) cls._to_tree_base_transform_members(model, node, ctx) return node @classmethod def assert_equal(cls, a, b): # TODO: If models become comparable themselves, remove this. from ...tests.helpers import assert_tree_match assert a.name == b.name # TODO: Assert inverses are the same class IdentityType(TransformType): name = "transform/identity" types = ['astropy.modeling.mappings.Identity'] @classmethod def from_tree_transform(cls, node, ctx): return mappings.Identity(node.get('n_dims', 1)) @classmethod def to_tree_transform(cls, data, ctx): node = {} if data.n_inputs != 1: node['n_dims'] = data.n_inputs return node @classmethod def assert_equal(cls, a, b): from astropy.modeling import mappings # TODO: If models become comparable themselves, remove this. 
TransformType.assert_equal(a, b) assert (isinstance(a, mappings.Identity) and isinstance(b, mappings.Identity) and a.n_inputs == b.n_inputs) class ConstantType(TransformType): name = "transform/constant" types = ['astropy.modeling.functional_models.Const1D'] @classmethod def from_tree_transform(cls, node, ctx): from astropy.modeling import functional_models return functional_models.Const1D(node['value']) @classmethod def to_tree_transform(cls, data, ctx): return { 'value': data.amplitude.value } class DomainType(AsdfType): name = "transform/domain" @classmethod def from_tree(cls, node, ctx): return node @classmethod def to_tree(cls, data, ctx): return data # TODO: This is just here for bootstrapping and will go away eventually if HAS_ASTROPY: class GenericModel(mappings.Mapping): def __init__(self, n_inputs, n_outputs): mapping = tuple(range(n_inputs)) super(GenericModel, self).__init__(mapping) self._outputs = tuple('x' + str(idx) for idx in range(self.n_outputs + 1)) class GenericType(TransformType): name = "transform/generic" if HAS_ASTROPY: types = [GenericModel] @classmethod def from_tree_transform(cls, node, ctx): return GenericModel( node['n_inputs'], node['n_outputs']) @classmethod def to_tree_transform(cls, data, ctx): return { 'n_inputs': data.n_inputs, 'n_outputs': data.n_outputs } asdf-1.3.3/asdf/tags/transform/compound.py0000644000175000017500000001110713246003441020042 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six from ... import tagged from ... import yamlutil from .basic import TransformType, ConstantType __all__ = ['CompoundType', 'RemapAxesType'] _operator_to_tag_mapping = { '+' : 'add', '-' : 'subtract', '*' : 'multiply', '/' : 'divide', '**' : 'power', '|' : 'compose', '&' : 'concatenate' } _tag_to_method_mapping = { 'add' : '__add__', 'subtract' : '__sub__', 'multiply' : '__mul__', 'divide' : '__truediv__', 'power' : '__pow__', 'compose' : '__or__', 'concatenate' : '__and__' } class CompoundType(TransformType): name = ['transform/' + x for x in _tag_to_method_mapping.keys()] types = ['astropy.modeling.core._CompoundModel'] handle_dynamic_subclasses = True @classmethod def from_tree_tagged(cls, node, ctx): from astropy import modeling tag = node._tag[node._tag.rfind('/')+1:] tag = tag[:tag.rfind('-')] oper = _tag_to_method_mapping[tag] left = yamlutil.tagged_tree_to_custom_tree( node['forward'][0], ctx) if not isinstance(left, modeling.Model): raise TypeError("Unknown model type '{0}'".format( node['forward'][0]._tag)) right = yamlutil.tagged_tree_to_custom_tree( node['forward'][1], ctx) if not isinstance(right, modeling.Model): raise TypeError("Unknown model type '{0}'".format( node['forward'][1]._tag)) model = getattr(left, oper)(right) model = cls._from_tree_base_transform_members(model, node, ctx) return model @classmethod def _to_tree_from_model_tree(cls, tree, ctx): if tree.left.isleaf: left = yamlutil.custom_tree_to_tagged_tree( tree.left.value, ctx) else: left = cls._to_tree_from_model_tree(tree.left, ctx) if tree.right.isleaf: right = yamlutil.custom_tree_to_tagged_tree( tree.right.value, ctx) else: right = cls._to_tree_from_model_tree(tree.right, ctx) node = { 'forward': [left, right] } try: tag_name = 'transform/' + _operator_to_tag_mapping[tree.value] except KeyError: raise ValueError("Unknown operator '{0}'".format(tree.value)) node = tagged.tag_object(cls.make_yaml_tag(tag_name), node, ctx=ctx) 
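# At this point ``node`` carries the operator-specific YAML tag (e.g.
# ``transform/add`` for ``+``) with its left and right operands stored under
# the ``forward`` key.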
return node @classmethod def to_tree_tagged(cls, model, ctx): node = cls._to_tree_from_model_tree(model._tree, ctx) cls._to_tree_base_transform_members(model, node, ctx) return node @classmethod def assert_equal(cls, a, b): # TODO: If models become comparable themselves, remove this. TransformType.assert_equal(a, b) from ...tests.helpers import assert_tree_match assert_tree_match(a._tree.left.value, b._tree.left.value) assert_tree_match(a._tree.right.value, b._tree.right.value) assert a._tree.value == b._tree.value class RemapAxesType(TransformType): name = 'transform/remap_axes' types = ['astropy.modeling.models.Mapping'] @classmethod def from_tree_transform(cls, node, ctx): from astropy.modeling.models import Identity, Mapping mapping = node['mapping'] n_inputs = node.get('n_inputs') if all([isinstance(x, six.integer_types) for x in mapping]): return Mapping(tuple(mapping), n_inputs) if n_inputs is None: n_inputs = max([x for x in mapping if isinstance(x, six.integer_types)]) + 1 transform = Identity(n_inputs) new_mapping = [] i = n_inputs for entry in mapping: if isinstance(entry, six.integer_types): new_mapping.append(entry) else: new_mapping.append(i) transform = transform & ConstantType.from_tree( {'value': int(entry.value)}, ctx) i += 1 return transform | Mapping(new_mapping) @classmethod def to_tree_transform(cls, model, ctx): node = {'mapping': list(model.mapping)} if model.n_inputs > max(model.mapping) + 1: node['n_inputs'] = model.n_inputs return node @classmethod def assert_equal(cls, a, b): TransformType.assert_equal(a, b) assert a.mapping == b.mapping assert(a.n_inputs == b.n_inputs) asdf-1.3.3/asdf/tags/transform/tabular.py0000644000175000017500000000607413246003441017657 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np from numpy.testing import assert_array_equal from ... import yamlutil from .basic import TransformType try: import astropy except ImportError: HAS_ASTROPY = False else: HAS_ASTROPY = True from astropy.utils import minversion ASTROPY_13 = minversion(astropy, "1.3.dev16506") if HAS_ASTROPY and ASTROPY_13: __all__ = ['TabularType'] else: __all__ = [] class TabularType(TransformType): name = "transform/tabular" if HAS_ASTROPY and ASTROPY_13: types = [astropy.modeling.models.Tabular2D, astropy.modeling.models.Tabular1D ] else: types = [] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling lookup_table = node.pop("lookup_table") dim = lookup_table.ndim name = node.get('name', None) fill_value = node.pop("fill_value", None) if dim == 1: # The copy is necessary because the array is memory mapped. 
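# The slice below follows the note above: it is intended to avoid handing the
# model a live view of the memory-mapped block; the 2D and N-D branches below
# apply the same ``[:]`` pattern to each points array.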
points = (node['points'][0][:],) model = modeling.models.Tabular1D(points=points, lookup_table=lookup_table, method=node['method'], bounds_error=node['bounds_error'], fill_value=fill_value, name=name) elif dim == 2: points = tuple([p[:] for p in node['points']]) model = modeling.models.Tabular2D(points=points, lookup_table=lookup_table, method=node['method'], bounds_error=node['bounds_error'], fill_value=fill_value, name=name) else: tabular_class = modeling.models.tabular_model(dim, name) points = tuple([p[:] for p in node['points']]) model = tabular_class(points=points, lookup_table=lookup_table, method=node['method'], bounds_error=node['bounds_error'], fill_value=fill_value, name=name) return model @classmethod def to_tree_transform(cls, model, ctx): node = {} node["fill_value"] = model.fill_value node["lookup_table"] = model.lookup_table node["points"] = [p for p in model.points] node["method"] = str(model.method) node["bounds_error"] = model.bounds_error node["name"] = model.name return yamlutil.custom_tree_to_tagged_tree(node, ctx) @classmethod def assert_equal(cls, a, b): assert_array_equal(a.lookup_table, b.lookup_table) assert_array_equal(a.points, b.points) assert (a.method == b.method) if a.fill_value is None: assert b.fill_value is None elif np.isnan(a.fill_value): assert np.isnan(b.fill_value) else: assert(a.fill_value == b.fill_value) assert(a.bounds_error == b.bounds_error) asdf-1.3.3/asdf/tags/transform/polynomial.py0000644000175000017500000001113113246003441020376 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np from numpy.testing import assert_array_equal from ... import yamlutil from .basic import TransformType __all__ = ['ShiftType', 'ScaleType', 'PolynomialType'] class ShiftType(TransformType): name = "transform/shift" types = ['astropy.modeling.models.Shift'] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling offset = node['offset'] if not np.isscalar(offset): raise NotImplementedError( "Asdf currently only supports scalar inputs to Shift transform.") return modeling.models.Shift(offset) @classmethod def to_tree_transform(cls, model, ctx): return {'offset': model.offset.value} #return yamlutil.custom_tree_to_tagged_tree(node, ctx) @classmethod def assert_equal(cls, a, b): from astropy import modeling # TODO: If models become comparable themselves, remove this. TransformType.assert_equal(a, b) assert (isinstance(a, modeling.models.Shift) and isinstance(b, modeling.models.Shift)) assert_array_equal(a.offset.value, b.offset.value) class ScaleType(TransformType): name = "transform/scale" types = ['astropy.modeling.models.Scale'] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling factor = node['factor'] if not np.isscalar(factor): raise NotImplementedError( "Asdf currently only supports scalar inputs to Scale transform.") return modeling.models.Scale(factor) @classmethod def to_tree_transform(cls, model, ctx): node = {'factor': model.factor.value} return yamlutil.custom_tree_to_tagged_tree(node, ctx) @classmethod def assert_equal(cls, a, b): from astropy import modeling # TODO: If models become comparable themselves, remove this. 
TransformType.assert_equal(a, b) assert (isinstance(a, modeling.models.Scale) and isinstance(b, modeling.models.Scale)) assert_array_equal(a.factor, b.factor) class PolynomialType(TransformType): name = "transform/polynomial" types = ['astropy.modeling.models.Polynomial1D', 'astropy.modeling.models.Polynomial2D'] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling coefficients = np.asarray(node['coefficients']) n_dim = coefficients.ndim if n_dim == 1: model = modeling.models.Polynomial1D(coefficients.size - 1) model.parameters = coefficients elif n_dim == 2: shape = coefficients.shape degree = shape[0] - 1 if shape[0] != shape[1]: raise TypeError("Coefficients must be an (n+1, n+1) matrix") coeffs = {} for i in range(shape[0]): for j in range(shape[0]): if i + j < degree + 1: name = 'c' + str(i) + '_' +str(j) coeffs[name] = coefficients[i, j] model = modeling.models.Polynomial2D(degree, **coeffs) else: raise NotImplementedError( "Asdf currently only supports 1D or 2D polynomial transform.") return model @classmethod def to_tree_transform(cls, model, ctx): from astropy import modeling if isinstance(model, modeling.models.Polynomial1D): coefficients = np.array(model.parameters) elif isinstance(model, modeling.models.Polynomial2D): degree = model.degree coefficients = np.zeros((degree + 1, degree + 1)) for i in range(degree + 1): for j in range(degree + 1): if i + j < degree + 1: name = 'c' + str(i) + '_' +str(j) coefficients[i, j] = getattr(model, name).value node = {'coefficients': coefficients} return yamlutil.custom_tree_to_tagged_tree(node, ctx) @classmethod def assert_equal(cls, a, b): from astropy import modeling # TODO: If models become comparable themselves, remove this. TransformType.assert_equal(a, b) assert (isinstance(a, (modeling.models.Polynomial1D, modeling.models.Polynomial2D)) and isinstance(b, (modeling.models.Polynomial1D, modeling.models.Polynomial2D))) assert_array_equal(a.parameters, b.parameters) asdf-1.3.3/asdf/tags/transform/tests/0000755000175000017500000000000013246031665017017 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/transform/tests/test_transform.py0000644000175000017500000001042513246003441022434 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np try: import astropy except ImportError: HAS_ASTROPY = False test_models = [] else: HAS_ASTROPY = True from astropy.utils import minversion ASTROPY_13 = minversion(astropy, "1.3.dev16506") from astropy.modeling import models as astmodels test_models = [astmodels.Identity(2), astmodels.Polynomial1D(2, c0=1, c1=2, c2=3), astmodels.Polynomial2D(1, c0_0=1, c0_1=2, c1_0=3), astmodels.Shift(2.), astmodels.Scale(3.4), astmodels.RotateNative2Celestial(5.63, -72.5, 180), astmodels.RotateCelestial2Native(5.63, -72.5, 180), astmodels.EulerAngleRotation(23, 14, 2.3, axes_order='xzx'), astmodels.Mapping((0, 1), n_inputs=3)] import pytest from ....tests import helpers from .... 
import util from ..basic import DomainType @pytest.mark.skipif('not HAS_ASTROPY') def test_transforms_compound(tmpdir): tree = { 'compound': astmodels.Shift(1) & astmodels.Shift(2) | astmodels.Sky2Pix_TAN() | astmodels.Rotation2D() | astmodels.AffineTransformation2D([[2, 0], [0, 2]], [42, 32]) + astmodels.Rotation2D(32) } helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif('not HAS_ASTROPY') def test_inverse_transforms(tmpdir): rotation = astmodels.Rotation2D(32) rotation.inverse = astmodels.Rotation2D(45) real_rotation = astmodels.Rotation2D(32) tree = { 'rotation': rotation, 'real_rotation': real_rotation } def check(ff): assert ff.tree['rotation'].inverse.angle == 45 helpers.assert_roundtrip_tree(tree, tmpdir, check) @pytest.mark.skipif('not HAS_ASTROPY') @pytest.mark.parametrize(('model'), test_models) def test_single_model(tmpdir, model): tree = {'single_model': model} helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif('not HAS_ASTROPY') def test_name(tmpdir): def check(ff): assert ff.tree['rot'].name == 'foo' tree = {'rot': astmodels.Rotation2D(23, name='foo')} helpers.assert_roundtrip_tree(tree, tmpdir, check) @pytest.mark.skipif('not HAS_ASTROPY') def test_zenithal_with_arguments(tmpdir): tree = { 'azp': astmodels.Sky2Pix_AZP(0.5, 0.3) } helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif('not HAS_ASTROPY') def test_naming_of_compound_model(tmpdir): """Issue #87""" def asdf_check(ff): assert ff.tree['model'].name == 'compound_model' offx = astmodels.Shift(1) scl = astmodels.Scale(2) model = (offx | scl).rename('compound_model') tree = { 'model': model } helpers.assert_roundtrip_tree(tree, tmpdir, asdf_check) @pytest.mark.skipif('not HAS_ASTROPY') def test_generic_projections(tmpdir): from .. import projections for tag_name, (name, params) in projections._generic_projections.items(): tree = { 'forward': util.resolve_name( 'astropy.modeling.projections.Sky2Pix_{0}'.format(name))(), 'backward': util.resolve_name( 'astropy.modeling.projections.Pix2Sky_{0}'.format(name))() } helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif('not HAS_ASTROPY') @pytest.mark.skipif('not ASTROPY_13') def test_tabular_model(tmpdir): points = np.arange(0, 5) values = [1., 10, 2, 45, -3] model = astmodels.Tabular1D(points=points, lookup_table=values) tree = {'model': model} helpers.assert_roundtrip_tree(tree, tmpdir) table = np.array([[ 3., 0., 0.], [ 0., 2., 0.], [ 0., 0., 0.]]) points = ([1, 2, 3], [1, 2, 3]) model2 = astmodels.Tabular2D(points, lookup_table=table, bounds_error=False, fill_value=None, method='nearest') tree = {'model': model2} helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif('not HAS_ASTROPY') def test_bounding_box(tmpdir): model = astmodels.Shift(1) & astmodels.Shift(2) model.bounding_box = ((1, 3), (2, 4)) tree = {'model': model} helpers.assert_roundtrip_tree(tree, tmpdir) asdf-1.3.3/asdf/tags/transform/tests/__init__.py0000644000175000017500000000000013246003441021105 0ustar dandan00000000000000asdf-1.3.3/asdf/tags/transform/__init__.py0000644000175000017500000000045413246003441017760 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .basic import * from .compound import * from .projections import * from .polynomial import * from .tabular import * asdf-1.3.3/asdf/tags/transform/projections.py0000644000175000017500000002101213246003441020551 0ustar dandan00000000000000# 
Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from numpy.testing import assert_array_equal from ... import yamlutil from .basic import TransformType __all__ = ['AffineType', 'Rotate2DType', 'Rotate3DType'] class AffineType(TransformType): name = "transform/affine" types = ['astropy.modeling.projections.AffineTransformation2D'] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling matrix = node['matrix'] translation = node['translation'] if matrix.shape != (2, 2): raise NotImplementedError( "asdf currently only supports 2x2 (2D) rotation transformation " "matrices") if translation.shape != (2,): raise NotImplementedError( "asdf currently only supports 2D translation transformations.") return modeling.projections.AffineTransformation2D( matrix=matrix, translation=translation) @classmethod def to_tree_transform(cls, model, ctx): node = {'matrix': model.matrix.value, 'translation': model.translation.value} return yamlutil.custom_tree_to_tagged_tree(node, ctx) @classmethod def assert_equal(cls, a, b): # TODO: If models become comparable themselves, remove this. TransformType.assert_equal(a, b) assert (a.__class__ == b.__class__) assert_array_equal(a.matrix, b.matrix) assert_array_equal(a.translation, b.translation) class Rotate2DType(TransformType): name = "transform/rotate2d" types = ['astropy.modeling.rotations.Rotation2D'] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling return modeling.rotations.Rotation2D(node['angle']) @classmethod def to_tree_transform(cls, model, ctx): return {'angle': model.angle.value} @classmethod def assert_equal(cls, a, b): from astropy import modeling # TODO: If models become comparable themselves, remove this. 
TransformType.assert_equal(a, b) assert (isinstance(a, modeling.rotations.Rotation2D) and isinstance(b, modeling.rotations.Rotation2D)) assert_array_equal(a.angle, b.angle) class Rotate3DType(TransformType): name = "transform/rotate3d" types = ['astropy.modeling.rotations.RotateNative2Celestial', 'astropy.modeling.rotations.RotateCelestial2Native', 'astropy.modeling.rotations.EulerAngleRotation'] @classmethod def from_tree_transform(cls, node, ctx): from astropy import modeling if node['direction'] == 'native2celestial': return modeling.rotations.RotateNative2Celestial(node["phi"], node["theta"], node["psi"]) elif node['direction'] == 'celestial2native': return modeling.rotations.RotateCelestial2Native(node["phi"], node["theta"], node["psi"]) else: return modeling.rotations.EulerAngleRotation(node["phi"], node["theta"], node["psi"], axes_order=node["direction"]) @classmethod def to_tree_transform(cls, model, ctx): from astropy import modeling if isinstance(model, modeling.rotations.RotateNative2Celestial): try: return {"phi": model.lon.value, "theta": model.lat.value, "psi": model.lon_pole.value, "direction": "native2celestial" } except AttributeError: return {"phi": model.lon, "theta": model.lat, "psi": model.lon_pole, "direction": "native2celestial" } elif isinstance(model, modeling.rotations.RotateCelestial2Native): try: return {"phi": model.lon.value, "theta": model.lat.value, "psi": model.lon_pole.value, "direction": "celestial2native" } except AttributeError: return {"phi": model.lon, "theta": model.lat, "psi": model.lon_pole, "direction": "celestial2native" } else: return {"phi": model.phi.value, "theta": model.theta.value, "psi": model.psi.value, "direction": model.axes_order } @classmethod def assert_equal(cls, a, b): # TODO: If models become comparable themselves, remove this. TransformType.assert_equal(a, b) assert a.__class__ == b.__class__ if a.__class__.__name__ == "EulerAngleRotation": assert_array_equal(a.phi, b.phi) assert_array_equal(a.psi, b.psi) assert_array_equal(a.theta, b.theta) else: assert_array_equal(a.lon, b.lon) assert_array_equal(a.lat, b.lat) assert_array_equal(a.lon_pole, b.lon_pole) class GenericProjectionType(TransformType): @classmethod def from_tree_transform(cls, node, ctx): args = [] for param_name, default in cls.params: args.append(node.get(param_name, default)) if node['direction'] == 'pix2sky': return cls.types[0](*args) else: return cls.types[1](*args) @classmethod def to_tree_transform(cls, model, ctx): node = {} if isinstance(model, cls.types[0]): node['direction'] = 'pix2sky' else: node['direction'] = 'sky2pix' for param_name, default in cls.params: val = getattr(model, param_name).value if val != default: node[param_name] = val return node @classmethod def assert_equal(cls, a, b): # TODO: If models become comparable themselves, remove this. 
TransformType.assert_equal(a, b) assert a.__class__ == b.__class__ _generic_projections = { 'zenithal_perspective': ('ZenithalPerspective', (('mu', 0.0), ('gamma', 0.0))), 'gnomonic': ('Gnomonic', ()), 'stereographic': ('Stereographic', ()), 'slant_orthographic': ('SlantOrthographic', (('xi', 0.0), ('eta', 0.0))), 'zenithal_equidistant': ('ZenithalEquidistant', ()), 'zenithal_equal_area': ('ZenithalEqualArea', ()), 'airy': ('Airy', (('theta_b', 90.0),)), 'cylindrical_perspective': ('CylindricalPerspective', (('mu', 0.0), ('lam', 0.0))), 'cylindrical_equal_area': ('CylindricalEqualArea', (('lam', 0.0),)), 'plate_carree': ('PlateCarree', ()), 'mercator': ('Mercator', ()), 'sanson_flamsteed': ('SansonFlamsteed', ()), 'parabolic': ('Parabolic', ()), 'molleweide': ('Molleweide', ()), 'hammer_aitoff': ('HammerAitoff', ()), 'conic_perspective': ('ConicPerspective', (('sigma', 0.0), ('delta', 0.0))), 'conic_equal_area': ('ConicEqualArea', (('sigma', 0.0), ('delta', 0.0))), 'conic_equidistant': ('ConicEquidistant', (('sigma', 0.0), ('delta', 0.0))), 'conic_orthomorphic': ('ConicOrthomorphic', (('sigma', 0.0), ('delta', 0.0))), 'bonne_equal_area': ('BonneEqualArea', (('theta1', 0.0),)), 'polyconic': ('Polyconic', ()), 'tangential_spherical_cube': ('TangentialSphericalCube', ()), 'cobe_quad_spherical_cube': ('COBEQuadSphericalCube', ()), 'quad_spherical_cube': ('QuadSphericalCube', ()), 'healpix': ('HEALPix', (('H', 4.0), ('X', 3.0))), 'healpix_polar': ('HEALPixPolar', ()) } def make_projection_types(): for tag_name, (name, params) in _generic_projections.items(): class_name = '{0}Type'.format(name) types = ['astropy.modeling.projections.Pix2Sky_{0}'.format(name), 'astropy.modeling.projections.Sky2Pix_{0}'.format(name)] globals()[class_name] = type( str(class_name), (GenericProjectionType,), {'name': 'transform/{0}'.format(tag_name), 'types': types, 'params': params}) __all__.append(class_name) make_projection_types() asdf-1.3.3/asdf/tags/fits/0000755000175000017500000000000013246031665014607 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/fits/fits.py0000644000175000017500000000540313246003441016117 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np from ...asdftypes import AsdfType from ... 
import yamlutil class FitsType(AsdfType): name = 'fits/fits' types = ['astropy.io.fits.HDUList'] requires = ['astropy'] @classmethod def from_tree(cls, data, ctx): from astropy.io import fits hdus = [] first = True for hdu_entry in data: header = fits.Header([fits.Card(*x) for x in hdu_entry['header']]) data = hdu_entry.get('data') if data is not None: try: data = data.__array__() except ValueError: data = None if first: hdu = fits.PrimaryHDU(data=data, header=header) first = False elif data.dtype.names is not None: hdu = fits.BinTableHDU(data=data, header=header) else: hdu = fits.ImageHDU(data=data, header=header) hdus.append(hdu) hdulist = fits.HDUList(hdus) return hdulist @classmethod def to_tree(cls, hdulist, ctx): from astropy import table units = [] for hdu in hdulist: header_list = [] for card in hdu.header.cards: if card.comment: new_card = [card.keyword, card.value, card.comment] else: if card.value: new_card = [card.keyword, card.value] else: if card.keyword: new_card = [card.keyword] else: new_card = [] header_list.append(new_card) hdu_dict = {} hdu_dict['header'] = header_list if hdu.data is not None: if hdu.data.dtype.names is not None: data = table.Table(hdu.data) else: data = hdu.data hdu_dict['data'] = yamlutil.custom_tree_to_tagged_tree(data, ctx) units.append(hdu_dict) return units @classmethod def reserve_blocks(cls, data, ctx): for hdu in data: if hdu.data is not None: yield ctx.blocks.find_or_create_block_for_array(hdu.data, ctx) @classmethod def assert_equal(cls, old, new): from numpy.testing import assert_array_equal for hdua, hdub in zip(old, new): assert_array_equal(hdua.data, hdub.data) for carda, cardb in zip(hdua.header.cards, hdub.header.cards): assert tuple(carda) == tuple(cardb) asdf-1.3.3/asdf/tags/fits/setup_package.py0000644000175000017500000000044213246003441017763 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function def get_package_data(): # pragma: no cover return { str('asdf.tags.fits.tests'): ['data/*.fits'] } asdf-1.3.3/asdf/tags/fits/tests/0000755000175000017500000000000013246031665015751 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/fits/tests/data/0000755000175000017500000000000013246031665016662 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/fits/tests/data/complex.fits0000644000175000017500000010340013246003441021205 0ustar dandan00000000000000SIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 0 / number of array dimensions EXTEND = T Top Level MIRI Metadata DATE = '2013-08-30T10:49:55.070373' / The date this file was created (UTC) FILENAME= 'MiriDarkReferenceModel_test.fits' / The name of the file TELESCOP= 'JWST ' / The telescope used to acquire the data Information about the observation DATE-OBS= '2013-08-30T10:49:55.000000' / The date the observation was made (UTC) Information about the instrument and detectors INSTRUME= 'MIRI ' / Instrument used to acquire data DETECTOR= 'SW ' / String mnemonic for SCA used to acquire data CCC_POS = 'CLOSED ' / MIRI CCC position. 
CHANNEL = 'A ' / MIRI sub-channel relevant (MRS) DECKTEMP= 11.0 / MIRI Deck Temperature (K) DETTEMP = 6.0 / Detector temperature (K) Information related to the exposure TFRAME = 1.0 / Time between consecutive frames (in seconds) EFFINTTM= 10.0 / Integration time (in seconds) TGROUP = 10.0 / Integration time per group (in seconds) EXPEND = '2013-08-30T10:49:55.000000' / Time of the last readout of this exposuNFRAMES = 1 / Number of frames coadded in a group NINTS = 1 / Number of integrations in exposure NGROUPS = 1 / Number of groups in integration GROUPGAP= 0 / Number of frames dropped between groups READPATT= 'FAST ' / Readout pattern ROWRSETS= 0 / Width of reset pulse for SCA (cycles) FRMRSETS= 3 / Number of extra frame resets for SCA GRPAVG = 1 / Number of groups averaged INTAVG = 1 / Number of integrations averaged Information about the target TARG_RA = 0.0 / RA of the target TARG_DEC= 0.0 / DEC of the target Subarray coordinates SUBSTRT1= 0 / starting pixel in axis 1 direction SUBSTRT2= 0 / starting pixel in axis 2 direction SUBSIZE1= 0 / number of pixels in axis 1 direction SUBSIZE2= 0 / number of pixels in axis 2 direction TYPE = 'Dark ' / Type of data found in file Information about the data array Information about the error array Information about the total error on fit array END XTENSION= 'IMAGE ' / Image extension BITPIX = -32 / array data type NAXIS = 4 / number of array dimensions NAXIS1 = 4 NAXIS2 = 3 NAXIS3 = 3 NAXIS4 = 2 PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups EXTNAME = 'SCI ' / extension name BUNIT = 'DN ' / Units of the data array END <# < <=# =L=L=\)= =Q==G=<# < <=# =L=L=\)= =Q==G=<# < <=# =L=L=\)= =Q==G=<# < <=# =L=L=\)= =Q==G=<# < <=# =L=L=\)= =Q==G=<# < <=# =L=L=\)= =Q==G=XTENSION= 'IMAGE ' / Image extension BITPIX = -32 / array data type NAXIS = 4 / number of array dimensions NAXIS1 = 4 NAXIS2 = 3 NAXIS3 = 3 NAXIS4 = 2 PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups EXTNAME = 'ERR ' / extension name BUNIT = 'DN ' / Units of the error array END ?@@@@@@@AAA A0A@?@@@@@@@AAA A0A@?@@@@@@@AAA A0A@?@@@@@@@AAA A0A@?@@@@@@@AAA A0A@?@@@@@@@AAA A0A@XTENSION= 'IMAGE ' / Image extension BITPIX = -32 / array data type NAXIS = 2 / number of array dimensions NAXIS1 = 4 NAXIS2 = 3 PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups EXTNAME = 'FITERR ' / extension name BUNIT = 'DN ' / Units of the fit error array END <# < <=# =L=L=\)= =Q==G=XTENSION= 'IMAGE ' / Image extension BITPIX = 16 / array data type NAXIS = 2 / number of array dimensions NAXIS1 = 4 NAXIS2 = 3 PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups EXTNAME = 'DQ ' / extension name END XTENSION= 'IMAGE ' / Image extension BITPIX = 8 / array data type NAXIS = 1 / number of array dimensions NAXIS1 = 1231 PCOUNT = 0 / number of parameters GCOUNT = 1 / number of groups EXTNAME = 'METADATA' / extension name END { "photometry": {}, "observation": { "date": "2013-08-30T10:49:55" }, "err": { "units": "DN" }, "datatype": "Dark", "ref_file": { "flat": {}, "mask": {}, "linearity": {}, "amplifier": {}, "photom": {}, "dark": {}, "gain": {}, "readnoise": {} }, "dq": {}, "coordinates": {}, "filename": "MiriDarkReferenceModel_test.fits", "fiterr": { "units": "DN" }, "instrument": { "ccc_pos": "CLOSED", "deck_temperature": 11.0, "detector_temperature": 6.0, "detector": "SW", "type": "MIRI", "channel": "A" }, "telescope": "JWST", "cal_step": {}, "date": "2013-08-30T10:49:55.070374", "subarray": { "xstart": 0, "ysize": 0, "ystart": 0, "xsize": 0 }, 
"exposure": { "integration_time": 10.0, "groups_averaged": 1, "ngroups": 1, "expend": "2013-08-30T10:49:55", "frame_time": 1.0, "group_integration_time": 10.0, "integrations_averaged": 1, "readpatt": "FAST", "frame_resets": 3, "reset_time": 0, "nframes": 1, "groupgap": 0, "nints": 1 }, "data": { "units": "DN" }, "target": { "dec": 0.0, "ra": 0.0 } }asdf-1.3.3/asdf/tags/fits/tests/__init__.py0000644000175000017500000000000013246003441020037 0ustar dandan00000000000000asdf-1.3.3/asdf/tags/fits/tests/test_fits.py0000644000175000017500000000220213246003441020312 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function try: import astropy except ImportError: HAS_ASTROPY = False else: HAS_ASTROPY = True import pytest import os import numpy as np from ....tests import helpers @pytest.mark.skipif('not HAS_ASTROPY') def test_complex_structure(tmpdir): from astropy.io import fits with fits.open(os.path.join( os.path.dirname(__file__), 'data', 'complex.fits'), memmap=False) as hdulist: tree = { 'fits': hdulist } helpers.assert_roundtrip_tree(tree, tmpdir) @pytest.mark.skipif('not HAS_ASTROPY') def test_fits_table(tmpdir): from astropy.io import fits a = np.array( [(0, 1), (2, 3)], dtype=[(str('A'), int), (str('B'), int)]) print(a.dtype) h = fits.HDUList() h.append(fits.BinTableHDU.from_columns(a)) tree = {'fits': h} def check_yaml(content): assert b'!core/table' in content helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_yaml) asdf-1.3.3/asdf/tags/fits/__init__.py0000644000175000017500000000031013246003441016701 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .fits import FitsType asdf-1.3.3/asdf/tags/time/0000755000175000017500000000000013246031665014600 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/time/time.py0000644000175000017500000001174313246003441016105 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np from numpy.testing import assert_array_equal import six from ...asdftypes import AsdfType from ...versioning import AsdfSpec from ... 
import yamlutil _guessable_formats = set(['iso', 'byear', 'jyear', 'yday']) _astropy_format_to_asdf_format = { 'isot': 'iso', 'byear_str': 'byear', 'jyear_str': 'jyear' } def _assert_earthlocation_equal(a, b): from astropy import __version__ as version assert_array_equal(a.x, b.x) assert_array_equal(a.y, b.y) assert_array_equal(a.z, b.z) # This allows us to test against earlier versions of astropy # This code path does get tested in CI, but we don't run a coverage test if version < '2.0.0': # pragma: no cover assert_array_equal(a.latitude, b.latitude) assert_array_equal(a.longitude, b.longitude) else: assert_array_equal(a.lat, b.lat) assert_array_equal(a.lon, b.lon) class TimeType(AsdfType): name = 'time/time' version = '1.1.0' supported_versions = ['1.0.0', AsdfSpec('>=1.1.0')] types = ['astropy.time.core.Time'] requires = ['astropy'] @classmethod def to_tree(cls, node, ctx): from astropy import time format = node.format if format == 'byear': node = time.Time(node, format='byear_str') elif format == 'jyear': node = time.Time(node, format='jyear_str') elif format in ('fits', 'datetime', 'plot_date'): node = time.Time(node, format='isot') format = node.format format = _astropy_format_to_asdf_format.get(format, format) guessable_format = format in _guessable_formats if node.scale == 'utc' and guessable_format: if node.isscalar: return node.value else: return yamlutil.custom_tree_to_tagged_tree( node.value, ctx) d = {'value': yamlutil.custom_tree_to_tagged_tree(node.value, ctx)} if not guessable_format: d['format'] = format if node.scale != 'utc': d['scale'] = node.scale if node.location is not None: x, y, z = node.location.x, node.location.y, node.location.z # Preserve backwards compatibility for writing the old schema # This allows WCS to test backwards compatibility with old frames # This code does get tested in CI, but we don't run a coverage test if cls.version == '1.0.0': # pragma: no cover unit = node.location.unit d['location'] = { 'x': x, 'y': y, 'z': z, 'unit': unit } else: d['location'] = { # It seems like EarthLocations can be represented either in # terms of Cartesian coordinates or latitude and longitude, so # we rather arbitrarily choose the former for our representation 'x': yamlutil.custom_tree_to_tagged_tree(x, ctx), 'y': yamlutil.custom_tree_to_tagged_tree(y, ctx), 'z': yamlutil.custom_tree_to_tagged_tree(z, ctx) } return d @classmethod def from_tree(cls, node, ctx): from astropy import time from astropy import units as u from astropy.units import Quantity from astropy.coordinates import EarthLocation if isinstance(node, (six.string_types, list, np.ndarray)): t = time.Time(node) format = _astropy_format_to_asdf_format.get(t.format, t.format) if format not in _guessable_formats: raise ValueError("Invalid time '{0}'".format(node)) return t value = node['value'] format = node.get('format') scale = node.get('scale') location = node.get('location') if location is not None: unit = location.get('unit', u.m) # This ensures that we can read the v.1.0.0 schema and convert it # to the new EarthLocation object, which expects Quantity components for comp in ['x', 'y', 'z']: if not isinstance(location[comp], Quantity): location[comp] = Quantity(location[comp], unit=unit) location = EarthLocation.from_geocentric( location['x'], location['y'], location['z']) return time.Time(value, format=format, scale=scale, location=location) @classmethod def assert_equal(cls, old, new): from astropy.coordinates import EarthLocation assert old.format == new.format assert old.scale == new.scale if 
isinstance(old.location, EarthLocation): assert isinstance(new.location, EarthLocation) _assert_earthlocation_equal(old.location, new.location) else: assert old.location == new.location assert_array_equal(old, new) asdf-1.3.3/asdf/tags/time/tests/0000755000175000017500000000000013246031665015742 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/time/tests/test_time.py0000644000175000017500000001044013246003441020277 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six import pytest import datetime from collections import OrderedDict astropy = pytest.importorskip('astropy') from astropy import time import numpy as np from .... import asdf, AsdfFile from .... import tagged from .... import yamlutil from .... import schema as asdf_schema from ....tests import helpers def _walk_schema(schema, callback, ctx={}): def recurse(schema, path, combiner, ctx): if callback(schema, path, combiner, ctx, recurse): return for c in ['allOf', 'not']: for sub in schema.get(c, []): recurse(sub, path, c, ctx) for c in ['anyOf', 'oneOf']: for i, sub in enumerate(schema.get(c, [])): recurse(sub, path + [i], c, ctx) if schema.get('type') == 'object': for key, val in six.iteritems(schema.get('properties', {})): recurse(val, path + [key], combiner, ctx) if schema.get('type') == 'array': items = schema.get('items', {}) if isinstance(items, list): for i, item in enumerate(items): recurse(item, path + [i], combiner, ctx) elif len(items): recurse(items, path + ['items'], combiner, ctx) recurse(schema, [], None, ctx) def _flatten_combiners(schema): newschema = OrderedDict() def add_entry(path, schema, combiner): # TODO: Simplify? 
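# Walk ``path`` from the root of ``newschema``, creating nested
# ``properties``/``items`` containers as needed, then merge ``schema`` into
# the dict reached at the final position.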
cursor = newschema for i in range(len(path)): part = path[i] if isinstance(part, int): cursor = cursor.setdefault('items', []) while len(cursor) <= part: cursor.append({}) cursor = cursor[part] elif part == 'items': cursor = cursor.setdefault('items', OrderedDict()) else: cursor = cursor.setdefault('properties', OrderedDict()) if i < len(path) - 1 and isinstance(path[i+1], int): cursor = cursor.setdefault(part, []) else: cursor = cursor.setdefault(part, OrderedDict()) cursor.update(schema) def callback(schema, path, combiner, ctx, recurse): type = schema.get('type') schema = OrderedDict(schema) if type == 'object': del schema['properties'] elif type == 'array': del schema['items'] if 'allOf' in schema: del schema['allOf'] if 'anyOf' in schema: del schema['anyOf'] add_entry(path, schema, combiner) _walk_schema(schema, callback) return newschema def test_time(tmpdir): time_array = time.Time( np.arange(100), format="unix") tree = { 'large_time_array': time_array } helpers.assert_roundtrip_tree(tree, tmpdir) def test_time_with_location(tmpdir): # See https://github.com/spacetelescope/asdf/issues/341 from astropy import units as u from astropy.coordinates.earth import EarthLocation location = EarthLocation(x=[1,2]*u.m, y=[3,4]*u.m, z=[5,6]*u.m) t = time.Time([1,2], location=location, format='cxcsec') tree = {'time': t} helpers.assert_roundtrip_tree(tree, tmpdir) def test_isot(tmpdir): tree = { 'time': time.Time('2000-01-01T00:00:00.000') } helpers.assert_roundtrip_tree(tree, tmpdir) ff = asdf.AsdfFile(tree) tree = yamlutil.custom_tree_to_tagged_tree(ff.tree, ff) assert isinstance(tree['time'], six.text_type) def test_time_tag(): schema = asdf_schema.load_schema( 'http://stsci.edu/schemas/asdf/time/time-1.1.0', resolve_references=True) schema = _flatten_combiners(schema) date = time.Time(datetime.datetime.now()) tree = {'date': date} asdf = AsdfFile(tree=tree) instance = yamlutil.custom_tree_to_tagged_tree(tree['date'], asdf) asdf_schema.validate(instance, schema=schema) tag = 'tag:stsci.edu:asdf/time/time-1.1.0' date = tagged.tag_object(tag, date) tree = {'date': date} asdf = AsdfFile(tree=tree) instance = yamlutil.custom_tree_to_tagged_tree(tree['date'], asdf) asdf_schema.validate(instance, schema=schema) asdf-1.3.3/asdf/tags/time/tests/__init__.py0000644000175000017500000000000013246003441020030 0ustar dandan00000000000000asdf-1.3.3/asdf/tags/time/__init__.py0000644000175000017500000000031013246003441016672 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .time import TimeType asdf-1.3.3/asdf/tags/__init__.py0000644000175000017500000000046113246003441015743 0ustar dandan00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function # TODO: Import entire tree automatically and make these work like "plugins"? from . import core from . import fits from . import unit from . import time from . import transform from . 
import wcs asdf-1.3.3/asdf/tags/unit/0000755000175000017500000000000013246031665014621 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/unit/unit.py0000644000175000017500000000154413246003441016145 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six from ...asdftypes import AsdfType class UnitType(AsdfType): name = 'unit/unit' types = ['astropy.units.UnitBase'] requires = ['astropy'] @classmethod def to_tree(cls, node, ctx): from astropy.units import Unit, UnitBase if isinstance(node, six.string_types): node = Unit(node, format='vounit', parse_strict='warn') if isinstance(node, UnitBase): return node.to_string(format='vounit') raise TypeError("'{0}' is not a valid unit".format(node)) @classmethod def from_tree(cls, node, ctx): from astropy.units import Unit return Unit(node, format='vounit', parse_strict='silent') asdf-1.3.3/asdf/tags/unit/quantity.py0000644000175000017500000000240313246003441017037 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six from ...yamlutil import custom_tree_to_tagged_tree from ...asdftypes import AsdfType from . import UnitType from ..core import NDArrayType class QuantityType(AsdfType): name = 'unit/quantity' types = ['astropy.units.Quantity'] requires = ['astropy'] version = '1.1.0' @classmethod def to_tree(cls, quantity, ctx): from numpy import isscalar from astropy.units import Quantity node = {} if isinstance(quantity, Quantity): node['value'] = custom_tree_to_tagged_tree(quantity.value, ctx) node['unit'] = custom_tree_to_tagged_tree(quantity.unit, ctx) return node raise TypeError("'{0}' is not a valid Quantity".format(quantity)) @classmethod def from_tree(cls, node, ctx): from astropy.units import Quantity if isinstance(node, Quantity): return node unit = UnitType.from_tree(node['unit'], ctx) value = node['value'] if isinstance(value, NDArrayType): value = value._make_array() return Quantity(value, unit=unit) asdf-1.3.3/asdf/tags/unit/tests/0000755000175000017500000000000013246031665015763 5ustar dandan00000000000000asdf-1.3.3/asdf/tags/unit/tests/test_unit.py0000644000175000017500000000134713246003441020347 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import pytest astropy = pytest.importorskip('astropy') from astropy import units as u from .... import asdf from ....tests import helpers # TODO: Implement defunit def test_unit(): yaml = """ unit: !unit/unit-1.0.0 "2.1798721 10-18kg m2 s-2" """ buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open(buff) as ff: assert ff.tree['unit'].is_equivalent(u.Ry) buff2 = io.BytesIO() ff.write_to(buff2) buff2.seek(0) with asdf.AsdfFile.open(buff2) as ff: assert ff.tree['unit'].is_equivalent(u.Ry) asdf-1.3.3/asdf/tags/unit/tests/test_quantity.py0000644000175000017500000000370613246003441021247 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import pytest astropy = pytest.importorskip('astropy') from astropy import units from .... 
import asdf from ....tests import helpers def roundtrip_quantity(yaml, quantity): buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open(buff) as ff: assert (ff.tree['quantity'] == quantity).all() buff2 = io.BytesIO() ff.write_to(buff2) buff2.seek(0) with asdf.AsdfFile.open(buff2) as ff: assert (ff.tree['quantity'] == quantity).all() def test_value_scalar(tmpdir): testval = 2.71828 testunit = units.kpc yaml = """ quantity: !unit/quantity-1.1.0 value: {} unit: {} """.format(testval, testunit) quantity = units.Quantity(testval, unit=testunit) roundtrip_quantity(yaml, quantity) def test_value_array(tmpdir): testval = [3.14159] testunit = units.kg yaml = """ quantity: !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 {} unit: {} """.format(testval, testunit) quantity = units.Quantity(testval, unit=testunit) roundtrip_quantity(yaml, quantity) def test_value_multiarray(tmpdir): testval = [x*2.3081 for x in range(10)] testunit = units.ampere yaml = """ quantity: !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 {} unit: {} """.format(testval, testunit) quantity = units.Quantity(testval, unit=testunit) roundtrip_quantity(yaml, quantity) def test_value_ndarray(tmpdir): from numpy import array, float64 testval = [[1,2,3],[4,5,6]] testunit = units.km yaml = """ quantity: !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 datatype: float64 data: {} unit: {} """.format(testval, testunit) data = array(testval, float64) quantity = units.Quantity(data, unit=testunit) roundtrip_quantity(yaml, quantity) asdf-1.3.3/asdf/tags/unit/tests/__init__.py0000644000175000017500000000000013246003441020051 0ustar dandan00000000000000asdf-1.3.3/asdf/tags/unit/__init__.py0000644000175000017500000000035313246003441016722 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .unit import UnitType from .quantity import QuantityType asdf-1.3.3/asdf/asdf.py0000644000175000017500000011100113246003441014154 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import datetime import copy import io import re import numpy as np from . import block from . import constants from . import extension from . import generic_io from . import reference from . import schema from . import treeutil from . import util from . import version from . import versioning from . import yamlutil from .tags.core import AsdfObject, Software, HistoryEntry def get_asdf_library_info(): """ Get information about asdf to include in the asdf_library entry in the Tree. """ return Software({ 'name': 'asdf', 'version': version.version, 'homepage': 'http://github.com/spacetelescope/asdf', 'author': 'Space Telescope Science Institute' }) class AsdfFile(versioning.VersionedMixin): """ The main class that represents a ASDF file. """ def __init__(self, tree=None, uri=None, extensions=None, version=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False, copy_arrays=False): """ Parameters ---------- tree : dict or AsdfFile, optional The main tree data in the ASDF file. Must conform to the ASDF schema. uri : str, optional The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, if possible and if created from `AsdfFile.open`. 
extensions : list of AsdfExtension A list of extensions to the ASDF to support when reading and writing ASDF files. See `asdftypes.AsdfExtension` for more information. version : str, optional The ASDF version to use when writing out. If not provided, it will write out in the latest version supported by asdf. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. Set to `True` by default. ignore_unrecognized_tag : bool, optional When `True`, do not raise warnings for unrecognized tags. Set to `False` by default. copy_arrays : bool, optional When `False`, when reading files, attempt to memmap underlying data arrays when possible. """ if extensions is None or extensions == []: self._extensions = extension._builtin_extension_list else: if isinstance(extensions, extension.AsdfExtensionList): self._extensions = extensions else: if not isinstance(extensions, list): extensions = [extensions] extensions.insert(0, extension.BuiltinExtension()) self._extensions = extension.AsdfExtensionList(extensions) self._ignore_version_mismatch = ignore_version_mismatch self._ignore_unrecognized_tag = ignore_unrecognized_tag self._file_format_version = None self._fd = None self._external_asdf_by_uri = {} self._blocks = block.BlockManager(self, copy_arrays=copy_arrays) self._uri = None if tree is None: self.tree = {} elif isinstance(tree, AsdfFile): if self._extensions != tree._extensions: raise ValueError( "Can not copy AsdfFile and change active extensions") self._uri = tree.uri # Set directly to self._tree (bypassing property), since # we can assume the other AsdfFile is already valid. self._tree = tree.tree self.run_modifying_hook('copy_to_new_asdf', validate=False) self.find_references() else: self.tree = tree self.find_references() if uri is not None: self._uri = uri self._comments = [] if version is not None: self.version = version def __enter__(self): return self def __exit__(self, type, value, traceback): if self._fd: # This is ok to always do because GenericFile knows # whether it "owns" the file and should close it. self._fd.__exit__(type, value, traceback) self._fd = None for external in self._external_asdf_by_uri.values(): external.__exit__(type, value, traceback) self._external_asdf_by_uri.clear() self._blocks.close() @property def file_format_version(self): if self._file_format_version is None: return versioning.AsdfVersion(self.version_map['FILE_FORMAT']) else: return self._file_format_version def close(self): """ Close the file handles associated with the `AsdfFile`. """ if self._fd: # This is ok to always do because GenericFile knows # whether it "owns" the file and should close it. self._fd.close() self._fd = None for external in self._external_asdf_by_uri.values(): external.close() self._external_asdf_by_uri.clear() self._blocks.close() def copy(self): return self.__class__( copy.deepcopy(self._tree), self._uri, self._extensions ) __copy__ = __deepcopy__ = copy @property def uri(self): """ Get the URI associated with the `AsdfFile`. In many cases, it is automatically determined from the file handle used to read or write the file. 
""" if self._uri is not None: return self._uri if self._fd is not None: return self._fd._uri return None @property def tag_to_schema_resolver(self): return self._extensions.tag_to_schema_resolver @property def url_mapping(self): return self._extensions.url_mapping @property def type_index(self): return self._extensions.type_index def resolve_uri(self, uri): """ Resolve a (possibly relative) URI against the URI of this ASDF file. May be overridden by base classes to change how URIs are resolved. This does not apply any `uri_mapping` that was passed to the constructor. Parameters ---------- uri : str An absolute or relative URI to resolve against the URI of this ASDF file. Returns ------- uri : str The resolved URI. """ return generic_io.resolve_uri(self.uri, uri) def open_external(self, uri, do_not_fill_defaults=False): """ Open an external ASDF file, from the given (possibly relative) URI. There is a cache (internal to this ASDF file) that ensures each external ASDF file is loaded only once. Parameters ---------- uri : str An absolute or relative URI to resolve against the URI of this ASDF file. do_not_fill_defaults : bool, optional When `True`, do not fill in missing default values. Returns ------- asdffile : AsdfFile The external ASDF file. """ # For a cache key, we want to ignore the "fragment" part. base_uri = util.get_base_uri(uri) resolved_uri = self.resolve_uri(base_uri) # A uri like "#" should resolve back to ourself. In that case, # just return `self`. if resolved_uri == '' or resolved_uri == self.uri: return self asdffile = self._external_asdf_by_uri.get(resolved_uri) if asdffile is None: asdffile = self.open( resolved_uri, do_not_fill_defaults=do_not_fill_defaults) self._external_asdf_by_uri[resolved_uri] = asdffile return asdffile @property def tree(self): """ Get/set the tree of data in the ASDF file. When set, the tree will be validated against the ASDF schema. """ return self._tree @tree.setter def tree(self, tree): asdf_object = AsdfObject(tree) self._validate(asdf_object) self._tree = asdf_object def __getitem__(self, key): return self._tree[key] def __setitem__(self, key, value): self._tree[key] = value @property def comments(self): """ Get the comments after the header, before the tree. """ return self._comments def _validate(self, tree): tagged_tree = yamlutil.custom_tree_to_tagged_tree( tree, self) schema.validate(tagged_tree, self) def validate(self): """ Validate the current state of the tree against the ASDF schema. """ self._validate(self._tree) def make_reference(self, path=[]): """ Make a new reference to a part of this file's tree, that can be assigned as a reference to another tree. Parameters ---------- path : list of str and int, optional The parts of the path pointing to an item in this tree. If omitted, points to the root of the tree. Returns ------- reference : reference.Reference A reference object. Examples -------- For the given AsdfFile ``ff``, add an external reference to the data in an external file:: >>> import asdf >>> flat = asdf.open("http://stsci.edu/reference_files/flat.asdf") # doctest: +SKIP >>> ff.tree['flat_field'] = flat.make_reference(['data']) # doctest: +SKIP """ return reference.make_reference(self, path) @property def blocks(self): """ Get the block manager associated with the `AsdfFile`. """ return self._blocks def set_array_storage(self, arr, array_storage): """ Set the block type to use for the given array data. Parameters ---------- arr : numpy.ndarray The array to set. 
If multiple views of the array are in the tree, only the most recent block type setting will be used, since all views share a single block. array_storage : str Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. """ block = self.blocks[arr] self.blocks.set_array_storage(block, array_storage) def get_array_storage(self, arr): """ Get the block type for the given array data. Parameters ---------- arr : numpy.ndarray """ return self.blocks[arr].array_storage def set_array_compression(self, arr, compression): """ Set the compression to use for the given array data. Parameters ---------- arr : numpy.ndarray The array to set. If multiple views of the array are in the tree, only the most recent compression setting will be used, since all views share a single block. compression : str or None Must be one of: - ``''`` or `None`: no compression - ``zlib``: Use zlib compression - ``bzp2``: Use bzip2 compression - ``lz4``: Use lz4 compression - ``''`` or `None`: no compression - ``input``: Use the same compression as in the file read. If there is no prior file, acts as None. """ self.blocks[arr].output_compression = compression def get_array_compression(self, arr): """ Get the compression type for the given array data. Parameters ---------- arr : numpy.ndarray Returns ------- compression : str or None """ return self.blocks[arr].output_compression @classmethod def _parse_header_line(cls, line): """ Parses the header line in a ASDF file to obtain the ASDF version. """ parts = line.split() if len(parts) != 2 or parts[0] != constants.ASDF_MAGIC: raise ValueError("Does not appear to be a ASDF file.") try: version = versioning.AsdfVersion(parts[1].decode('ascii')) except ValueError: raise ValueError( "Unparseable version in ASDF file: {0}".format(parts[1])) return version @classmethod def _parse_comment_section(cls, content): """ Parses the comment section, between the header line and the Tree or first block. 
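Returns
-------
comments : list of bytes
    The comment lines with the leading ``#`` and surrounding whitespace
    stripped.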
""" comments = [] lines = content.splitlines() for line in lines: if not line.startswith(b'#'): raise ValueError("Invalid content between header and tree") comments.append(line[1:].strip()) return comments @classmethod def _find_asdf_version_in_comments(cls, comments): for comment in comments: parts = comment.split() if len(parts) == 2 and parts[0] == constants.ASDF_STANDARD_COMMENT: try: version = versioning.AsdfVersion(parts[1].decode('ascii')) except ValueError: pass else: return version return None @classmethod def _open_asdf(cls, self, fd, uri=None, mode='r', validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, _force_raw_types=False): """Attempt to populate AsdfFile data from file-like object""" fd = generic_io.get_file(fd, mode=mode, uri=uri) self._fd = fd # The filename is currently only used for tracing warning information self._fname = self._fd._uri if self._fd._uri else '' header_line = fd.read_until(b'\r?\n', 2, "newline", include=True) self._file_format_version = cls._parse_header_line(header_line) self.version = self._file_format_version comment_section = fd.read_until( b'(%YAML)|(' + constants.BLOCK_MAGIC + b')', 5, "start of content", include=False, exception=False) self._comments = cls._parse_comment_section(comment_section) version = cls._find_asdf_version_in_comments(self._comments) if version is not None: self.version = version yaml_token = fd.read(4) tree = {} has_blocks = False if yaml_token == b'%YAM': reader = fd.reader_until( constants.YAML_END_MARKER_REGEX, 7, 'End of YAML marker', include=True, initial_content=yaml_token) # For testing: just return the raw YAML content if _get_yaml_content: yaml_content = reader.read() fd.close() return yaml_content # We parse the YAML content into basic data structures # now, but we don't do anything special with it until # after the blocks have been read tree = yamlutil.load_tree(reader, self, self._ignore_version_mismatch) has_blocks = fd.seek_until(constants.BLOCK_MAGIC, 4, include=True) elif yaml_token == constants.BLOCK_MAGIC: has_blocks = True elif yaml_token != b'': raise IOError("ASDF file appears to contain garbage after header.") if has_blocks: self._blocks.read_internal_blocks( fd, past_magic=True, validate_checksums=validate_checksums) self._blocks.read_block_index(fd, self) tree = reference.find_references(tree, self) if not do_not_fill_defaults: schema.fill_defaults(tree, self) self._validate(tree) tree = yamlutil.tagged_tree_to_custom_tree(tree, self, _force_raw_types) self._tree = tree self.run_hook('post_read') return self @classmethod def _open_impl(cls, self, fd, uri=None, mode='r', validate_checksums=False, do_not_fill_defaults=False, _get_yaml_content=False, _force_raw_types=False): """Attempt to open file-like object as either AsdfFile or AsdfInFits""" if not is_asdf_file(fd): try: # TODO: this feels a bit circular, try to clean up. Also # this introduces another dependency on astropy which may # not be desireable. from . 
import fits_embed return fits_embed.AsdfInFits.open(fd, uri=uri, validate_checksums=validate_checksums, ignore_version_mismatch=self._ignore_version_mismatch, extensions=self._extensions) except ValueError: pass raise ValueError( "Input object does not appear to be ASDF file or FITS with " + "ASDF extension") return cls._open_asdf(self, fd, uri=uri, mode=mode, validate_checksums=validate_checksums, do_not_fill_defaults=do_not_fill_defaults, _get_yaml_content=_get_yaml_content, _force_raw_types=_force_raw_types) @classmethod def open(cls, fd, uri=None, mode='r', validate_checksums=False, extensions=None, do_not_fill_defaults=False, ignore_version_mismatch=True, ignore_unrecognized_tag=False, _force_raw_types=False, copy_arrays=False): """ Open an existing ASDF file. Parameters ---------- fd : string or file-like object May be a string ``file`` or ``http`` URI, or a Python file-like object. uri : string, optional The URI of the file. Only required if the URI can not be automatically determined from `fd`. mode : string, optional The mode to open the file in. Must be ``r`` (default) or ``rw``. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. Requires reading the entire file, so disabled by default. extensions : list of AsdfExtension A list of extensions to the ASDF to support when reading and writing ASDF files. See `asdftypes.AsdfExtension` for more information. do_not_fill_defaults : bool, optional When `True`, do not fill in missing default values. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. Set to `True` by default. ignore_unrecognized_tag : bool, optional When `True`, do not raise warnings for unrecognized tags. Set to `False` by default. copy_arrays : bool, optional When `False`, when reading files, attempt to memmap underlying data arrays when possible. Returns ------- asdffile : AsdfFile The new AsdfFile object. 
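        Examples
        --------
        A minimal sketch of typical usage; the file name ``example.asdf`` and
        the ``data`` key in the tree are illustrative only::

            >>> ff = AsdfFile.open('example.asdf')   # doctest: +SKIP
            >>> data = ff.tree['data']               # doctest: +SKIP
            >>> ff.close()                           # doctest: +SKIP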
""" self = cls(extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag, copy_arrays=copy_arrays) return cls._open_impl( self, fd, uri=uri, mode=mode, validate_checksums=validate_checksums, do_not_fill_defaults=do_not_fill_defaults, _force_raw_types=_force_raw_types) def _write_tree(self, tree, fd, pad_blocks): fd.write(constants.ASDF_MAGIC) fd.write(b' ') fd.write(self.version_map['FILE_FORMAT'].encode('ascii')) fd.write(b'\n') fd.write(b'#') fd.write(constants.ASDF_STANDARD_COMMENT) fd.write(b' ') fd.write(self.version_string.encode('ascii')) fd.write(b'\n') if len(tree): yamlutil.dump_tree(tree, fd, self) if pad_blocks: padding = util.calculate_padding( fd.tell(), pad_blocks, fd.block_size) fd.fast_forward(padding) def _pre_write(self, fd, all_array_storage, all_array_compression, auto_inline): if all_array_storage not in (None, 'internal', 'external', 'inline'): raise ValueError( "Invalid value for all_array_storage: '{0}'".format( all_array_storage)) self._all_array_storage = all_array_storage self._all_array_compression = all_array_compression if auto_inline in (True, False): raise ValueError( "Invalid value for auto_inline: '{0}'".format(auto_inline)) if auto_inline is not None: try: self._auto_inline = int(auto_inline) except ValueError: raise ValueError( "Invalid value for auto_inline: '{0}'".format(auto_inline)) else: self._auto_inline = None if len(self._tree): self.run_hook('pre_write') # This is where we'd do some more sophisticated block # reorganization, if necessary self._blocks.finalize(self) self._tree['asdf_library'] = get_asdf_library_info() def _serial_write(self, fd, pad_blocks, include_block_index): self._write_tree(self._tree, fd, pad_blocks) self.blocks.write_internal_blocks_serial(fd, pad_blocks) self.blocks.write_external_blocks(fd.uri, pad_blocks) if include_block_index: self.blocks.write_block_index(fd, self) def _random_write(self, fd, pad_blocks, include_block_index): self._write_tree(self._tree, fd, False) self.blocks.write_internal_blocks_random_access(fd) self.blocks.write_external_blocks(fd.uri, pad_blocks) if include_block_index: self.blocks.write_block_index(fd, self) fd.truncate() def _post_write(self, fd): if len(self._tree): self.run_hook('post_write') if hasattr(self, '_all_array_storage'): del self._all_array_storage if hasattr(self, '_all_array_compression'): del self._all_array_compression if hasattr(self, '_auto_inline'): del self._auto_inline def update(self, all_array_storage=None, all_array_compression='input', auto_inline=None, pad_blocks=False, include_block_index=True, version=None): """ Update the file on disk in place. Parameters ---------- all_array_storage : string, optional If provided, override the array storage type of all blocks in the file immediately before writing. Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. all_array_compression : string, optional If provided, set the compression type on all binary blocks in the file. Must be one of: - ``''`` or `None`: No compression. - ``zlib``: Use zlib compression. - ``bzp2``: Use bzip2 compression. - ``lz4``: Use lz4 compression. - ``input``: Use the same compression as in the file read. 
If there is no prior file, acts as None auto_inline : int, optional When the number of elements in an array is less than this threshold, store the array as inline YAML, rather than a binary block. This only works on arrays that do not share data with other arrays. Default is 0. pad_blocks : float or bool, optional Add extra space between blocks to allow for updating of the file. If `False` (default), add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. include_block_index : bool, optional If `False`, don't include a block index at the end of the file. (Default: `True`) A block index is never written if the file has a streamed block. version : str, optional The ASDF version to write out. If not provided, it will write out in the latest version supported by asdf. """ fd = self._fd if fd is None: raise ValueError( "Can not update, since there is no associated file") if not fd.writable(): raise IOError( "Can not update, since associated file is read-only") if version is not None: self.version = version if all_array_storage == 'external': # If the file is fully exploded, there's no benefit to # update, so just use write_to() self.write_to(fd, all_array_storage=all_array_storage) fd.truncate() return if not fd.seekable(): raise IOError( "Can not update, since associated file is not seekable") self.blocks.finish_reading_internal_blocks() self._pre_write(fd, all_array_storage, all_array_compression, auto_inline) try: fd.seek(0) if not self.blocks.has_blocks_with_offset(): # If we don't have any blocks that are being reused, just # write out in a serial fashion. self._serial_write(fd, pad_blocks, include_block_index) fd.truncate() return # Estimate how big the tree will be on disk by writing the # YAML out in memory. Since the block indices aren't yet # known, we have to count the number of block references and # add enough space to accommodate the largest block number # possible there. tree_serialized = io.BytesIO() self._write_tree(self._tree, tree_serialized, pad_blocks=False) array_ref_count = [0] from .tags.core.ndarray import NDArrayType for node in treeutil.iter_tree(self._tree): if (isinstance(node, (np.ndarray, NDArrayType)) and self.blocks[node].array_storage == 'internal'): array_ref_count[0] += 1 serialized_tree_size = ( tree_serialized.tell() + constants.MAX_BLOCKS_DIGITS * array_ref_count[0]) if not block.calculate_updated_layout( self.blocks, serialized_tree_size, pad_blocks, fd.block_size): # If we don't have any blocks that are being reused, just # write out in a serial fashion. self._serial_write(fd, pad_blocks, include_block_index) fd.truncate() return fd.seek(0) self._random_write(fd, pad_blocks, include_block_index) fd.flush() finally: self._post_write(fd) def write_to(self, fd, all_array_storage=None, all_array_compression='input', auto_inline=None, pad_blocks=False, include_block_index=True, version=None): """ Write the ASDF file to the given file-like object. `write_to` does not change the underlying file descriptor in the `AsdfFile` object, but merely copies the content to a new file. Parameters ---------- fd : string or file-like object May be a string path to a file, or a Python file-like object. If a string path, the file is automatically closed after writing. If not a string path, all_array_storage : string, optional If provided, override the array storage type of all blocks in the file immediately before writing. Must be one of: - ``internal``: The default. 
The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. all_array_compression : string, optional If provided, set the compression type on all binary blocks in the file. Must be one of: - ``''`` or `None`: No compression. - ``zlib``: Use zlib compression. - ``bzp2``: Use bzip2 compression. - ``lz4``: Use lz4 compression. - ``input``: Use the same compression as in the file read. If there is no prior file, acts as None. auto_inline : int, optional When the number of elements in an array is less than this threshold, store the array as inline YAML, rather than a binary block. This only works on arrays that do not share data with other arrays. Default is 0. pad_blocks : float or bool, optional Add extra space between blocks to allow for updating of the file. If `False` (default), add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. include_block_index : bool, optional If `False`, don't include a block index at the end of the file. (Default: `True`) A block index is never written if the file has a streamed block. version : str, optional The ASDF version to write out. If not provided, it will write out in the latest version supported by asdf. """ original_fd = self._fd if version is not None: self.version = version try: with generic_io.get_file(fd, mode='w') as fd: self._fd = fd self._pre_write(fd, all_array_storage, all_array_compression, auto_inline) try: self._serial_write(fd, pad_blocks, include_block_index) fd.flush() finally: self._post_write(fd) finally: self._fd = original_fd def find_references(self): """ Finds all external "JSON References" in the tree and converts them to `reference.Reference` objects. """ # Set directly to self._tree, since it doesn't need to be # re-validated. self._tree = reference.find_references(self._tree, self) def resolve_references(self, do_not_fill_defaults=False): """ Finds all external "JSON References" in the tree, loads the external content, and places it directly in the tree. Saving a ASDF file after this operation means it will have no external references, and will be completely self-contained. """ # Set to the property self.tree so the resulting "complete" # tree will be validated. self.tree = reference.resolve_references(self._tree, self) def run_hook(self, hookname): """ Run a "hook" for each custom type found in the tree. Parameters ---------- hookname : str The name of the hook. If a `AsdfType` is found with a method with this name, it will be called for every instance of the corresponding custom type in the tree. """ type_index = self.type_index if not type_index.has_hook(hookname): return for node in treeutil.iter_tree(self._tree): hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) if hook is not None: hook(node, self) def run_modifying_hook(self, hookname, validate=True): """ Run a "hook" for each custom type found in the tree. The hook is free to return a different object in order to modify the tree. Parameters ---------- hookname : str The name of the hook. If a `AsdfType` is found with a method with this name, it will be called for every instance of the corresponding custom type in the tree. validate : bool When `True` (default) validate the resulting tree. 
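        Returns
        -------
        tree : dict
            The tree after the hook has been applied, or `None` if no
            managed type defines a hook with the given name.

        Examples
        --------
        A sketch, assuming ``ff`` is an existing `AsdfFile` and that some
        extension type defines a hypothetical ``example_hook`` method::

            >>> ff.run_modifying_hook('example_hook', validate=False)  # doctest: +SKIP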
""" type_index = self.type_index if not type_index.has_hook(hookname): return def walker(node): hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) if hook is not None: return hook(node, self) return node tree = treeutil.walk_and_modify(self.tree, walker) if validate: self._validate(tree) self._tree = tree return self._tree def resolve_and_inline(self): """ Resolves all external references and inlines all data. This produces something that, when saved, is a 100% valid YAML file. """ self.blocks.finish_reading_internal_blocks() self.resolve_references() for b in list(self.blocks.blocks): self.blocks.set_array_storage(b, 'inline') def fill_defaults(self): """ Fill in any values that are missing in the tree using default values from the schema. """ tree = yamlutil.custom_tree_to_tagged_tree(self._tree, self) schema.fill_defaults(tree, self) self._tree = yamlutil.tagged_tree_to_custom_tree(tree, self) def remove_defaults(self): """ Remove any values in the tree that are the same as the default values in the schema """ tree = yamlutil.custom_tree_to_tagged_tree(self._tree, self) schema.remove_defaults(tree, self) self._tree = yamlutil.tagged_tree_to_custom_tree(tree, self) def add_history_entry(self, description, software=None): """ Add an entry to the history list. Parameters ---------- description : str A description of the change. software : dict or list of dict A description of the software used. It should not include asdf itself, as that is automatically notated in the `asdf_library` entry. Each dict must have the following keys: - ``name``: The name of the software - ``author``: The author or institution that produced the software - ``homepage``: A URI to the homepage of the software - ``version``: The version of the software """ if isinstance(software, list): software = [Software(x) for x in software] elif software is not None: software = Software(software) entry = HistoryEntry({ 'description': description, 'time': datetime.datetime.utcnow() }) if software is not None: entry['software'] = software if 'history' not in self.tree: self.tree['history'] = [] self.tree['history'].append(entry) try: self.validate() except: self.tree['history'].pop() raise def is_asdf_file(fd): """ Determine if fd is an ASDF file. Reads the first five bytes and looks for the ``#ASDF`` string. Parameters ---------- fd : str, `~asdf.generic_io.GenericFile` """ if isinstance(fd, generic_io.InputStream): # If it's an InputStream let ASDF deal with it. return True to_close = False if isinstance(fd, AsdfFile): return True elif isinstance(fd, generic_io.GenericFile): pass else: try: fd = generic_io.get_file(fd, mode='r', uri=None) if not isinstance(fd, io.IOBase): to_close = True except ValueError: return False asdf_magic = fd.read(5) if fd.seekable(): fd.seek(0) if to_close: fd.close() if asdf_magic == constants.ASDF_MAGIC: return True return False asdf-1.3.3/asdf/asdftypes.py0000644000175000017500000005162613246003441015261 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import bisect import importlib import warnings import re import six from copy import copy from .compat import lru_cache from . import tagged from . 
import util from .versioning import AsdfVersion, AsdfSpec, get_version_map __all__ = ['format_tag', 'AsdfTypeIndex', 'AsdfType'] _BASIC_PYTHON_TYPES = set(list(six.string_types) + list(six.integer_types) + [float, list, dict, tuple]) # regex used to parse module name from optional version string MODULE_RE = re.compile(r'([a-zA-Z]+)(-(\d+\.\d+\.\d+))?') def format_tag(organization, standard, version, tag_name): """ Format a YAML tag. """ if isinstance(version, AsdfSpec): version = str(version.spec) return 'tag:{0}:{1}/{2}-{3}'.format( organization, standard, tag_name, version) def split_tag_version(tag): """ Split a tag into its base and version. """ name, version = tag.rsplit('-', 1) version = AsdfVersion(version) return name, version def join_tag_version(name, version): """ Join the root and version of a tag back together. """ return '{0}-{1}'.format(name, version) class _AsdfWriteTypeIndex(object): """ The _AsdfWriteTypeIndex is a helper class for AsdfTypeIndex that manages an index of types for writing out ASDF files, i.e. from converting from custom types to tagged_types. It is not always the inverse of the mapping from tags to custom types, since there are likely multiple versions present for a given tag. This uses the `version_map.yaml` file that ships with the ASDF standard to figure out which schemas correspond to a particular version of the ASDF standard. An AsdfTypeIndex manages multiple _AsdfWriteTypeIndex instances for each version the user may want to write out, and they are instantiated on-demand. If version is ``'latest'``, it will just use the highest-numbered versions of each of the schemas. This is currently only used to aid in testing. """ _version_map = None def __init__(self, version, index): self._version = version self._type_by_cls = {} self._type_by_name = {} self._type_by_subclasses = {} self._types_with_dynamic_subclasses = {} def add_type(asdftype): self._type_by_cls[asdftype] = asdftype for typ in asdftype.types: self._type_by_cls[typ] = asdftype for typ2 in util.iter_subclasses(typ): self._type_by_subclasses[typ2] = asdftype if asdftype.handle_dynamic_subclasses: for typ in asdftype.types: self._types_with_dynamic_subclasses[typ] = asdftype def add_by_tag(name, version): tag = join_tag_version(name, version) if tag in index._type_by_tag: asdftype = index._type_by_tag[tag] self._type_by_name[name] = asdftype add_type(asdftype) if self._version == 'latest': for name, versions in six.iteritems(index._versions_by_type_name): add_by_tag(name, versions[-1]) else: try: version_map = get_version_map(self._version) except ValueError: raise ValueError( "Don't know how to write out ASDF version {0}".format( self._version)) for name, _version in six.iteritems(version_map['tags']): add_by_tag(name, AsdfVersion(_version)) # Now add any extension types that aren't known to the ASDF standard for name, versions in six.iteritems(index._versions_by_type_name): if name not in self._type_by_name: add_by_tag(name, versions[-1]) for asdftype in index._unnamed_types: add_type(asdftype) def from_custom_type(self, custom_type): """ Given a custom type, return the corresponding AsdfType definition. """ # Try to find an exact class match first... try: return self._type_by_cls[custom_type] except KeyError: # ...failing that, match any subclasses try: return self._type_by_subclasses[custom_type] except KeyError: # ...failing that, try any subclasses that we couldn't # cache in _type_by_subclasses. This generally only # includes classes that are created dynamically post # Python-import, e.g. 
astropy.modeling._CompoundModel # subclasses. for key, val in six.iteritems( self._types_with_dynamic_subclasses): if issubclass(custom_type, key): self._type_by_cls[custom_type] = val return val return None class AsdfTypeIndex(object): """ An index of the known `AsdfType`s. """ def __init__(self): self._write_type_indices = {} self._type_by_tag = {} self._versions_by_type_name = {} self._best_matches = {} self._real_tag = {} self._unnamed_types = set() self._hooks_by_type = {} self._all_types = set() self._has_warned = {} def add_type(self, asdftype): """ Add a type to the index. """ self._all_types.add(asdftype) if asdftype.yaml_tag is None and asdftype.name is None: return if isinstance(asdftype.name, list): yaml_tags = [asdftype.make_yaml_tag(name) for name in asdftype.name] elif isinstance(asdftype.name, six.string_types): yaml_tags = [asdftype.yaml_tag] elif asdftype.name is None: yaml_tags = [] else: raise TypeError("name must be a string, list or None") for yaml_tag in yaml_tags: self._type_by_tag[yaml_tag] = asdftype name, version = split_tag_version(yaml_tag) versions = self._versions_by_type_name.get(name) if versions is None: self._versions_by_type_name[name] = [version] else: idx = bisect.bisect_left(versions, version) if idx == len(versions) or versions[idx] != version: versions.insert(idx, version) if not len(yaml_tags): self._unnamed_types.add(asdftype) def from_custom_type(self, custom_type, version='latest'): """ Given a custom type, return the corresponding AsdfType definition. """ # Basic Python types should not ever have an AsdfType # associated with them. if custom_type in _BASIC_PYTHON_TYPES: return None write_type_index = self._write_type_indices.get(version) if write_type_index is None: write_type_index = _AsdfWriteTypeIndex(version, self) self._write_type_indices[version] = write_type_index return write_type_index.from_custom_type(custom_type) def _get_version_mismatch(self, name, version, latest_version): warning_string = None if (latest_version.major, latest_version.minor) != \ (version.major, version.minor): warning_string = \ "'{}' with version {} found in file{{}}, but latest " \ "supported version is {}".format( name, version, latest_version) return warning_string def _warn_version_mismatch(self, ctx, tag, warning_string, fname): if warning_string is not None: # Ensure that only a single warning occurs per tag per AsdfFile # TODO: If it is useful to only have a single warning per file on # disk, then use `fname` in the key instead of `ctx`. if not (ctx, tag) in self._has_warned: warnings.warn(warning_string.format(fname)) self._has_warned[(ctx, tag)] = True def fix_yaml_tag(self, ctx, tag, ignore_version_mismatch=True): """ Given a YAML tag, adjust it to the best supported version. If there is no exact match, this finds the newest version understood that is still less than the version in file. Or, the earliest understood version if none are less than the version in the file. If ``ignore_version_mismatch==False``, this function raises a warning if it could not find a match where the major and minor numbers are the same. """ warning_string = None name, version = split_tag_version(tag) fname = " '{}'".format(ctx._fname) if ctx._fname else '' if tag in self._type_by_tag: asdftype = self._type_by_tag[tag] # Issue warnings for the case where there exists a class for the # given tag due to the 'supported_versions' attribute being # defined, but this tag is not the latest version of the type. 
# This prevents 'supported_versions' from affecting the behavior of # warnings that are purely related to YAML validation. if not ignore_version_mismatch and hasattr(asdftype, '_latest_version'): warning_string = self._get_version_mismatch( name, version, asdftype._latest_version) self._warn_version_mismatch(ctx, tag, warning_string, fname) return tag if tag in self._best_matches: best_tag, warning_string = self._best_matches[tag] if not ignore_version_mismatch: self._warn_version_mismatch(ctx, tag, warning_string, fname) return best_tag versions = self._versions_by_type_name.get(name) if versions is None: return tag # The versions list is kept sorted, so bisect can be used to # quickly find the best option. i = bisect.bisect_left(versions, version) i = max(0, i - 1) if not ignore_version_mismatch: warning_string = self._get_version_mismatch( name, version, versions[-1]) self._warn_version_mismatch(ctx, tag, warning_string, fname) best_version = versions[i] best_tag = join_tag_version(name, best_version) self._best_matches[tag] = best_tag, warning_string if tag != best_tag: self._real_tag[best_tag] = tag return best_tag def get_real_tag(self, tag): if tag in self._real_tag: return self._real_tag[tag] elif tag in self._type_by_tag: return tag return None def from_yaml_tag(self, ctx, tag): """ From a given YAML tag string, return the corresponding AsdfType definition. """ tag = self.fix_yaml_tag(ctx, tag) return self._type_by_tag.get(tag) @lru_cache(5) def has_hook(self, hook_name): """ Returns `True` if the given hook name exists on any of the managed types. """ for cls in self._all_types: if hasattr(cls, hook_name): return True return False def get_hook_for_type(self, hookname, typ, version='latest'): """ Get the hook function for the given type, if it exists, else return None. """ hooks = self._hooks_by_type.setdefault(hookname, {}) hook = hooks.get(typ, None) if hook is not None: return hook tag = self.from_custom_type(typ, version) if tag is not None: hook = getattr(tag, hookname, None) if hook is not None: hooks[typ] = hook return hook hooks[typ] = None return None _all_asdftypes = set() def _from_tree_tagged_missing_requirements(cls, tree, ctx): # A special version of AsdfType.from_tree_tagged for when the # required dependencies for an AsdfType are missing. plural, verb = ('s', 'are') if len(cls.requires) else ('', 'is') message = "{0} package{1} {2} required to instantiate '{3}'".format( util.human_list(cls.requires), plural, verb, tree._tag) # This error will be handled by yamlutil.tagged_tree_to_custom_tree, which # will cause a warning to be issued indicating that the tree failed to be # converted. raise TypeError(message) class ExtensionTypeMeta(type): """ Custom class constructor for extension types. 
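    At class-creation time this metaclass checks that the modules listed in
    ``requires`` can be imported (entries may carry an optional minimum
    version suffix such as ``'somepackage-1.2.3'``, as parsed by
    ``MODULE_RE``); when a requirement is missing, ``from_tree_tagged`` is
    replaced with a stub that raises a `TypeError` naming the missing
    packages.  It also resolves string entries in ``types`` to actual
    classes, derives ``yaml_tag`` from ``name`` when one is not given
    explicitly, and builds versioned sibling classes for each entry in
    ``supported_versions`` other than the primary version.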
""" _import_cache = {} @classmethod def _has_required_modules(cls, requires): for string in requires: has_module = True match = MODULE_RE.match(string) modname, _, version = match.groups() if modname in cls._import_cache: if not cls._import_cache[modname]: return False try: module = importlib.import_module(modname) if version and hasattr(module, '__version__'): if module.__version__ < version: has_module = False except ImportError: has_module = False finally: cls._import_cache[modname] = has_module if not has_module: return False return True @classmethod def _find_in_bases(cls, attrs, bases, name, default=None): if name in attrs: return attrs[name] for base in bases: if hasattr(base, name): return getattr(base, name) return default @property def versioned_siblings(mcls): return getattr(mcls, '__versioned_siblings') or [] def __new__(mcls, name, bases, attrs): requires = mcls._find_in_bases(attrs, bases, 'requires', []) if not mcls._has_required_modules(requires): attrs['from_tree_tagged'] = classmethod( _from_tree_tagged_missing_requirements) attrs['types'] = [] attrs['has_required_modules'] = False else: attrs['has_required_modules'] = True types = mcls._find_in_bases(attrs, bases, 'types', []) new_types = [] for typ in types: if isinstance(typ, six.string_types): typ = util.resolve_name(typ) new_types.append(typ) attrs['types'] = new_types cls = super(ExtensionTypeMeta, mcls).__new__(mcls, name, bases, attrs) if hasattr(cls, 'version'): if not isinstance(cls.version, (AsdfVersion, AsdfSpec)): cls.version = AsdfVersion(cls.version) if hasattr(cls, 'name'): if isinstance(cls.name, six.string_types): if 'yaml_tag' not in attrs: cls.yaml_tag = cls.make_yaml_tag(cls.name) elif isinstance(cls.name, list): pass elif cls.name is not None: raise TypeError("name must be string or list") if hasattr(cls, 'supported_versions'): if not isinstance(cls.supported_versions, (list, set)): cls.supported_versions = [cls.supported_versions] supported_versions = set() for version in cls.supported_versions: if not isinstance(version, (AsdfVersion, AsdfSpec)): version = AsdfVersion(version) # This should cause an exception for invalid input supported_versions.add(version) # We need to convert back to a list here so that the 'in' operator # uses actual comparison instead of hash equality cls.supported_versions = list(supported_versions) siblings = list() for version in cls.supported_versions: if version != cls.version: new_attrs = copy(attrs) new_attrs['version'] = version new_attrs['supported_versions'] = set() new_attrs['_latest_version'] = cls.version siblings.append( ExtensionTypeMeta. __new__(mcls, name, bases, new_attrs)) setattr(cls, '__versioned_siblings', siblings) return cls class AsdfTypeMeta(ExtensionTypeMeta): """ Keeps track of `AsdfType` subclasses that are created, and stores them in `AsdfTypeIndex`. """ def __new__(mcls, name, bases, attrs): cls = super(AsdfTypeMeta, mcls).__new__(mcls, name, bases, attrs) # Classes using this metaclass get added to the list of built-in # extensions _all_asdftypes.add(cls) return cls class ExtensionType(object): """ The base class of all custom types in the tree. Besides the attributes defined below, most subclasses will also override `to_tree` and `from_tree`. To customize how the type's schema is located, override `get_schema_path`. Attributes ---------- name : str The name of the type. organization : str The organization responsible for the type. standard : str The standard the type is defined in. For built-in ASDF types, this is ``"asdf"``. 
version : 3-tuple of int The version of the standard the type is defined in. supported_versions : set If provided, indicates explicit compatibility with the given set of versions. Other versions of the same schema that are not included in this set will not be converted to custom types with this class. yaml_tag : str The YAML tag to use for the type. If not provided, it will be automatically generated from name, organization, standard and version. types : list of Python types Custom Python types that, when found in the tree, will be converted into basic types for YAML output. validators : dict Mapping JSON Schema keywords to validation functions for jsonschema. Useful if the type defines extra types of validation that can be performed. requires : list of str A list of Python packages that are required to instantiate the object. """ name = None organization = 'stsci.edu' standard = 'asdf' version = (1, 0, 0) supported_versions = set() types = [] handle_dynamic_subclasses = False validators = {} requires = [] yaml_tag = None @classmethod def make_yaml_tag(cls, name): return format_tag( cls.organization, cls.standard, cls.version, name) @classmethod def to_tree(cls, node, ctx): """ Converts from a custom type to any of the basic types (dict, list, str, number) supported by YAML. In most cases, must be overridden by subclasses. """ return node.__class__.__bases__[0](node) @classmethod def to_tree_tagged(cls, node, ctx): """ Converts from a custom type to any of the basic types (dict, list, str, number) supported by YAML. The result should be a tagged object. Overriding this, rather than the more common `to_tree`, allows types to customize how the result is tagged. """ obj = cls.to_tree(node, ctx) return tagged.tag_object(cls.yaml_tag, obj, ctx=ctx) @classmethod def from_tree(cls, tree, ctx): """ Converts from basic types to a custom type. """ return cls(tree) @classmethod def from_tree_tagged(cls, tree, ctx): """ Converts from basic types to a custom type. Overriding this, rather than the more common `from_tree`, allows types to deal with the tag directly. """ return cls.from_tree(tree.data, ctx) @classmethod def incompatible_version(cls, version): """ If this tag class explicitly identifies compatible versions then this checks whether a given version is compatible or not. Otherwise, all versions are assumed to be compatible. Child classes can override this method to affect how version compatiblity for this type is determined. """ if cls.supported_versions: if version not in cls.supported_versions: return True return False @six.add_metaclass(AsdfTypeMeta) @six.add_metaclass(util.InheritDocstrings) class AsdfType(ExtensionType): """ Base class for all built-in ASDF types. Types that inherit this class will be automatically added to the list of built-ins. This should *not* be used for user-defined extensions. """ @six.add_metaclass(ExtensionTypeMeta) @six.add_metaclass(util.InheritDocstrings) class CustomType(ExtensionType): """ Base class for all user-defined types. Unlike classes that inherit AsdfType, classes that inherit this class will *not* automatically be added to the list of built-ins. This should be used for user-defined extensions. 
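    Examples
    --------
    A minimal sketch of a user-defined type; the tag name, organization and
    standard below are illustrative, and a real extension would also provide
    a matching schema through an `AsdfExtension`::

        >>> import fractions
        >>> class FractionType(CustomType):
        ...     name = 'fraction'
        ...     organization = 'nowhere.org'
        ...     standard = 'custom'
        ...     version = '1.0.0'
        ...     types = [fractions.Fraction]
        ...
        ...     @classmethod
        ...     def to_tree(cls, node, ctx):
        ...         # Store a Fraction as a two-element [numerator, denominator] list
        ...         return [node.numerator, node.denominator]
        ...
        ...     @classmethod
        ...     def from_tree(cls, tree, ctx):
        ...         return fractions.Fraction(tree[0], tree[1])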
""" asdf-1.3.3/asdf/version.py0000644000175000017500000001620013246031664014741 0ustar dandan00000000000000# Autogenerated by Astropy-affiliated package asdf's setup.py on 2018-03-01 17:09:08 from __future__ import unicode_literals import datetime import locale import os import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return text def update_git_devstr(version, path=None): """ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. """ try: # Quick way to determine if we're in git or not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not devstr: # Probably not in git so just pass silently return version if 'dev' in version: # update to the current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr else: # otherwise it's already the true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None): """ Determines the number of revisions in this repository. Parameters ---------- sha : bool If True, the full SHA1 hash will be returned. Otherwise, the total count of commits in the repository will be used as a "revision number". show_warning : bool If True, issue a warning if git returns an error code, otherwise errors pass silently. path : str or None If a string, specifies the directory to look in to find the git repository. If `None`, the current working directory is used, and must be the root of the git repository. If given a filename it uses the directory containing that file. Returns ------- devversion : str Either a string with the revision number (if `sha` is False), the SHA1 hash of the current commit (if `sha` is True), or an empty string if git version info could not be identified. """ if path is None: path = os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just the hash of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as e: if show_warning: warnings.warn('Error running git: ' + str(e)) return (None, b'', b'') if p.returncode == 128: if show_warning: warnings.warn('No git repository present at {0!r}! Using ' 'default dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode == 129: if show_warning: warnings.warn('Your git looks old (does it support {0}?); ' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd) if not sha and returncode == 128: # git returns 128 if the command is not run from within a git # repository tree. In this case, a warning is produced above but we # return the default dev version of '0'. 
return '0' elif not sha and returncode == 129: # git returns 129 if a command option failed to parse; in # particular this could happen in git versions older than 1.7.2 # where the --count option is not supported # Also use --abbrev-commit and --abbrev=0 to display the minimum # number of characters needed per-commit (rather than the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall back on the old method of getting all revisions and counting # the lines if returncode == 0: return str(stdout.count(b'\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function is tested but it is only ever executed within a subprocess when # creating a fake package, so it doesn't get picked up by coverage metrics. def _get_repo_path(pathname, levels=None): # pragma: no cover """ Given a file or directory name, determine the root of the git repository this path is under. If given, this won't look any higher than ``levels`` (that is, if ``levels=0`` then the given path must be the root of the git repository and is returned if so. Returns `None` if the given path could not be determined to belong to a git repo. """ if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level = 0 while levels is None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir) return None _packagename = "asdf" _last_generated_version = "1.3.3" _last_githash = "a112e78aaa901c5f9dbbced2b60425c992edf896" # Determine where the source code for this module # lives. If __file__ is not a filesystem path then # it is assumed not to live in a git repo at all. if _get_repo_path(__file__, levels=len(_packagename.split('.'))): version = update_git_devstr(_last_generated_version, path=__file__) githash = get_git_devstr(sha=True, show_warning=False, path=__file__) or _last_githash else: # The file does not appear to live in a git repo so don't bother # invoking git version = _last_generated_version githash = _last_githash major = 1 minor = 3 bugfix = 3 release = True timestamp = datetime.datetime(2018, 3, 1, 17, 9, 8) debug = False try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" asdf-1.3.3/asdf/extension.py0000644000175000017500000001267213246003441015271 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import abc import six from . import asdftypes from . import resolver @six.add_metaclass(abc.ABCMeta) class AsdfExtension(object): """ Subclass to define an extension to ASDF. """ @classmethod def __subclasshook__(cls, C): if cls is AsdfExtension: return (hasattr(C, 'types') and hasattr(C, 'tag_mapping') and hasattr(C, 'url_mapping')) return NotImplemented @abc.abstractproperty def types(self): """ A list of AsdfType subclasses that describe how to store custom objects to and from ASDF. """ pass @abc.abstractproperty def tag_mapping(self): """ A list of 2-tuples or callables mapping YAML tag prefixes to JSON Schema URL prefixes. 
For each entry: - If a 2-tuple, the first part of the tuple is a YAML tag prefix to match. The second part is a string, where case the following are available as Python formatting tokens: - ``{tag}``: the complete YAML tag. - ``{tag_suffix}``: the part of the YAML tag after the matched prefix. - ``{tag_prefix}``: the matched YAML tag prefix. - If a callable, it is passed the entire YAML tag must return the entire JSON schema URL if it matches, otherwise, return `None`. Note that while JSON Schema URLs uniquely define a JSON Schema, they do not have to actually exist on an HTTP server and be fetchable (much like XML namespaces). For example, to match all YAML tags with the ``tag:nowhere.org:custom` prefix to the ``http://nowhere.org/schemas/custom/`` URL prefix:: return [('tag:nowhere.org:custom/', 'http://nowhere.org/schemas/custom/{tag_suffix}')] """ pass @abc.abstractproperty def url_mapping(self): """ A list of 2-tuples or callables mapping JSON Schema URLs to other URLs. This is useful if the JSON Schemas are not actually fetchable at their corresponding URLs but are on the local filesystem, or, to save bandwidth, we have a copy of fetchable schemas on the local filesystem. If neither is desirable, it may simply be the empty list. For each entry: - If a 2-tuple, the first part is a URL prefix to match. The second part is a string, where the following are available as Python formatting tokens: - ``{url}``: The entire JSON schema URL - ``{url_prefix}``: The matched URL prefix - ``{url_suffix}``: The part of the URL after the prefix. - If a callable, it is passed the entire JSON Schema URL and must return a resolvable URL pointing to the schema content. If it doesn't match, should return `None`. For example, to map a remote HTTP URL prefix to files installed alongside as data alongside Python module:: return [('http://nowhere.org/schemas/custom/1.0.0/', asdf.util.filepath_to_url( os.path.join(SCHEMA_PATH, 'stsci.edu')) + '/{url_suffix}.yaml' )] """ pass class AsdfExtensionList(object): """ Manage a set of extensions that are in effect. """ def __init__(self, extensions): tag_mapping = [] url_mapping = [] validators = {} self._type_index = asdftypes.AsdfTypeIndex() for extension in extensions: if not isinstance(extension, AsdfExtension): raise TypeError( "Extension must implement asdftypes.AsdfExtension " "interface") tag_mapping.extend(extension.tag_mapping) url_mapping.extend(extension.url_mapping) for typ in extension.types: self._type_index.add_type(typ) validators.update(typ.validators) for sibling in typ.versioned_siblings: self._type_index.add_type(sibling) validators.update(sibling.validators) self._tag_mapping = resolver.Resolver(tag_mapping, 'tag') self._url_mapping = resolver.Resolver(url_mapping, 'url') self._validators = validators @property def tag_to_schema_resolver(self): return self._tag_mapping @property def url_mapping(self): return self._url_mapping @property def type_index(self): return self._type_index @property def validators(self): return self._validators class BuiltinExtension(object): """ This is the "extension" to ASDF that includes all the built-in tags. Even though it's not really an extension and it's always available, it's built in the same way as an extension. 
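    Examples
    --------
    A sketch mirroring the module-level singleton defined at the bottom of
    this module::

        >>> extension_list = AsdfExtensionList([BuiltinExtension()])
        >>> resolver = extension_list.tag_to_schema_resolver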
""" @property def types(self): return asdftypes._all_asdftypes @property def tag_mapping(self): return [ ('tag:stsci.edu:asdf', 'http://stsci.edu/schemas/asdf{tag_suffix}') ] @property def url_mapping(self): return resolver.DEFAULT_URL_MAPPING # A special singleton for the common case of when no extensions are used. _builtin_extension_list = AsdfExtensionList([BuiltinExtension()]) asdf-1.3.3/asdf/setup_package.py0000644000175000017500000000226213246003441016062 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os def get_package_data(): # pragma: no cover ASDF_STANDARD_ROOT = os.environ.get("ASDF_STANDARD_ROOT", "asdf-standard") schemas = [] root = os.path.join(ASDF_STANDARD_ROOT, "schemas") for node, dirs, files in os.walk(root): for fname in files: if fname.endswith('.yaml'): schemas.append( os.path.relpath( os.path.join(node, fname), root)) reference_files = [] root = os.path.join(ASDF_STANDARD_ROOT, "reference_files") for node, dirs, files in os.walk(root): for fname in files: if fname.endswith('.yaml') or fname.endswith('.asdf'): reference_files.append( os.path.relpath( os.path.join(node, fname), root)) return { str('asdf.schemas'): schemas, str('asdf.reference_files'): reference_files } def requires_2to3(): # pragma: no cover return False asdf-1.3.3/asdf/reference.py0000644000175000017500000001256613246003441015215 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Manages external references in the YAML tree using the `JSON Reference standard `__ and `JSON Pointer standard `__. """ from __future__ import absolute_import, division, unicode_literals, print_function from collections import Sequence import weakref import numpy as np from six.moves.urllib import parse as urlparse from .asdftypes import AsdfType from . import generic_io from . import treeutil from . import util __all__ = [ 'resolve_fragment', 'Reference', 'find_references', 'resolve_references', 'make_reference'] def resolve_fragment(tree, pointer): """ Resolve a JSON Pointer within the tree. 
""" pointer = pointer.lstrip(u"/") parts = urlparse.unquote(pointer).split(u"/") if pointer else [] for part in parts: part = part.replace(u"~1", u"/").replace(u"~0", u"~") if isinstance(tree, Sequence): # Array indexes should be turned into integers try: part = int(part) except ValueError: pass try: tree = tree[part] except (TypeError, LookupError): raise ValueError( "Unresolvable reference: '{0}'".format(pointer)) return tree class Reference(AsdfType): yaml_tag = 'tag:yaml.org,2002:map' def __init__(self, uri, base_uri=None, asdffile=None, target=None): self._uri = uri if asdffile is not None: self._asdffile = weakref.ref(asdffile) self._base_uri = base_uri self._target = target def _get_target(self, do_not_fill_defaults=False): if self._target is None: base_uri = self._base_uri if base_uri is None: base_uri = self._asdffile().uri uri = generic_io.resolve_uri(base_uri, self._uri) asdffile = self._asdffile().open_external( uri, do_not_fill_defaults=do_not_fill_defaults) parts = urlparse.urlparse(self._uri) fragment = parts.fragment self._target = resolve_fragment(asdffile.tree, fragment) return self._target def __repr__(self): # repr alone should not force loading of the reference if self._target is None: return "".format( self._uri) else: return "".format(repr(self._target)) def __str__(self): # str alone should not force loading of the reference if self._target is None: return "".format( self._uri) else: return str(self._target) def __len__(self): return len(self._get_target()) def __getattr__(self, attr): if attr == '_tag': return None try: return getattr(self._get_target(), attr) except: raise AttributeError("No attribute '{0}'".format(attr)) def __getitem__(self, item): return self._get_target()[item] def __setitem__(self, item, val): self._get_target()[item] = val def __array__(self): return np.asarray(self._get_target()) def __call__(self, do_not_fill_defaults=False): return self._get_target(do_not_fill_defaults=do_not_fill_defaults) def __contains__(self, item): return item in self._get_target() @classmethod def to_tree(self, data, ctx): if ctx.uri is not None: uri = generic_io.relative_uri(ctx.uri, data._uri) else: uri = data._uri return {'$ref': uri} @classmethod def validate(self, data): pass def find_references(tree, ctx): """ Find all of the JSON references in the tree, and convert them into `Reference` objects. """ def do_find(tree, json_id): if isinstance(tree, dict) and '$ref' in tree: return Reference(tree['$ref'], json_id, asdffile=ctx) return tree return treeutil.walk_and_modify(tree, do_find) def resolve_references(tree, ctx, do_not_fill_defaults=False): """ Resolve all of the references in the tree, by loading the external data and inserting it directly into the tree. """ def do_resolve(tree): if isinstance(tree, Reference): return tree(do_not_fill_defaults=do_not_fill_defaults) return tree tree = find_references(tree, ctx) return treeutil.walk_and_modify(tree, do_resolve) def make_reference(asdffile, path): """ Make a reference to a subtree of the given ASDF file. Parameters ---------- asdffile : AsdfFile path : list of str and int, optional The parts of the path pointing to an item in this tree. If omitted, points to the root of the tree. Returns ------- reference : reference.Reference A reference object. 
""" path_str = '/'.join( x.replace(u"~", u"~0").replace(u"/", u"~1") for x in path) target = resolve_fragment(asdffile.tree, path_str) if asdffile.uri is None: raise ValueError( "Can not make a reference to a AsdfFile without an associated URI.") base_uri = util.get_base_uri(asdffile.uri) uri = base_uri + '#' + path_str return Reference(uri, target=target) asdf-1.3.3/asdf/resolver.py0000644000175000017500000000713713246003441015116 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os.path import six from . import constants from . import util SCHEMA_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), 'schemas')) class Resolver(object): """ A class that can be used to map strings with a particular prefix to another. """ def __init__(self, mapping=[], prefix=''): """ Parameters ---------- mapping : list of tuple or callable, optional A list of mappings to try, in order. For each entry: - If a callable, must take a string and return a remapped string. Should return `None` if the mapping does not apply to the input. - If a tuple, the first item is a string prefix to match. The second item specifies how to create the new result in Python string formatting syntax. The following formatting tokens are available, where ``X`` relates to the ``prefix`` argument: - ``{X}``: The entire string passed in. - ``{X_prefix}``: The prefix of the string that was matched. - ``{X_suffix}``: The part of the string following the prefix. prefix : str, optional The prefix to use for the Python formatting token names. """ self._mapping = self._validate_mapping(mapping)[::-1] self._prefix = prefix def _validate_mapping(self, mappings): normalized = [] for mapping in mappings: if six.callable(mapping): func = mapping elif (isinstance(mapping, (list, tuple)) and len(mapping) == 2 and isinstance(mapping[0], six.string_types) and isinstance(mapping[1], six.string_types)): def _make_map_func(mapping): def _map_func(uri): if uri.startswith(mapping[0]): format_tokens = { self._prefix: uri, self._prefix + "_prefix": mapping[0], self._prefix + "_suffix": uri[len(mapping[0]):] } return len(mapping[0]), mapping[1].format(**format_tokens) return None return _map_func func = _make_map_func(mapping) else: raise ValueError("Invalid mapping '{0}'".format(mapping)) normalized.append(func) return tuple(normalized) def __call__(self, input): candidates = [] for mapper in self._mapping: output = mapper(input) if isinstance(output, tuple): candidates.append(output) elif output is not None: candidates.append((six.MAXSIZE, output)) if len(candidates): candidates.sort() return candidates[-1][1] else: return input def __hash__(self): return hash(self._mapping) DEFAULT_URL_MAPPING = [ (constants.STSCI_SCHEMA_URI_BASE, util.filepath_to_url( os.path.join(SCHEMA_PATH, 'stsci.edu')) + '/{url_suffix}.yaml'), ('tag:stsci.edu:asdf/', util.filepath_to_url( os.path.join(SCHEMA_PATH, 'stsci.edu')) + '/asdf/{url_suffix}.yaml')] default_url_mapping = Resolver(DEFAULT_URL_MAPPING, 'url') asdf-1.3.3/asdf/schema.py0000644000175000017500000004344313246003441014515 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import datetime import json import os from collections import OrderedDict import six from six.moves.urllib import parse as urlparse from 
jsonschema import validators as mvalidators from jsonschema.exceptions import ValidationError import yaml from .compat import lru_cache from . import constants from . import generic_io from . import reference from . import resolver as mresolver from . import treeutil from . import util YAML_SCHEMA_METASCHEMA_ID = 'http://stsci.edu/schemas/yaml-schema/draft-01' if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_loader = yaml.SafeLoader __all__ = ['validate', 'fill_defaults', 'remove_defaults', 'check_schema'] SCHEMA_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), 'schemas')) PYTHON_TYPE_TO_YAML_TAG = { None: 'null', six.text_type: 'str', bytes: 'str', bool: 'bool', int: 'int', float: 'float', list: 'seq', dict: 'map', set: 'set', OrderedDict: 'omap' } if six.PY2: # pragma: no cover PYTHON_TYPE_TO_YAML_TAG[long] = 'int' # Prepend full YAML tag prefix for k, v in PYTHON_TYPE_TO_YAML_TAG.items(): PYTHON_TYPE_TO_YAML_TAG[k] = constants.YAML_TAG_PREFIX + v def _type_to_tag(type_): for base in type_.mro(): if base in PYTHON_TYPE_TO_YAML_TAG: return PYTHON_TYPE_TO_YAML_TAG[base] def validate_tag(validator, tagname, instance, schema): # Shortcut: If the instance is a subclass of YAMLObject then we know it # should have a yaml_tag attribute attached; otherwise we have to use a # hack of reserializing the object and seeing what tags get attached to it # (though there may be a better way than this). if hasattr(instance, '_tag'): instance_tag = instance._tag else: # Try tags for known Python builtins instance_tag = _type_to_tag(type(instance)) if instance_tag is not None and instance_tag != tagname: yield ValidationError( "mismatched tags, wanted '{0}', got '{1}'".format( tagname, instance_tag)) def validate_propertyOrder(validator, order, instance, schema): """ Stores a value on the `tagged.TaggedDict` instance so that properties can be written out in the preferred order. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not validator.is_type(instance, 'object'): return if not order: # propertyOrder may be an empty list return instance.property_order = order def validate_flowStyle(validator, flow_style, instance, schema): """ Sets a flag on the `tagged.TaggedList` or `tagged.TaggedDict` object so that the YAML generator knows which style to use to write the element. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not (validator.is_type(instance, 'object') or validator.is_type(instance, 'array')): return instance.flow_style = flow_style def validate_style(validator, style, instance, schema): """ Sets a flag on the `tagged.TaggedString` object so that the YAML generator knows which style to use to write the string. In that sense this isn't really a "validator", but using the `jsonschema` library's extensible validation system is the easiest way to get this property assigned. """ if not validator.is_type(instance, 'string'): return instance.style = style def validate_type(validator, types, instance, schema): """ PyYAML returns strings that look like dates as datetime objects. However, as far as JSON is concerned, this is type==string and format==date-time. That detects for that case and doesn't raise an error, otherwise falling back to the default type checker. 
""" if (isinstance(instance, datetime.datetime) and schema.get('format') == 'date-time' and 'string' in types): return return mvalidators.Draft4Validator.VALIDATORS['type']( validator, types, instance, schema) YAML_VALIDATORS = util.HashableDict( mvalidators.Draft4Validator.VALIDATORS.copy()) YAML_VALIDATORS.update({ 'tag': validate_tag, 'propertyOrder': validate_propertyOrder, 'flowStyle': validate_flowStyle, 'style': validate_style, 'type': validate_type }) def validate_fill_default(validator, properties, instance, schema): if not validator.is_type(instance, 'object'): return for property, subschema in six.iteritems(properties): if "default" in subschema: instance.setdefault(property, subschema["default"]) for err in mvalidators.Draft4Validator.VALIDATORS['properties']( validator, properties, instance, schema): yield err FILL_DEFAULTS = util.HashableDict() for key in ('allOf', 'anyOf', 'oneOf', 'items'): FILL_DEFAULTS[key] = mvalidators.Draft4Validator.VALIDATORS[key] FILL_DEFAULTS['properties'] = validate_fill_default def validate_remove_default(validator, properties, instance, schema): if not validator.is_type(instance, 'object'): return for property, subschema in six.iteritems(properties): if subschema.get("default", None) is not None: if instance.get(property, None) == subschema["default"]: del instance[property] for err in mvalidators.Draft4Validator.VALIDATORS['properties']( validator, properties, instance, schema): yield err REMOVE_DEFAULTS = util.HashableDict() for key in ('allOf', 'anyOf', 'oneOf', 'items'): REMOVE_DEFAULTS[key] = mvalidators.Draft4Validator.VALIDATORS[key] REMOVE_DEFAULTS['properties'] = validate_remove_default @lru_cache() def _create_validator(validators=YAML_VALIDATORS): meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID, mresolver.default_url_mapping) base_cls = mvalidators.create(meta_schema=meta_schema, validators=validators) class ASDFValidator(base_cls): DEFAULT_TYPES = base_cls.DEFAULT_TYPES.copy() DEFAULT_TYPES['array'] = (list, tuple) def iter_errors(self, instance, _schema=None, _seen=set()): # We can't validate anything that looks like an external reference, # since we don't have the actual content, so we just have to defer # it for now. If the user cares about complete validation, they # can call `AsdfFile.resolve_references`. 
if id(instance) in _seen: return if _schema is None: schema = self.schema else: schema = _schema if ((isinstance(instance, dict) and '$ref' in instance) or isinstance(instance, reference.Reference)): return if _schema is None: tag = getattr(instance, '_tag', None) if tag is not None: schema_path = self.ctx.tag_to_schema_resolver(tag) if schema_path != tag: s = load_schema(schema_path, self.ctx.url_mapping) if s: with self.resolver.in_scope(schema_path): for x in super(ASDFValidator, self).iter_errors(instance, s): yield x if isinstance(instance, dict): new_seen = _seen | set([id(instance)]) for val in six.itervalues(instance): for x in self.iter_errors(val, _seen=new_seen): yield x elif isinstance(instance, list): new_seen = _seen | set([id(instance)]) for val in instance: for x in self.iter_errors(val, _seen=new_seen): yield x else: for x in super(ASDFValidator, self).iter_errors(instance, _schema=schema): yield x return ASDFValidator # We want to load mappings in schema as ordered dicts class OrderedLoader(_yaml_base_loader): pass def construct_mapping(loader, node): loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) OrderedLoader.add_constructor( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping) if six.PY2: # pragma: no cover # Load strings in as Unicode on Python 2 OrderedLoader.add_constructor('tag:yaml.org,2002:str', OrderedLoader.construct_scalar) @lru_cache() def _load_schema(url): with generic_io.get_file(url) as fd: if isinstance(url, six.text_type) and url.endswith('json'): json_data = fd.read().decode('utf-8') result = json.loads(json_data, object_pairs_hook=OrderedDict) else: result = yaml.load(fd, Loader=OrderedLoader) return result, fd.uri def _make_schema_loader(resolver): def load_schema(url): url = resolver(url) return _load_schema(url) return load_schema def _make_resolver(url_mapping): handlers = {} schema_loader = _make_schema_loader(url_mapping) def get_schema(url): return schema_loader(url)[0] for x in ['http', 'https', 'file']: handlers[x] = get_schema # We set cache_remote=False here because we do the caching of # remote schemas here in `load_schema`, so we don't need # jsonschema to do it on our behalf. Setting it to `True` # counterintuitively makes things slower. return mvalidators.RefResolver( '', {}, cache_remote=False, handlers=handlers) def _load_draft4_metaschema(): from jsonschema import _utils return _utils.load_schema('draft4') # This is a list of schema that we have locally on disk but require # special methods to obtain HARDCODED_SCHEMA = { 'http://json-schema.org/draft-04/schema': _load_draft4_metaschema } @lru_cache() def load_schema(url, resolver=None, resolve_references=False): """ Load a schema from the given URL. Parameters ---------- url : str The path to the schema resolver : callable, optional A callback function used to map URIs to other URIs. The callable must take a string and return a string or `None`. This is useful, for example, when a remote resource has a mirror on the local filesystem that you wish to use. resolve_references : bool, optional If `True`, resolve all `$ref` references. 
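Examples
--------
A minimal usage sketch, assuming the default URL mapping can locate this
schema ID among the schemas bundled with the package::

    schema = load_schema(
        'http://stsci.edu/schemas/yaml-schema/draft-01',
        resolve_references=True)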
""" if resolver is None: resolver = mresolver.default_url_mapping loader = _make_schema_loader(resolver) if url in HARDCODED_SCHEMA: schema = HARDCODED_SCHEMA[url]() else: schema, url = loader(url) if resolve_references: def resolve_refs(node, json_id): if json_id is None: json_id = url if isinstance(node, dict) and '$ref' in node: suburl = generic_io.resolve_uri(json_id, resolver(node['$ref'])) parts = urlparse.urlparse(suburl) fragment = parts.fragment if len(fragment): suburl_path = suburl[:-(len(fragment) + 1)] else: suburl_path = suburl suburl_path = resolver(suburl_path) if suburl_path == url: subschema = schema else: subschema = load_schema(suburl_path, resolver, True) subschema_fragment = reference.resolve_fragment( subschema, fragment) return subschema_fragment return node schema = treeutil.walk_and_modify(schema, resolve_refs) return schema def get_validator(schema={}, ctx=None, validators=None, url_mapping=None, *args, **kwargs): """ Get a JSON schema validator object for the given schema. The additional *args and **kwargs are passed along to `jsonschema.validate`. Parameters ---------- schema : schema, optional Explicit schema to use. If not provided, the schema to use is determined by the tag on instance (or subinstance). ctx : AsdfFile context Used to resolve tags and urls validators : dict, optional A dictionary mapping properties to validators to use (instead of the built-in ones and ones provided by extension types). url_mapping : resolver.Resolver, optional A resolver to convert remote URLs into local ones. Returns ------- validator : jsonschema.Validator """ if ctx is None: from .asdf import AsdfFile ctx = AsdfFile() if validators is None: validators = util.HashableDict(YAML_VALIDATORS.copy()) validators.update(ctx._extensions.validators) kwargs['resolver'] = _make_resolver(url_mapping) # We don't just call validators.validate() directly here, because # that validates the schema itself, wasting a lot of time (at the # time of this writing, it was half of the runtime of the unit # test suite!!!). Instead, we assume that the schemas are valid # through the running of the unit tests, not at run time. cls = _create_validator(validators=validators) validator = cls(schema, *args, **kwargs) validator.ctx = ctx return validator if six.PY2: # pragma: no cover def validate_large_literals(instance): """ Validate that the tree has no large numeric literals. """ # We can count on 52 bits of precision upper = ((long(1) << 51) - 1) lower = -((long(1) << 51) - 2) for instance in treeutil.iter_tree(instance): if (isinstance(instance, six.integer_types) and (instance > upper or instance < lower)): raise ValidationError( "Integer value {0} is too large to safely represent as a " "literal in ASDF".format(instance)) else: def validate_large_literals(instance): """ Validate that the tree has no large numeric literals. """ # We can count on 52 bits of precision for instance in treeutil.iter_tree(instance): if (isinstance(instance, int) and ( instance > ((1 << 51) - 1) or instance < -((1 << 51) - 2))): raise ValidationError( "Integer value {0} is too large to safely represent as a " "literal in ASDF".format(instance)) def validate(instance, ctx=None, schema={}, validators=None, *args, **kwargs): """ Validate the given instance (which must be a tagged tree) against the appropriate schema. The schema itself is located using the tag on the instance. The additional *args and **kwargs are passed along to `jsonschema.validate`. 
Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls schema : schema, optional Explicit schema to use. If not provided, the schema to use is determined by the tag on instance (or subinstance). validators : dict, optional A dictionary mapping properties to validators to use (instead of the built-in ones and ones provided by extension types). """ if ctx is None: from .asdf import AsdfFile ctx = AsdfFile() validator = get_validator(schema, ctx, validators, ctx.url_mapping, *args, **kwargs) validator.validate(instance, _schema=(schema or None)) validate_large_literals(instance) def fill_defaults(instance, ctx): """ For any default values in the schema, add them to the tree if they don't exist. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls """ validate(instance, ctx, validators=FILL_DEFAULTS) def remove_defaults(instance, ctx): """ For any values in the tree that are the same as the default values specified in the schema, remove them from the tree. Parameters ---------- instance : tagged tree ctx : AsdfFile context Used to resolve tags and urls """ validate(instance, ctx, validators=REMOVE_DEFAULTS) def check_schema(schema): """ Check a given schema to make sure it is valid YAML schema. """ # We also want to validate the "default" values in the schema # against the schema itself. jsonschema as a library doesn't do # this on its own. def validate_default(validator, default, instance, schema): if not validator.is_type(instance, 'object'): return if 'default' in instance: with instance_validator.resolver.in_scope(scope): for err in instance_validator.iter_errors( instance['default'], instance): yield err VALIDATORS = util.HashableDict( mvalidators.Draft4Validator.VALIDATORS.copy()) VALIDATORS.update({ 'default': validate_default }) meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID, mresolver.default_url_mapping) resolver = _make_resolver(mresolver.default_url_mapping) cls = mvalidators.create(meta_schema=meta_schema, validators=VALIDATORS) validator = cls(meta_schema, resolver=resolver) instance_validator = mvalidators.Draft4Validator(schema, resolver=resolver) scope = schema.get('id', '') validator.validate(schema, _schema=meta_schema) asdf-1.3.3/asdf/fits_embed.py0000644000175000017500000002156613246003441015360 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function """ Utilities for embedded ADSF files in FITS. """ import io import re import numpy as np import six from . import asdf from . import block from . import util from . 
import generic_io try: from astropy.io import fits except ImportError: raise ImportError("AsdfInFits requires astropy") ASDF_EXTENSION_NAME = 'ASDF' FITS_SOURCE_PREFIX = 'fits:' class _FitsBlock(object): def __init__(self, hdu): self._hdu = hdu def __repr__(self): return ''.format(self._hdu.name, self._hdu.ver) def __len__(self): return self._hdu.data.nbytes @property def data(self): return self._hdu.data @property def array_storage(self): return 'fits' def override_byteorder(self, byteorder): return 'big' class _EmbeddedBlockManager(block.BlockManager): def __init__(self, hdulist, asdffile): self._hdulist = hdulist super(_EmbeddedBlockManager, self).__init__(asdffile) def get_block(self, source): if (isinstance(source, six.string_types) and source.startswith(FITS_SOURCE_PREFIX)): parts = re.match( '((?P[A-Z0-9]+),)?(?P[0-9]+)', source[len(FITS_SOURCE_PREFIX):]) if parts is not None: ver = int(parts.group('ver')) if parts.group('name'): pair = (parts.group('name'), ver) else: pair = ver return _FitsBlock(self._hdulist[pair]) else: raise ValueError("Can not parse source '{0}'".format(source)) return super(_EmbeddedBlockManager, self).get_block(source) def get_source(self, block): if isinstance(block, _FitsBlock): for i, hdu in enumerate(self._hdulist): if hdu is block._hdu: if hdu.name == '': return '{0}{1}'.format( FITS_SOURCE_PREFIX, i) else: return '{0}{1},{2}'.format( FITS_SOURCE_PREFIX, hdu.name, hdu.ver) raise ValueError("FITS block seems to have been removed") return super(_EmbeddedBlockManager, self).get_source(block) def find_or_create_block_for_array(self, arr, ctx): from .tags.core import ndarray if not isinstance(arr, ndarray.NDArrayType): base = util.get_array_base(arr) for hdu in self._hdulist: if base is hdu.data: return _FitsBlock(hdu) return super( _EmbeddedBlockManager, self).find_or_create_block_for_array(arr, ctx) class AsdfInFits(asdf.AsdfFile): """ Embed ASDF tree content in a FITS file. The YAML rendering of the tree is stored in a special FITS extension with the EXTNAME of ``ASDF``. Arrays in the ASDF tree may refer to binary data in other FITS extensions by setting source to a string with the prefix ``fits:`` followed by an ``EXTNAME``, ``EXTVER`` pair, e.g. ``fits:SCI,0``. 
Examples -------- Create a FITS file with ASDF structure, based on an existing FITS file:: from astropy.io import fits hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ')) tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist['DQ'].data, 'wcs': 'WCS info' } } } ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to('test.fits') # doctest: +SKIP """ def __init__(self, hdulist=None, tree=None, **kwargs): if hdulist is None: hdulist = fits.HDUList() super(AsdfInFits, self).__init__(tree=tree, **kwargs) self._blocks = _EmbeddedBlockManager(hdulist, self) self._hdulist = hdulist self._close_hdulist = False def __exit__(self, type, value, traceback): super(AsdfInFits, self).__exit__(type, value, traceback) if self._close_hdulist: self._hdulist.close() self._tree = {} def close(self): super(AsdfInFits, self).close() if self._close_hdulist: self._hdulist.close() self._tree = {} @classmethod def open(cls, fd, uri=None, validate_checksums=False, extensions=None, ignore_version_mismatch=True, ignore_unrecognized_tag=False): """Creates a new AsdfInFits object based on given input data Parameters ---------- fd : FITS HDUList instance, URI string, or file-like object May be an already opened instance of a FITS HDUList instance, string ``file`` or ``http`` URI, or a Python file-like object. uri : str, optional The URI for this ASDF file. Used to resolve relative references against. If not provided, will be automatically determined from the associated file object, if possible and if created from `AsdfFile.open`. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. Requires reading the entire file, so disabled by default. extensions : list of AsdfExtension, optional A list of extensions to the ASDF to support when reading and writing ASDF files. See `asdftypes.AsdfExtension` for more information. ignore_version_mismatch : bool, optional When `True`, do not raise warnings for mismatched schema versions. """ close_hdulist = False if isinstance(fd, fits.hdu.hdulist.HDUList): hdulist = fd else: file_obj = generic_io.get_file(fd, uri=uri) uri = file_obj._uri if uri is None and file_obj._uri else '' try: hdulist = fits.open(file_obj) # Since we created this HDUList object, we need to be # responsible for cleaning up upon close() or __exit__ close_hdulist = True except IOError: file_obj.close() msg = "Failed to parse given file '{}'. Is it FITS?" 
raise ValueError(msg.format(file_obj.uri)) self = cls(hdulist, uri=uri, extensions=extensions, ignore_version_mismatch=ignore_version_mismatch, ignore_unrecognized_tag=ignore_unrecognized_tag) self._close_hdulist = close_hdulist try: asdf_extension = hdulist[ASDF_EXTENSION_NAME] except (KeyError, IndexError, AttributeError): # This means there is no ASDF extension return self buff = io.BytesIO(asdf_extension.data) return cls._open_asdf(self, buff, uri=uri, mode='r', validate_checksums=validate_checksums) def _update_asdf_extension(self, all_array_storage=None, all_array_compression=None, auto_inline=None, pad_blocks=False): if self.blocks.streamed_block is not None: raise ValueError( "Can not save streamed data to ASDF-in-FITS file.") buff = io.BytesIO() super(AsdfInFits, self).write_to( buff, all_array_storage=all_array_storage, all_array_compression=all_array_compression, auto_inline=auto_inline, pad_blocks=pad_blocks, include_block_index=False) array = np.frombuffer(buff.getvalue(), np.uint8) try: asdf_extension = self._hdulist[ASDF_EXTENSION_NAME] except (KeyError, IndexError, AttributeError): self._hdulist.append(fits.ImageHDU(array, name=ASDF_EXTENSION_NAME)) else: asdf_extension.data = array def write_to(self, filename, all_array_storage=None, all_array_compression=None, auto_inline=None, pad_blocks=False, *args, **kwargs): self._update_asdf_extension( all_array_storage=all_array_storage, all_array_compression=all_array_compression, auto_inline=auto_inline, pad_blocks=pad_blocks) self._hdulist.writeto(filename, *args, **kwargs) def update(self, all_array_storage=None, all_array_compression=None, auto_inline=None, pad_blocks=False): self._update_asdf_extension( all_array_storage=all_array_storage, all_array_compression=all_array_compression, auto_inline=auto_inline, pad_blocks=pad_blocks) asdf-1.3.3/asdf/extern/0000755000175000017500000000000013246031665014211 5ustar dandan00000000000000asdf-1.3.3/asdf/extern/RangeHTTPServer.py0000755000175000017500000001566313246003441017513 0ustar dandan00000000000000#!/usr/bin/env python #Portions Copyright (C) 2009,2010 Xyne #Portions Copyright (C) 2011 Sean Goller # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # (version 2) as published by the Free Software Foundation. # # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Range HTTP Server. This module builds on BaseHTTPServer by implementing the standard GET and HEAD requests in a fairly straightforward manner, and includes support for the Range header. """ __version__ = "0.1" __all__ = ["RangeHTTPRequestHandler"] import os import posixpath import shutil import six class RangeHTTPRequestHandler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler): # pragma: no cover """Simple HTTP request handler with GET and HEAD commands. This serves files from the current directory and any of its subdirectories. The MIME type for files is determined by calling the .guess_type() method. The GET and HEAD requests are identical except that the HEAD request omits the actual contents of the file. 
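A minimal usage sketch; the host, port and served directory (the process
working directory) are arbitrary choices for illustration::

    import six

    httpd = six.moves.BaseHTTPServer.HTTPServer(
        ('127.0.0.1', 8080), RangeHTTPRequestHandler)
    httpd.serve_forever()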
""" server_version = "RangeHTTP/" + __version__ def do_GET(self): """Serve a GET request.""" f, start_range, end_range = self.send_head() if f: f.seek(start_range, 0) chunk = 0x1000 total = 0 while chunk > 0: if start_range + chunk > end_range: chunk = end_range - start_range try: self.wfile.write(f.read(chunk)) except: break total += chunk start_range += chunk f.close() def do_HEAD(self): """Serve a HEAD request.""" f, start_range, end_range = self.send_head() if f: f.close() def send_head(self): """Common code for GET and HEAD commands. This sends the response code and MIME headers. Return value is either a file object (which has to be copied to the outputfile by the caller unless the command was HEAD, and must be closed by the caller under all circumstances), or None, in which case the caller has nothing further to do. """ path = self.translate_path(self.path) f = None if os.path.isdir(path): if not self.path.endswith('/'): # redirect browser - doing basically what apache does self.send_response(301) self.send_header("Location", self.path + "/") self.end_headers() return (None, 0, 0) for index in "index.html", "index.htm": index = os.path.join(path, index) if os.path.exists(index): path = index break else: return self.list_directory(path) ctype = self.guess_type(path) try: # Always read in binary mode. Opening files in text mode may cause # newline translations, making the actual size of the content # transmitted *less* than the content-length! f = open(path, 'rb') except IOError: self.send_error(404, "File not found") return (None, 0, 0) if "Range" in self.headers: self.send_response(206) else: self.send_response(200) self.send_header("Content-type", ctype) fs = os.fstat(f.fileno()) size = int(fs[6]) start_range = 0 end_range = size self.send_header("Accept-Ranges", "bytes") if "Range" in self.headers: s, e = self.headers['range'][6:].split('-', 1) sl = len(s) el = len(e) if sl > 0: start_range = int(s) if el > 0: end_range = int(e) + 1 elif el > 0: ei = int(e) if ei < size: start_range = size - ei self.send_header( "Content-Range", 'bytes ' + str(start_range) + '-' + str(end_range - 1) + '/' + str(size)) self.send_header("Content-Length", end_range - start_range) self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) self.end_headers() return (f, start_range, end_range) def translate_path(self, path): """Translate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) """ # abandon query parameters path = path.split('?', 1)[0] path = path.split('#', 1)[0] path = posixpath.normpath(six.moves.urllib.parse.unquote(path)) words = path.split('/') words = filter(None, words) path = os.getcwd() for word in words: drive, word = os.path.splitdrive(word) head, word = os.path.split(word) if word in (os.curdir, os.pardir): continue path = os.path.join(path, word) return path def copyfile(self, source, outputfile): """Copy all data between two file objects. The SOURCE argument is a file object open for reading (or anything with a read() method) and the DESTINATION argument is a file object open for writing (or anything with a write() method). The only reason for overriding this would be to change the block size or perhaps to replace newlines by CRLF -- note however that this the default server uses this to copy binary data as well. """ shutil.copyfileobj(source, outputfile) def guess_type(self, path): """Guess the type of a file. 
Argument is a PATH (a filename). Return value is a string of the form type/subtype, usable for a MIME Content-type header. The default implementation looks the file's extension up in the table self.extensions_map, using application/octet-stream as a default; however it would be permissible (if slow) to look inside the data to make a better guess. """ base, ext = posixpath.splitext(path) if ext in self.extensions_map: return self.extensions_map[ext] ext = ext.lower() if ext in self.extensions_map: return self.extensions_map[ext] else: return self.extensions_map[''] extensions_map = {'': 'unknown'} asdf-1.3.3/asdf/extern/__init__.py0000644000175000017500000000011013243547254016315 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ """ asdf-1.3.3/asdf/extern/atomicfile.py0000644000175000017500000001017513246003441016672 0ustar dandan00000000000000import six import os import tempfile import sys import errno if os.name == 'nt': # pragma: no cover import random import time _rename = lambda src, dst: False _rename_atomic = lambda src, dst: False import ctypes _MOVEFILE_REPLACE_EXISTING = 0x1 _MOVEFILE_WRITE_THROUGH = 0x8 _MoveFileEx = ctypes.windll.kernel32.MoveFileExW def _rename(src, dst): if not isinstance(src, six.text_type): src = six.text_type(src, sys.getfilesystemencoding()) if not isinstance(dst, six.text_type): dst = six.text_type(dst, sys.getfilesystemencoding()) if _rename_atomic(src, dst): return True retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH) if not rv: time.sleep(0.001) retry += 1 return rv # new in Vista and Windows Server 2008 _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW _CloseHandle = ctypes.windll.kernel32.CloseHandle def _rename_atomic(src, dst): ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Atomic rename') if ta == -1: return False try: retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileTransacted(src, dst, None, None, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH, ta) if rv: rv = _CommitTransaction(ta) break else: time.sleep(0.001) retry += 1 return rv finally: _CloseHandle(ta) def atomic_rename(src, dst): # Try atomic or pseudo-atomic rename if _rename(src, dst): return # Fall back to "move away and replace" try: os.rename(src, dst) except OSError as e: if e.errno != errno.EEXIST: raise old = "%s-%08x" % (dst, random.randint(0, sys.maxint)) os.rename(dst, old) os.rename(src, dst) try: os.unlink(old) except Exception: pass else: atomic_rename = os.rename class _AtomicWFile(object): """Helper class for :func:`atomic_open`.""" def __init__(self, f, tmp_filename, filename): self._f = f self._tmp_filename = tmp_filename self._filename = filename def __getattr__(self, attr): return getattr(self._f, attr) def __enter__(self): return self @property def name(self): return self._filename def close(self): if self._f.closed: return self._f.close() atomic_rename(self._tmp_filename, self._filename) def __exit__(self, exc_type, exc_value, tb): if exc_type is None: self.close() else: self._f.close() try: os.remove(self._tmp_filename) except OSError: pass def __repr__(self): return '<%s %s%r, mode %r>' % ( self.__class__.__name__, self._f.closed and 'closed ' or '', self._filename, self._f.mode ) def atomic_open(filename, mode='w'): """Works like a regular `open()` but writes updates into a 
temporary file instead of the given file and moves it over when the file is closed. The file returned behaves as if it was a regular Python """ if mode in ('r', 'rb', 'r+', 'rb+', 'a', 'ab'): raise TypeError('Read or append modes don\'t work with atomic_open') f = tempfile.NamedTemporaryFile(mode, prefix='.___atomic_write', dir=os.path.dirname(filename), delete=False) return _AtomicWFile(f, f.name, filename) asdf-1.3.3/asdf/commands/0000755000175000017500000000000013246031665014505 5ustar dandan00000000000000asdf-1.3.3/asdf/commands/diff.py0000644000175000017500000002301613246003441015760 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Implementation of command for displaying differences between two ASDF files. """ from __future__ import absolute_import, division, unicode_literals, print_function import os import sys from numpy import array_equal try: # Provides cross-platform color support import colorama colorama.init() RED = colorama.Fore.RED GREEN = colorama.Fore.GREEN RESET = colorama.Style.RESET_ALL except ImportError: from sys import platform # These platforms should support ansi color codes if platform.startswith('linux') or platform.startswith('darwin'): RED = '\x1b[31m' GREEN = '\x1b[32m' RESET = '\x1b[0m' else: RED = '' GREEN = '' RESET = '' from .main import Command from .. import AsdfFile from .. import treeutil from ..tagged import Tagged from ..util import human_list from ..tags.core.ndarray import NDArrayType __all__ = ['diff'] RESET_NEWLINE = RESET + '\n' NDARRAY_TAG = 'core/ndarray' LIST_MARKER = '-' THIS_MARKER = GREEN + "> " THAT_MARKER = RED + "< " class Diff(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("diff"), help="Report differences between two ASDF files", description="""Reports differences between two ASDF files""") parser.add_argument( 'filenames', metavar='asdf_file', nargs=2, help="The ASDF files to compare.") parser.add_argument( '-m', '--minimal', action='store_true', help="Show minimal differences between the two files") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return diff(args.filenames, args.minimal) class ArrayNode(object): """This class is used to represent unique dummy nodes in the diff tree. In general these dummy nodes will be list elements that we want to keep track of but not necessarily display. This allows the diff output to be cleaner.""" def __init__(self, name): self.name = name def __hash__(self): return hash(self.name) class PrintTree(object): """This class is used to remember the nodes in the tree that have already been displayed in the diff output. 
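For example, a rough sketch of the intended behaviour (the node names are
purely illustrative)::

    tree = PrintTree()
    tree[['frames']] = True   # record that the 'frames' node was displayed
    tree.get_print_list(['frames', 'reference_frame'])
    # -> [None, None, 'reference_frame']: ancestors already shown come back
    #    as None, so only the new 'reference_frame' node needs printing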
""" def __init__(self): self.__tree = dict(visited=False, children=dict()) def get_print_list(self, node_list): at_end = False print_list = [] current = self.__tree for node in ['tree'] + node_list: if at_end: print_list.append(node) elif not node in current['children']: print_list.append(node) at_end = True elif not current['children'][node]['visited']: print_list.append(node) else: print_list.append(None) if not at_end: current = current['children'][node] return print_list def __setitem__(self, node_list, visit): assert isinstance(node_list, list) current = self.__tree for node in ['tree'] + node_list: if not node in current['children']: current['children'][node] = dict(visited=True, children=dict()) current = current['children'][node] class DiffContext(object): """Class that contains context data of the diff to be computed""" def __init__(self, asdf0, asdf1, iostream, minimal=False): self.asdf0 = asdf0 self.asdf1 = asdf1 self.iostream = iostream self.minimal = minimal self.print_tree = PrintTree() def print_tree_context(diff_ctx, node_list, other, use_marker, last_was_list): """Print context information indicating location in ASDF tree.""" prefix = "" marker = THAT_MARKER if other else THIS_MARKER for node in diff_ctx.print_tree.get_print_list(node_list): if node is not None: node = LIST_MARKER if isinstance(node, ArrayNode) else node + ":" # All of this logic is just to make the display of arrays prettier if use_marker: line_prefix = " " if last_was_list else marker + prefix[2:] line_suffix = "" if node == LIST_MARKER else RESET_NEWLINE else: line_prefix = prefix line_suffix = RESET_NEWLINE diff_ctx.iostream.write(line_prefix + node + line_suffix) last_was_list = node == LIST_MARKER prefix += " " diff_ctx.print_tree[node_list] = True return last_was_list def print_in_tree(diff_ctx, node_list, thing, other, use_marker=False, last_was_list=False, ignore_lwl=False): """Recursively print tree context and diff information about object.""" last_was_list = print_tree_context( diff_ctx, node_list, other, use_marker, last_was_list) # If tree element is list, recursively print list contents if isinstance(thing, list): for i, subthing in enumerate(thing): key = ArrayNode("{}_{}".format(node_list[-1], i)) last_was_list = print_in_tree( diff_ctx, node_list+[key], subthing, other, use_marker=True, last_was_list=last_was_list, ignore_lwl=ignore_lwl) # If tree element is dictionary, recursively print dictionary contents elif isinstance(thing, dict): for key in sorted(thing.keys()): last_was_list = print_in_tree( diff_ctx, node_list+[key], thing[key], other, use_marker=True, last_was_list=last_was_list, ignore_lwl=ignore_lwl) # Print difference between leaf objects (no need to recurse further) else: use_marker = not last_was_list or ignore_lwl marker = THAT_MARKER if other else THIS_MARKER prefix = marker + " " * len(node_list) if use_marker else " " diff_ctx.iostream.write(prefix + str(thing) + RESET_NEWLINE) last_was_list = False return last_was_list def compare_objects(diff_ctx, obj0, obj1, keys=[]): """Displays diff of two objects if they are not equal""" if obj0 != obj1: print_in_tree(diff_ctx, keys, obj0, False, ignore_lwl=True) print_in_tree(diff_ctx, keys, obj1, True, ignore_lwl=True) def print_dict_diff(diff_ctx, tree, node_list, keys, other): """Recursively traverses dictionary object and displays differences""" for key in keys: if diff_ctx.minimal: nodes = node_list key = key else: nodes = node_list+[key] key = tree[key] use_marker = not diff_ctx.minimal print_in_tree(diff_ctx, nodes, key, 
other, use_marker=use_marker) def compare_ndarrays(diff_ctx, array0, array1, keys): """Compares two ndarray objects""" ignore_keys = set(['source', 'data']) compare_dicts(diff_ctx, array0, array1, keys, ignore_keys) differences = [] for field in ['shape', 'datatype']: if array0[field] != array1[field]: differences.append(field) array0 = NDArrayType.from_tree(array0, diff_ctx.asdf0) array1 = NDArrayType.from_tree(array1, diff_ctx.asdf1) if not array_equal(array0, array1): differences.append('contents') if differences: prefix = " " * (len(keys) + 1) msg = "ndarrays differ by {}".format(human_list(differences)) diff_ctx.iostream.write(prefix + RED + msg + RESET_NEWLINE) def both_are_ndarrays(tree0, tree1): """Returns True if both inputs correspond to ndarrays, False otherwise""" if not (isinstance(tree0, Tagged) and isinstance(tree1, Tagged)): return False if not (NDARRAY_TAG in tree0._tag and NDARRAY_TAG in tree1._tag): return False return True def compare_dicts(diff_ctx, dict0, dict1, keys, ignores=set()): """Recursively compares two dictionary objects""" keys0 = set(dict0.keys()) - ignores keys1 = set(dict1.keys()) - ignores # Recurse into subtree elements that are shared by both trees for key in sorted(keys0 & keys1): obj0 = dict0[key] obj1 = dict1[key] compare_trees(diff_ctx, obj0, obj1, keys=keys+[key]) # Display subtree elements existing only in this tree print_dict_diff(diff_ctx, dict0, keys, sorted(keys0-keys1), False) # Display subtree elements existing only in that tree print_dict_diff(diff_ctx, dict1, keys, sorted(keys1-keys0), True) def compare_trees(diff_ctx, tree0, tree1, keys=[]): """Recursively traverses two ASDF tree and compares them""" if both_are_ndarrays(tree0, tree1): compare_ndarrays(diff_ctx, tree0, tree1, keys) elif isinstance(tree0, dict) and isinstance(tree1, dict): compare_dicts(diff_ctx, tree0, tree1, keys) elif isinstance(tree0, list) and isinstance(tree1, list): for i, (obj0, obj1) in enumerate(zip(tree0, tree1)): key = ArrayNode("item_{}".format(i)) compare_trees(diff_ctx, obj0, obj1, keys+[key]) else: compare_objects(diff_ctx, tree0, tree1, keys) def diff(filenames, minimal, iostream=sys.stdout): """Top-level implementation of diff algorithm""" try: with AsdfFile.open(filenames[0], _force_raw_types=True) as asdf0: with AsdfFile.open(filenames[1], _force_raw_types=True) as asdf1: diff_ctx = DiffContext(asdf0, asdf1, iostream, minimal=minimal) compare_trees(diff_ctx, asdf0.tree, asdf1.tree) except ValueError as error: raise RuntimeError(str(error)) asdf-1.3.3/asdf/commands/to_yaml.py0000644000175000017500000000406413246003441016516 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Contains commands for dealing with exploded and imploded forms. """ from __future__ import absolute_import, division, unicode_literals, print_function import os from .main import Command from .. import AsdfFile __all__ = ['to_yaml'] class ToYaml(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("to_yaml"), help="Convert as ASDF file to pure YAML.", description="""Convert all data to inline YAML so the ASDF file contains no binary blocks.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to convert to YAML.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file. 
If not provided, it will be the name of the input file with a '.yaml' extension.""") parser.add_argument( "--resolve-references", "-r", action="store_true", help="""Resolve all references and store them directly in the output file.""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return to_yaml(args.filename[0], args.output, args.resolve_references) def to_yaml(input, output=None, resolve_references=False): """ Implode a given ASDF file, which may reference external data, back into a single ASDF file. Parameters ---------- input : str or file-like object The input file. output : str of file-like object The output file. resolve_references : bool, optional If `True` resolve all external references before saving. """ if output is None: base, ext = os.path.splitext(input) output = base + '.yaml' with AsdfFile.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to(output, all_array_storage='inline') asdf-1.3.3/asdf/commands/defragment.py0000644000175000017500000000413613246003441017166 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Defragment command. """ from __future__ import absolute_import, division, unicode_literals, print_function import os from .main import Command from .. import AsdfFile __all__ = ['defragment'] class Defragment(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("defragment"), help="Defragment an ASDF file..", description="""Removes any unused blocks and unused space.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to collect.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file.""") parser.add_argument( "--resolve-references", "-r", action="store_true", help="""Resolve all references and store them directly in the output file.""") parser.add_argument( "--compress", "-c", type=str, nargs="?", choices=['zlib', 'bzp2', 'lz4'], help="""Compress blocks using one of "zlib", "bzp2" or "lz4".""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return defragment(args.filename[0], args.output, args.resolve_references, args.compress) def defragment(input, output=None, resolve_references=False, compress=None): """ Defragment a given ASDF file. Parameters ---------- input : str or file-like object The input file. output : str of file-like object The output file. resolve_references : bool, optional If `True` resolve all external references before saving. compress : str, optional Compression to use. """ with AsdfFile.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to( output, all_array_storage='internal', all_array_compression=compress) asdf-1.3.3/asdf/commands/tests/0000755000175000017500000000000013246031665015647 5ustar dandan00000000000000asdf-1.3.3/asdf/commands/tests/test_diff.py0000644000175000017500000000326213246003441020162 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os import io import numpy as np import pytest from ... import AsdfFile from .. 
import main, diff TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data') def _assert_diffs_equal(filenames, result_file, minimal=False): iostream = io.StringIO() file_paths = ["{}/{}".format(TEST_DATA_PATH, name) for name in filenames] diff(file_paths, minimal=minimal, iostream=iostream) iostream.seek(0) result_path = "{}/{}".format(TEST_DATA_PATH, result_file) with open(result_path, 'r') as handle: assert handle.read() == iostream.read() def test_diff(): filenames = ['frames0.asdf', 'frames1.asdf'] result_file = 'frames.diff' _assert_diffs_equal(filenames, result_file, minimal=False) def test_diff_minimal(): filenames = ['frames0.asdf', 'frames1.asdf'] result_file = 'frames_minimal.diff' _assert_diffs_equal(filenames, result_file, minimal=True) def test_diff_block(): filenames = ['block0.asdf', 'block1.asdf'] result_file = 'blocks.diff' _assert_diffs_equal(filenames, result_file, minimal=False) def test_file_not_found(): # Try to open files that exist but are not valid asdf filenames = ['frames.diff', 'blocks.diff'] with pytest.raises(RuntimeError): diff(["{}/{}".format(TEST_DATA_PATH, name) for name in filenames], False) def test_diff_command(): filenames = ['frames0.asdf', 'frames1.asdf'] paths = ["{}/{}".format(TEST_DATA_PATH, name) for name in filenames] assert main.main_from_args(['diff'] + paths) == 0 asdf-1.3.3/asdf/commands/tests/test_main.py0000644000175000017500000000111413246003441020170 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import pytest from .. import main def test_help(): # Just a smoke test, really main.main_from_args(['help']) def test_invalid_command(): with pytest.raises(SystemExit) as e: main.main([]) assert e.value.code == 2 with pytest.raises(SystemExit) as e: main.main(['foo']) if isinstance(e.value, int): assert e.value == 2 else: assert e.value.code == 2 asdf-1.3.3/asdf/commands/tests/setup_package.py0000644000175000017500000000047013246003441021024 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function def get_package_data(): # pragma: no cover return { str(_PACKAGE_NAME_ + '.commands.tests'): ['data/*.asdf', 'data/*.diff']} asdf-1.3.3/asdf/commands/tests/test_exploded.py0000644000175000017500000000275413246003441021063 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os import numpy as np from ... import AsdfFile from .. 
import main from ...tests.helpers import get_file_sizes def test_explode_then_implode(tmpdir): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args(['explode', path]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original_exploded.asdf' in files assert 'original_exploded0000.asdf' in files assert 'original_exploded0001.asdf' in files assert 'original_exploded0002.asdf' not in files assert files['original.asdf'] > files['original_exploded.asdf'] path = os.path.join(str(tmpdir), 'original_exploded.asdf') result = main.main_from_args(['implode', path]) files = get_file_sizes(str(tmpdir)) assert 'original_exploded_all.asdf' in files assert files['original_exploded_all.asdf'] == files['original.asdf'] def test_file_not_found(tmpdir): path = os.path.join(str(tmpdir), 'original.asdf') assert main.main_from_args(['explode', path]) == 2 asdf-1.3.3/asdf/commands/tests/test_to_yaml.py0000644000175000017500000000202213246003441020707 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os import numpy as np from ... import AsdfFile from .. import main from ...tests.helpers import get_file_sizes, assert_tree_match def test_to_yaml(tmpdir): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args(['to_yaml', path]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original.yaml' in files with AsdfFile.open(os.path.join(str(tmpdir), 'original.yaml')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 0 asdf-1.3.3/asdf/commands/tests/test_defragment.py0000644000175000017500000000317413246003441021370 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os import sys import numpy as np import pytest from ... import AsdfFile from .. 
import main from ...tests.helpers import get_file_sizes, assert_tree_match def _test_defragment(tmpdir, codec): x = np.arange(0, 1000, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(100, 0, -1, dtype=np.uint8) } path = os.path.join(str(tmpdir), 'original.asdf') out_path = os.path.join(str(tmpdir), 'original.defragment.asdf') ff = AsdfFile(tree) ff.write_to(path) assert len(ff.blocks) == 2 result = main.main_from_args( ['defragment', path, '-o', out_path, '-c', codec]) assert result == 0 files = get_file_sizes(str(tmpdir)) assert 'original.asdf' in files assert 'original.defragment.asdf' in files assert files['original.defragment.asdf'] < files['original.asdf'] with AsdfFile.open(os.path.join(str(tmpdir), 'original.defragment.asdf')) as ff: assert_tree_match(ff.tree, tree) assert len(list(ff.blocks.internal_blocks)) == 2 def test_defragment_zlib(tmpdir): _test_defragment(tmpdir, 'zlib') def test_defragment_bzp2(tmpdir): _test_defragment(tmpdir, 'bzp2') @pytest.mark.skipif(sys.platform.startswith('win'), reason="This test fails on AppVeyor even though the lz4 module exists") def test_defragment_lz4(tmpdir): pytest.importorskip('lz4') _test_defragment(tmpdir, 'lz4') asdf-1.3.3/asdf/commands/tests/test_tags.py0000644000175000017500000000223113246003441020203 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io from ... import AsdfFile from .. import list_tags def _get_tags(display_classes): iostream = io.StringIO() list_tags(display_classes=display_classes, iostream=iostream) iostream.seek(0) return [line.strip() for line in iostream.readlines()] def _class_to_string(_class): return "{}.{}".format(_class.__module__, _class.__name__) def test_list_schemas(): obs_tags = _get_tags(False) af = AsdfFile() exp_tags = sorted(af._extensions._type_index._type_by_tag.keys()) for exp, obs in zip(exp_tags, obs_tags): assert exp == obs def test_list_schemas_and_tags(): tag_lines = _get_tags(True) af = AsdfFile() type_by_tag = af._extensions._type_index._type_by_tag exp_tags = sorted(type_by_tag.keys()) for exp_tag, line in zip(exp_tags, tag_lines): tag_name, tag_class = line.split(": ") assert tag_name == exp_tag exp_class = _class_to_string(type_by_tag[exp_tag]) assert tag_class == exp_class asdf-1.3.3/asdf/commands/tests/data/0000755000175000017500000000000013246031665016560 5ustar dandan00000000000000asdf-1.3.3/asdf/commands/tests/data/frames_minimal.diff0000644000175000017500000000041513243547254022400 0ustar dandan00000000000000tree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 frames: - reference_frame: > galcen_coord > galcen_v_sun < galcen_dec < galcen_ra asdf-1.3.3/asdf/commands/tests/data/block1.asdf0000644000175000017500000023512513243547254020605 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev870} foobar: !core/ndarray-1.0.0 bizbaz: red source: 0 datatype: float64 byteorder: little shape: [10000] ... BLK0888\7ZN%><І̯?Mm?JDb?B+j?F>:c?g?v ~?_C?`?a,P?'?SP?Փv2?dqi?nF?@MqZ?5^֢? $?%J?Q0I?\N?z=?Ś`?PYs!?9&? Ә/? դ?T-?l, ?*)YR?=m2x?`c@??&D?DA?hY??:l:?B.:? |Ҙ?f@-? G%M?iF?|Ϙ?|8R?^B?w?)Fix ?6Ao??c8?ǘ?D?t#?W-l`?Iy?:VtE? 
3?`VǭM?2i?3?fp?zC?/lB?LV?\6ݦ?|p?s5=?`qʹ?,^?Z5U ?@X?Ǧ?l=6w?Pǣ?vY$?6?*k?iz;?kd?*$g ?6Ců?-̋|w?lB6V?0`??r?rS}?bj??>?9k ͠?\W?)dW?bLFx??`6 ?0\?Il.?hej ?Gqd?nrr?$s\?=H?\?b;R7?h9?G?bT.?Hj; ?RT~?s0kc?xb2C?]?N {?9a"? ?{ w?TA@o?h?(?fm?p6BBݶ?Nx?xT ?pq>?8\?,*?tG? ?s? ?f+EMfJ?YS?`(?ps?'?q?4`u?R?Z;(?(sh?H?)#?6/?E9?{q?l톎?l?hu?$ e?`BL?LOE? ʣ?HI4?% 9 ??qH ?Bl?>,?x?@ʦ[6?@RXũ?HV?n3H?ՈiG?۹m?0hee?bf?$a@?AQE?Y;ɺ?84?нF?d̓?H?ā?(I?U?*[? I?`P?mx?>F?tEi8U?R'G?e?p㞏?yb]?I>a?ҨG? ?OjmxY?Pc8?d?p%?k"%?n"w?Y>?amk?_ml?dxL?Hd?X?1ѳ?Bw?쀉B?^>P?\҂` ?Œ1?i)w?Tk١?+Y?-Og?#I?1{u?B~s?[,º?0К;u?$S@#?@ȞI?$?ԍ &i?w"?Y:?(W@?պ=?Gr?@:U?Iȱ?n?*xH?NސF?Pp?F+r?t(㏣??tjڷ?6>_?P&C?.?< ?8k?L|?E.U?HR?sKA?Hj[?F (v?lX?Ii?vgc? )#?XwL?/?d*r?U-?u?4y?2kĴ,?(VpU?b(?g?"nk?ʘ??I.&?ET?\!8?@?]vאC?S??v~5n?/0??B?0R?nc>o?g%?؛? ~?5ub?u?V)d?#?T5xI?7?+ơ?0Vmr? |u? ?g?{?u>i?B?gϼ?hlt5m?0#_?n?h?`u?h\?9q?@g?-ZF ?JNF۲?Wgx!?؂}&c? LŃ]?h?m?Kq5?q?v8?@T?T?'I{s?HkUј?2t?`\C?>/,4?Dq?/!mR?# ?l.O?57?TƮ?\A??1ٺGl?|6=?n֗Q?|v@V?R?)v\?Q]K?P~uTA?գ?#?.-s??N?GP/?T?7n?`?NU{B?yt?Q?[?/}?L)?!΅9?(_yh?yi~@?Оb?*ɿ ?=`dl?'?l7\?|)ʺk?[w ? L? ?rT*[v?u{?8Qs?X}5Ӣ?T(0?D9?[2 ,?-Z?Za?i?`W ?@?`*?wed<? T(?XK?6Z?R;?8p)?F÷?$ˮ?$͍?8 z?0EL'?%!?_j]g?xOn?-%ɗ??;ᮈ?%?bz?hɚF? s?x?|p"?جTU?ɔCg?CUM?jIIy?V]H?xFϱ?R|$?gHX?VӰ?[;K?OV?J`?0a}?k9q?زaa?p!?α?H?Z,?(/7?Z?ҿ ? YY-?Iq??8:?ֹ׀??T²L?>lH{?_Cɏ?~&?PIg?zԓK?%h:?pLlQ?2Z1? wx?:ѓ?u#*6@? #?ׅD؄? N?f?U( ?P3?U0?OV?YZ?BDz?\?g%bOK?+&T~??PxV?p'^ރ?(Z?c^t_??ّ۽?) d?RH }?b?_2X?v9Dn?w3d?Ze?D '_"E??Z3y??З?# AF?Z 4?lq?<H?v)6"?@`o?pGk??<m?m?M?cJ?z?Dǎ?Ґ?O\2b??)m??QS?U/?5h?}JDz??ӻMF?` ?Ĩ ?@Р?(?ra"A?`b?"{][???!)?}떏?*nrf?h̗?x?xE??q? g?Ɋ15?K+?>F[?> M?p'r?9T?7r?:p?"?o ?~ς?"?2???E}E0M?ݸ? ?%&?4-_D?X?ȹw?ԔR?Nq?4JyI?qGM?v;&?+K^M?Ad?g,?S?||q??I]s?WM?nt6? 2>? P?\D?@Of6?}.\? ?%-?l*_?vЌ"?+@?hX1? (?Iu? ث??"b?‚H+y?ݡ?en;R?s?]ć?$ꗚ??Pd?%cR?^fs?|w?Bo(?3P?K]2?b9y7?~*`0??l7ü#?79%?$5#?/;?G q?rPw?O?a/r?$,ǸZ?|#? ?qF? C?P@IU?\A*?m%j?4Mr??ui!a?ȯL"? v}?8+@?p˧?K?xE&b?#ʹ?Ja2B?H?@hՙ?u?yg?j5R?_Xvp*?Z?zlؔ4??_Yϰ4??ٜ?~ D?DX?Cp? po?]A$?`ڦ?.lH?K?p ?:??*`>?;]&?` d?l?P/?4DF9?,Qo?P`?$?h.V@?ΦC?aZʄ?n%j"? 9N?ԏO?E?={?&c?*?5?!D"? mީ?כإ?lw?G ??a~?#V%(?G+3?d?`tk:?$U?Bn;ʛ?5Y8?=r?S҄~?{T~V?O?P-X?;=N?0?0vڿ?X\?YN??8?`?>Ϧ?X?vR"?_W}?SB?uD?sd?\m?P2 .?K^?q-T?pZ5P?Hsj?~baʍ?I4?$7?f_?F?8gb?b5/X?ڰ?.XC?2 r'?"Q?A:le?P]D|?nbl3?5Jfu|?2M [?!@?X2:?[?}`lG?ȶ|?Rcʄ?Tx?EL?5O ?X?q ?1+?S?..J?׽2į?TD?K}?H4?_nj?, #?* [W8?P|g?a/? T?\_]H?i9X?Đ]U+?H3K/?f? 5?.? P?Mgi3?([\? P?|\?a|3d?Pa?Zw ?zh?U;w?*șvg?5U?G ?0y?-@?9?߱?TƾX? `ķ?SSڔ?lQ L?H?j׳js?p?0VF?1?/.?u_??kj?$?r?J%Qs8?,Kb6c?YCgT?1Ly?4?&?h{%?̄&?LaG*?n/MJ?+p? @R?` o?rRR?z|?rQ?5&?PkT?!|f|?>U7?g?2s?e?90&7?O*?i Y?Sb[?X[Zh?X@Q?Ǖg?uzI?HPH?J/RAW?4L++?t||?r%?I短F?,?ulE?XѸg?D?@Cq?uͧ?]M "a?W?Fc?x+A?&8A?ʆ피?D4?$`E?"D6`?H7Jk?95~?,nM?T@^?U?R?¨]:?%w ?UE??+D-?ma? Rn3?d? Oۑ?%uk?Vۇ?Wq?1:C#?'_?'?4]qV?pVU?OFU"Xo?7cK`5?L!?dZ5M^?d@X-?XL?,DxA?dX?fU?Rrf?Ve8?j?>T3cS?Ѡ!?xI ?NK!?){F_?Hc8;?DAS!a?`l8G?x%bd?XSG4'?Rv۟?}ɶ?jX9?%g'4?H)9? Mt?X??sO92O?ȃ9V?`-t)?2b} ?A? zS?pt?0i?@];j-?"F(? U?8¦?8m?n]b? ?]b:ۄ?Ys ?\_??~WÉ?[\h ?\M??*8(?tuV?|[Z?]P^rC?Џ? H(?Fv:?Oq?L? 贛;?7C?ZrX?`9q?,X~U?8)?8$$h~?XRO?yK? ?7?xb;Zj;?tn:?jT5j?s')? ?;?P;^?eѩ? $@?F\?0?3z?e n?;j?%7?KN!?|u%?_.*?%­e?4S?4xQQ?a=J?U?(in? hXz?P{7?Dpc?lT]?\?Js?AMD3?Z1}?e?T# E?4Lb?h? e?P_N4?"d?4 .?fH?g=?ɪ?%!Ϫ?0 -?\{?P7}5?cǀ?M(jc.?Lᜎ?xaU??B ?R>W?ޜ! ?N?ZW?Xq?d$o3?YW:k?;fՁ;?솥w?c_?P2 ? ?Z#Cy?ְGH-?x~? le&? [?"z?=jL?.(?i+v?h.u?4k ?6i?j-HG?b??1qg??N?nM8w?ܟ1?89 ?f}?]*?xMU[?Z_w?ZX+Z?O?#?$,a?;Da?*l?H'$ש?:hoʭ?/8"=?w? ?2|?^LE?q ?D:W|?lJOd?}? Q?`??\(N?0Q??w?%v?x1(?@?@/?xK?>?1dsN?,ż ?#ASDF BLOCK INDEX %YAML 1.1 --- [359] ... 
asdf-1.3.3/asdf/commands/tests/data/frames1.asdf0000644000175000017500000000732113243547254020763 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev846} frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat, blurg] name: CelestialFrame reference_frame: {type: ICRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4_noeterms} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: galactic} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: galcen_dec: !unit/quantity-1.1.0 unit: rad value: 1.0 galcen_distance: !unit/quantity-1.1.0 unit: m value: 5.0 galcen_ra: !unit/quantity-1.1.0 unit: deg value: 45.0 roll: !unit/quantity-1.1.0 unit: deg value: 3.0 type: galactocentric z_sun: !unit/quantity-1.1.0 unit: pc value: 3.0 unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: obsgeoloc: - !unit/quantity-1.1.0 unit: m value: 3.0856775814671916e+16 - !unit/quantity-1.1.0 unit: m value: 9.257032744401574e+16 - !unit/quantity-1.1.0 unit: m value: 6.1713551629343834e+19 obsgeovel: - !unit/quantity-1.1.0 unit: m s-1 value: 2.0 - !unit/quantity-1.1.0 unit: m s-1 value: 1.0 - !unit/quantity-1.1.0 unit: m s-1 value: 8.0 obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: GCRS unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: equinox: !time/time-1.1.0 J2000.000 obsgeoloc: - !unit/quantity-1.1.0 unit: m value: 3.0856775814671916e+16 - !unit/quantity-1.1.0 unit: m value: 9.257032744401574e+16 - !unit/quantity-1.1.0 unit: m value: 6.1713551629343834e+19 obsgeovel: - !unit/quantity-1.1.0 unit: m s-1 value: 2.0 - !unit/quantity-1.1.0 unit: m s-1 value: 1.0 - !unit/quantity-1.1.0 unit: m s-1 value: 8.0 obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: precessed_geocentric unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] ... 
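The frames*.asdf files in this test-data directory exercise round-tripping of astropy reference frames through the wcs/celestial_frame tags. The snippet below is a minimal illustrative sketch, not part of the package: it assumes the gwcs and astropy packages that these tags depend on are installed, and it uses the test file shown above purely as an example input.

import asdf

# Open the test file and list the deserialized coordinate frames.
# Each entry under 'frames' is expected to come back as a gwcs frame
# object whose reference_frame is an astropy builtin frame instance.
with asdf.AsdfFile.open('frames1.asdf') as ff:
    for frame in ff.tree['frames']:
        print(frame.name, type(frame.reference_frame).__name__)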
asdf-1.3.3/asdf/commands/tests/data/frames.diff0000644000175000017500000000233613243547254020676 0ustar dandan00000000000000tree: asdf_library: version: > 1.2.2.dev858 < 1.2.2.dev846 > frames: > - reference_frame: > galcen_coord: > dec: > unit: > deg > value: > -28.936175 > ra: > unit: > deg > value: > 266.4051 > wrap_angle: > unit: > deg > value: > 360.0 > galcen_v_sun: > - unit: > km s-1 > value: > 11.1 > - unit: > km s-1 > value: > 232.24 > - unit: > km s-1 > value: > 7.25 < galcen_dec: < unit: < rad < value: < 1.0 < galcen_ra: < unit: < deg < value: < 45.0 asdf-1.3.3/asdf/commands/tests/data/frames0.asdf0000644000175000017500000001013713243547254020761 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev858} frames: - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: ICRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: FK5} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000', obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000', type: FK4_noeterms} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: {type: galactic} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: galcen_coord: !wcs/icrs_coord-1.1.0 dec: {value: -28.936175} ra: value: 266.4051 wrap_angle: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 360.0} galcen_distance: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 5.0} galcen_v_sun: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 11.1} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 232.24} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km s-1, value: 7.25} roll: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 3.0} type: galactocentric z_sun: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pc, value: 3.0} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 3.0856775814671916e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 9.257032744401574e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 6.1713551629343834e+19} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: GCRS unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: 
{obstime: !time/time-1.1.0 '2010-01-01 00:00:00.000', type: CIRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [x, y, z] axes_order: [0, 1, 2] name: CelestialFrame reference_frame: {obstime: !time/time-1.1.0 '2022-01-03 00:00:00.000', type: ITRS} unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] - !wcs/celestial_frame-1.1.0 axes_names: [lon, lat] name: CelestialFrame reference_frame: equinox: !time/time-1.1.0 J2000.000 obsgeoloc: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 3.0856775814671916e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 9.257032744401574e+16} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m, value: 6.1713551629343834e+19} obsgeovel: - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 2.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 1.0} - !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 m s-1, value: 8.0} obstime: !time/time-1.1.0 2010-01-01 00:00:00.000 type: precessed_geocentric unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] ... asdf-1.3.3/asdf/commands/tests/data/blocks.diff0000644000175000017500000000042413243547254020672 0ustar dandan00000000000000tree: foobar: bizbaz: > green < red datatype: > uint64 < float64 shape: - > 9000 < 10000 ndarrays differ by shape, datatype and contents asdf-1.3.3/asdf/commands/tests/data/block0.asdf0000644000175000017500000023512513243547254020604 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev870} foobar: !core/ndarray-1.0.0 source: 0 datatype: uint64 byteorder: little bizbaz: green shape: [9000] ... BLK0888٭ٞZv/$Gt?]?Xkr?ڎ??rӓ?Xc^?]j7*?pn')?pD/±?5?{G8?x_\|?f^?wz^?T?nm?HS?'?`髎??v\"?(V_\?M_?@@U?jNg?P!H?#?N? Q?F{?ˌ2?@ J/'?o?XjH?sG'?L/"?㾉?*@7|]? [6 ?6L?[Px??w1`?Q"uݺ?N?.? mR?% ˬ?y?֗q*?* ?R?WcR\?ųM3@?|{.$?zt? m?Ωg?*I? ?1?ƟX?r`z?ۢ-A?̩p;?5ì? K?گ5K%?۬U?,8?i'?sA?1eU?x&2,?th+#?Gav?{ޚN?"2 >?bzxx?JD'A?H-?vЍZ?I#?4?$"sm+?ٷR?%?ȴ'O?W?f@?ꐐ<#?xVz?6X(&?\? nR9?: C?r?F.?i)R?b ?4*9s?+ ?״*0?0Ee?n?8ot7Q??:?S?D9?c:?|ey?bT?*\?:E.?Ui?(ew?)c?Ҹi {?4tƼ?^p^?[ XR?ȗ{?;|{?w}N?tNP?44$?lK? ^?$sLm?VO?Xj?oU?f"%p?a_??`&\-|?t@?Xd?;#?Au-?"қ?g^}?N%?xLt?&=?$?2 ?o&?V_?\?6Ǖ| ?%ĶS?]:?4X?0\?{&̀?p?fX?%?`0;?o?u"?..?|RQ?L,Ku?gѠ?6?`&J?ڢ?G̍?ć?d͊? c.?xdx?!Q,S?wa2!?@$%?f} ?-Z?8H?H ?ХM?D?#?x-Fc?P?s [?8)7X?k|?P)w"?|V?xw{?Ry/Ũ?n1`2?,[t?^ Hm?fY?ẃ3Ƭ?)_?w',?N?<^N?ws?X(64?H֥?x[?l1?9&{(h?ټ?:Р?6?*{un_y?:)^'x?~8?kB5!?I'E?Xcb?l?.? ?(?i?Ȑc0Ӡ?.q?,N?r'H?:?vM?=\H ?oH.)?4s*v?Q}?;_W?hVS?.N"?*MV?VfF?U7?Њ)g(?h4^k??s?_9_?&L ?8 #偹?үI%?[i?5o?]?% @?X,?XGs?[̑?N@YTA?ωCgJ?¢oQ? hvϰ?Lk?rca?_L?E? g?s?Jq??XRfI?pl?hW^?@)?-?*?p@?q_.? P?J7?&/?&?bɞ?$h?O2?Pi?HA-S?-;?7|'?N?6>Y?nX S?&:?.?V3??@FN?xrXF?9~?Cr?P+JF?^J0?企??zX?Zހ?Tj?/?av?j=:E?lcbt?9yq?{?gue?`舿?0d'?nI?>J?L:?ظv?4:?(EHr6{?1f?`sC?]?X}?)ۄ?h7i?bOB?٭)?;gEH?|%G,?*vy?͉3?"(?ł&?̳FR?p?Nz??K >س?N?Hx`?a ?0gO"[?\?dz+? |O?A߃?kx }s?*M?7$cz _?Q?f?0Z?8_? 8 o??򣋩'?T1"?3 ?V?9?)p?pJhO?SD"}?0d)?/"u?01ʄ?pY[v??j?ff??-?v+,/i? JB?fՕ?8T/,?fu?v`?Sd?FnJ?~L!? (*??qM??d$58?L ^y? 7c?.,?cb? R7[?؃:? okX? |%? s܋?T?j\0 ?68?}?n*h?$ƹ??j? ?E%.?)5?6?wMc?'?p2?|c?[?z!?VCf?R1#'?P@`J?g?RA? _?n??HbG?RD?c?0=»?0PQ?>z?#Yˮ?n>d?6b?葆]G?E]?3ӹcW?f??|z ^?mz?2?Bf?ʔQ+?#5l?RkM?HUĉ ?C3?hx?a@Ҿ?6 #)?ʑ?P2'?.ݒ[? +?m?@ q2?8~?D ?V%%?8F? ??pzώ?M>?~?` n 4?غe?Y?&?3?-?(?4Iwn?H2?c(a?(F?;z!y?=$uiJ?^?&),#?%??8N=a?\Z?c?8?wj?<-_?y?,&?-#?Bb?5?cIY?%?ʴy?d~?P9'I?(?,2:?کnz?{C?h%Ag?rz?x ?L0? 
?NNǧdx?e?\ UD?8jc?8,u?!?+ ?Giuy?ڠ?oV?܀h?;S6?~S?&tF?}H?A\A(?D?Rm` ?$'?̕?(4,?fCm?<1t?]Rt?l쭮?yw΄??+I??Q?ARk?PN=C?,!G3?؞zr(?$s?{-?AI?Ca?\# ?rC?c*D?-ThT??4kĝ?e)?fOcJ?IY?2?o?` ?;:q?!I?ZW=?`h? ϭU?^1??@.=׊?*[??F i?B$Sõ? #eV?3?r+?KE?~^E?VyY}?~U??<6}?e U?y-?>?~4?]b?m(?9kWo?_mr? V?zpW#?l?HtzE?ݎA,.?@+RB?vt~?%s;l?pIxTB? ;y?x|{?BI\?P#4 $?,ڎ?Xp?ɞKu7?v$?63̍ ?'-?+"k?g?!{?5-?c崯?ps3r?W?0k!?x+:I?t!?Lw?p|?x$?c3.?*n▭?D,Y]?o}mn?0!D?tW?0p??@ V۽?h#᳢[??U|?sf~?&1?TŽ?@X2c?.??)R?oA;#?:_N?4٥?Р? vj?Ь83?v,ПЉ?K_?b_?v1Б?*Ak?,|?f6j??S?P"[N? }?,cf?S7?4+ K?b:?Nb?`du+?0t@Ϟ?v?hLػ?.Zʁ?!Z?ؠe?W?.:J?Pq詰???FO~??d?j?hz?v%?oB f?GV?PC ?:sm?;?Q2?003??\??7lU?ig?Nj?ot?l?v$ֆ?P9G?CzU?_d!?.m?-?^V`"!?§f?(%?bZ?P¬?bUa?-S}B?H5yb?)͗iʾ?:◠c?X8??FAm?"u?hD?B?U)?얛?>? Q@?WiX?hM.x:?U+j?e(?Hƪ?\PLF?(@ł?i@^?Uf?>,l? J?)?.?~P2?0U?8L?8{?1-?;0a;?Xq|,?ț?H7?q4'?? 7|?x)?|C ?DGe?h^+ ?8H?{1?D2N}? ,K?76g]?PB?(kl?(<b?iE??Ž?sq?(L?0?@kyĭ{?6ayc?J0?0AO?R є?/PiT?pƑi?xmΪ?;?csLY??*.?8?vc?y4?0PP?,/D?N_M$?d?^ ?/+ A?zx??0dZ?&E?F&?Z1W^?"5-?.#t?yP?>"?4t'?us?q?mO?{Q?(R?t=?H&2?m[?lb?p'?h?a\?i?`a? sH?8?;h#ף?6@]C?P 0?hڷ?:єI?o8?(ɭI?qN_|6? ?JK?X @?j-p? .,(l?Trgv?;?hZd?VU?u?"PTԬ?F \C?xr?:nM ?Tz$!?T9?k?.-]? adF?'Q?}lH?E&?u?o`G?xRϱ?胜֙?8;Bo?d z7?h/z-?ɮA?Z???0n?Xj5?2o?`V?^-;:?7?Gגev?/{??GYM|?@@)GÃ?ذ>N^'?i~n?aI?6XnL?x;뿤?l0?`??#$?u?B?aC?h/9~? 9?Zn?Z ?PqJq?8~?N 3? !?aEo?-j r?#ASDF BLOCK INDEX %YAML 1.1 --- [359] ... asdf-1.3.3/asdf/commands/tests/__init__.py0000644000175000017500000000025413246003441017750 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function asdf-1.3.3/asdf/commands/__init__.py0000644000175000017500000000043613246003441016610 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .exploded import * from .to_yaml import * from .defragment import * from .diff import * from .tags import * asdf-1.3.3/asdf/commands/exploded.py0000644000175000017500000000664113246003441016661 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Contains commands for dealing with exploded and imploded forms. """ from __future__ import absolute_import, division, unicode_literals, print_function import os from .main import Command from .. import AsdfFile __all__ = ['implode', 'explode'] class Implode(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("implode"), help="Implode a ASDF file.", description="""Combine a ASDF file, where the data may be stored in multiple ASDF files, into a single ASDF file.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to implode.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file. If not provided, it will be the name of the input file with "_all" appended.""") parser.add_argument( "--resolve-references", "-r", action="store_true", help="""Resolve all references and store them directly in the output file.""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return implode(args.filename[0], args.output, args.resolve_references) def implode(input, output=None, resolve_references=False): """ Implode a given ASDF file, which may reference external data, back into a single ASDF file. Parameters ---------- input : str or file-like object The input file. 
output : str of file-like object The output file. resolve_references : bool, optional If `True` resolve all external references before saving. """ if output is None: base, ext = os.path.splitext(input) output = base + '_all' + '.asdf' with AsdfFile.open(input) as ff: ff2 = AsdfFile(ff) if resolve_references: ff2.resolve_references() ff2.write_to(output, all_array_storage='internal') class Explode(Command): @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("explode"), help="Explode a ASDF file.", description="""From a single ASDF file, create a set of ASDF files where each data block is stored in a separate file.""") parser.add_argument( 'filename', nargs=1, help="""The ASDF file to explode.""") parser.add_argument( "--output", "-o", type=str, nargs="?", help="""The name of the output file. If not provided, it will be the name of the input file with "_exploded" appended.""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return explode(args.filename[0], args.output) def explode(input, output=None): """ Explode a given ASDF file so each data block is in a separate file. Parameters ---------- input : str or file-like object The input file. output : str of file-like object The output file. """ if output is None: base, ext = os.path.splitext(input) output = base + '_exploded' + '.asdf' with AsdfFile.open(input) as ff: ff.write_to(output, all_array_storage='external') asdf-1.3.3/asdf/commands/main.py0000644000175000017500000000426213246003441015776 0ustar dandan00000000000000# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import argparse import logging import sys import six from .. import util # This list is ordered in order of average workflow command_order = [ 'Explode', 'Implode', ] class Command(object): @classmethod def setup_arguments(cls, subparsers): raise NotImplementedError() @classmethod def run(cls, args): raise NotImplementedError() def make_argparser(): """ Most of the real work is handled by the subcommands in the commands subpackage. 
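    As a rough illustration (the file names here are placeholders, not files
    shipped with the package), a command-line session built on this parser
    might look like::

        asdftool explode data.asdf -o exploded.asdf
        asdftool implode exploded.asdf --resolve-references
        asdftool tags --display-classes
        asdftool help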
""" def help(args): parser.print_help() return 0 parser = argparse.ArgumentParser( "asdftool", description="Commandline utilities for managing ASDF files.") parser.add_argument( "--verbose", "-v", action="store_true", help="Increase verbosity") subparsers = parser.add_subparsers( title='subcommands', description='valid subcommands') help_parser = subparsers.add_parser( str("help"), help="Display usage information") help_parser.set_defaults(func=help) commands = dict((x.__name__, x) for x in util.iter_subclasses(Command)) for command in command_order: commands[str(command)].setup_arguments(subparsers) del commands[command] for name, command in sorted(six.iteritems(commands)): command.setup_arguments(subparsers) return parser, subparsers def main_from_args(args): parser, subparsers = make_argparser() args = parser.parse_args(args) # Only needed for Python 3, apparently, but can't hurt if not hasattr(args, 'func'): parser.print_help() return 2 try: result = args.func(args) except RuntimeError as e: logging.error(six.text_type(e)) return 1 except IOError as e: logging.error(six.text_type(e)) return e.errno if result is None: result = 0 return result def main(args=None): if args is None: args = sys.argv[1:] sys.exit(main_from_args(args)) asdf-1.3.3/asdf/commands/tags.py0000644000175000017500000000272713246003441016014 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Implementation of command for displaying available tags in asdf """ from __future__ import absolute_import, division, unicode_literals, print_function import sys from .main import Command from .. import AsdfFile __all__ = ['list_tags'] class TagLister(Command): # pragma: no cover """This class is the plugin implementation for the asdftool runner.""" @classmethod def setup_arguments(cls, subparsers): parser = subparsers.add_parser( str("tags"), help="List currently available tags", description="""Lists currently available tags.""") parser.add_argument( '-d', '--display-classes', action='store_true', help="""Display associated class names in addition to tags""") parser.set_defaults(func=cls.run) return parser @classmethod def run(cls, args): return list_tags(display_classes=args.display_classes) def _qualified_name(_class): return "{}.{}".format(_class.__module__, _class.__name__) def list_tags(display_classes=False, iostream=sys.stdout): """Function to list tags""" af = AsdfFile() type_by_tag = af._extensions._type_index._type_by_tag tags = sorted(type_by_tag.keys()) for tag in tags: string = str(tag) if display_classes: string += ": " + _qualified_name(type_by_tag[tag]) iostream.write(string + '\n') asdf-1.3.3/asdf/versioning.py0000644000175000017500000001263213246003441015434 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ This module deals with things that change between different versions of the ASDF spec. """ from __future__ import absolute_import, division, unicode_literals, print_function import six import yaml from functools import total_ordering if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_loader = yaml.SafeLoader from semantic_version import Version, SpecItem, Spec from . import generic_io from . import resolver from . 
import util _version_map = {} def get_version_map(version): version_map = _version_map.get(version) if version_map is None: version_map_path = resolver.DEFAULT_URL_MAPPING[0][1].replace( '{url_suffix}', 'asdf/version_map-{0}'.format(version)) try: with generic_io.get_file(version_map_path, 'r') as fd: version_map = yaml.load( fd, Loader=_yaml_base_loader) except: raise ValueError( "Could not load version map for version {0}".format(version)) _version_map[version] = version_map return version_map @total_ordering class AsdfVersionMixin(object): """This mix-in is required in order to impose the total ordering that we want for ``AsdfVersion``, rather than accepting the total ordering that is already provided by ``Version`` from ``semantic_version``. Defining these comparisons directly in ``AsdfVersion`` and applying ``total_ordering`` there will not work since ``total_ordering`` only defines comparison operations if they do not exist already and the base class ``Version`` already defines these operations. """ def __eq__(self, other): # Seems like a bit of a hack... if isinstance(other, SpecItem): return other == self if isinstance(other, (six.string_types, tuple, list)): other = AsdfVersion(other) return Version.__eq__(self, other) def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): if isinstance(other, (six.string_types, tuple, list)): other = AsdfVersion(other) return Version.__lt__(self, other) def __hash__(self): # To be honest, I'm not sure why I had to make this explicit return Version.__hash__(self) class AsdfVersion(AsdfVersionMixin, Version): """This class adds features to the existing ``Version`` class from the ``semantic_version`` module. Namely, it allows ``Version`` objects to be constructed from tuples and lists as well as strings, and it allows ``Version`` objects to be compared with tuples, lists, and strings, instead of just other ``Version`` objects. If any of these features are added to the ``Version`` class itself (as requested in https://github.com/rbarrois/python-semanticversion/issues/52), then this class will become obsolete. 
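    As a rough doctest-style illustration of the added behaviour (using only
    the string/tuple/list conversions described above)::

        >>> AsdfVersion('1.1.0') == (1, 1, 0)
        True
        >>> AsdfVersion((1, 0, 0)) < '1.1.0'
        True
        >>> AsdfVersion([1, 1, 0]) == '1.1.0'
        True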
""" def __init__(self, version): # This is a dirty hack and you know it if isinstance(version, AsdfVersion): version = str(version) if isinstance(version, (tuple, list)): version = '.'.join([str(x) for x in version]) super(AsdfVersion, self).__init__(version) class AsdfSpec(SpecItem, Spec): def __init__(self, *args, **kwargs): super(AsdfSpec, self).__init__(*args, **kwargs) def match(self, version): if isinstance(version, (six.string_types, tuple, list)): version = AsdfVersion(version) return super(AsdfSpec, self).match(version) def __iterate_versions(self, versions): for v in versions: if isinstance(v, (six.string_types, tuple, list)): v = AsdfVersion(v) yield v def select(self, versions): return super(AsdfSpec, self).select(self.__iterate_versions(versions)) def filter(self, versions): return super(AsdfSpec, self).filter(self.__iterate_versions(versions)) def __eq__(self, other): """Equality between Spec and Version, string, or tuple, means match""" if isinstance(other, SpecItem): return super(AsdfSpec, self).__eq__(other) return self.match(other) def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return super(AsdfSpec, self).__hash__() default_version = AsdfVersion('1.1.0') supported_versions = [ AsdfVersion('1.0.0'), AsdfVersion('1.1.0') ] class VersionedMixin(object): _version = default_version @property def version(self): return self._version @version.setter def version(self, version): if version not in supported_versions: human_versions = util.human_list( [str(x) for x in supported_versions]) raise ValueError( "asdf only understands how to handle ASDF versions {0}. " "Got '{1}'".format(human_versions, version)) self._version = version @property def version_string(self): return str(self._version) @property def version_map(self): try: version_map = get_version_map(self.version_string) except ValueError: raise ValueError( "Don't have information about version {0}".format( self.version_string)) return version_map asdf-1.3.3/asdf/_internal_init.py0000644000175000017500000001054213246003441016245 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function __all__ = ['__version__', '__githash__', 'test'] # this indicates whether or not we are in the package's setup.py try: _ASDF_SETUP_ except NameError: from sys import version_info if version_info < (2, 7, 0): # pragma: no cover raise ImportError("ASDF requires Python 2.7 or newer") if version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASDF_SETUP_ = False try: from .version import version as __version__ except ImportError: __version__ = '' try: from .version import githash as __githash__ except ImportError: __githash__ = '' # set up the test command def _get_test_runner(): import os from astropy.tests.helper import TestRunner return TestRunner(os.path.dirname(__file__)) def test(package=None, test_path=None, args=None, plugins=None, verbose=False, pastebin=None, remote_data=False, pep8=False, pdb=False, coverage=False, open_files=False, **kwargs): """ Run the tests using `py.test `__. A proper set of arguments is constructed and passed to `pytest.main`_. .. _py.test: http://pytest.org/latest/ .. _pytest.main: http://pytest.org/latest/builtin.html#pytest.main Parameters ---------- package : str, optional The name of a specific package to test, e.g. 'io.fits' or 'utils'. If nothing is specified all default tests are run. 
test_path : str, optional Specify location to test by path. May be a single file or directory. Must be specified absolutely or relative to the calling directory. args : str, optional Additional arguments to be passed to pytest.main_ in the ``args`` keyword argument. plugins : list, optional Plugins to be passed to pytest.main_ in the ``plugins`` keyword argument. verbose : bool, optional Convenience option to turn on verbose output from py.test_. Passing True is the same as specifying ``'-v'`` in ``args``. pastebin : {'failed','all',None}, optional Convenience option for turning on py.test_ pastebin output. Set to ``'failed'`` to upload info for failed tests, or ``'all'`` to upload info for all tests. remote_data : bool, optional Controls whether to run tests marked with @remote_data. These tests use online data and are not run by default. Set to True to run these tests. pep8 : bool, optional Turn on PEP8 checking via the `pytest-pep8 plugin `_ and disable normal tests. Same as specifying ``'--pep8 -k pep8'`` in ``args``. pdb : bool, optional Turn on PDB post-mortem analysis for failing tests. Same as specifying ``'--pdb'`` in ``args``. coverage : bool, optional Generate a test coverage report. The result will be placed in the directory htmlcov. open_files : bool, optional Fail when any tests leave files open. Off by default, because this adds extra run time to the test suite. Requires the `psutil `_ package. parallel : int, optional When provided, run the tests in parallel on the specified number of CPUs. If parallel is negative, it will use the all the cores on the machine. Requires the `pytest-xdist `_ plugin installed. Only available when using Astropy 0.3 or later. kwargs Any additional keywords passed into this function will be passed on to the astropy test runner. This allows use of test-related functionality implemented in later versions of astropy without explicitly updating the package template. """ try: import astropy except ImportError: raise ImportError("Running the tests requires astropy") test_runner = _get_test_runner() return test_runner.run_tests( package=package, test_path=test_path, args=args, plugins=plugins, verbose=verbose, pastebin=pastebin, remote_data=remote_data, pep8=pep8, pdb=pdb, coverage=coverage, open_files=open_files, **kwargs) asdf-1.3.3/asdf/generic_io.py0000644000175000017500000011700613246003441015355 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ This provides abstractions around a number of different file and stream types available to Python so that they are always used in the most efficient way. The classes in this module should not be instantiated directly, but instead, one should use the factory function `get_file`. """ from __future__ import absolute_import, division, unicode_literals, print_function from distutils.version import LooseVersion import io import math import os import platform import re import sys import tempfile from os import SEEK_SET, SEEK_CUR, SEEK_END import six from six.moves import xrange from six.moves.urllib import parse as urlparse from six.moves.urllib.request import url2pathname import numpy as np from .extern import atomicfile from . import util __all__ = ['get_file', 'resolve_uri', 'relative_uri'] _local_file_schemes = ['', 'file'] if sys.platform.startswith('win'): # pragma: no cover import string _local_file_schemes.extend(string.ascii_letters) def _check_bytes(fd, mode): """ Checks whether a given file-like object is opened in binary mode. 
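    Illustrative example (any in-memory stream works here; the point is only
    the text/binary distinction)::

        >>> import io
        >>> _check_bytes(io.BytesIO(), 'r')
        True
        >>> _check_bytes(io.StringIO(), 'r')
        False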
""" # On Python 3, doing fd.read(0) on an HTTPResponse object causes # it to not be able to read any further, so we do this different # kind of check, which, unfortunately, is not as robust. if isinstance(fd, io.IOBase): if isinstance(fd, io.TextIOBase): return False return True if 'r' in mode: x = fd.read(0) if not isinstance(x, bytes): return False elif 'w' in mode: if six.PY2: # pragma: no cover if isinstance(fd, file): if 'b' not in fd.mode: return False elif six.PY3: try: fd.write(b'') except TypeError: return False return True if (sys.platform == 'darwin' and LooseVersion(platform.mac_ver()[0]) < LooseVersion('10.9')): # pragma: no cover def _array_fromfile(fd, size): chunk_size = 1024 ** 3 if size < chunk_size: return np.fromfile(fd, dtype=np.uint8, count=size) else: array = np.empty(size, dtype=np.uint8) for beg in xrange(0, size, chunk_size): end = min(size, beg + chunk_size) array[beg:end] = np.fromfile(fd, dtype=np.uint8, count=end - beg) return array else: def _array_fromfile(fd, size): return np.fromfile(fd, dtype=np.uint8, count=size) _array_fromfile.__doc__ = """ Load a binary array from a real file object. Parameters ---------- fd : real file object size : integer Number of bytes to read. """ def _array_tofile_chunked(write, array, chunksize): # pragma: no cover array = array.view(np.uint8).flatten() for i in xrange(0, array.nbytes, chunksize): write(array[i:i + chunksize].data) def _array_tofile_simple(fd, write, array): return write(array.data) if sys.platform == 'darwin': # pragma: no cover def _array_tofile(fd, write, array): OSX_WRITE_LIMIT = 2 ** 32 if fd is None or array.nbytes >= OSX_WRITE_LIMIT and array.nbytes % 4096 == 0: return _array_tofile_chunked(write, array, OSX_WRITE_LIMIT) return _array_tofile_simple(fd, write, array) elif sys.platform.startswith('win'): # pragma: no cover def _array_tofile(fd, write, array): WIN_WRITE_LIMIT = 2 ** 30 return _array_tofile_chunked(write, array, WIN_WRITE_LIMIT) else: _array_tofile = _array_tofile_simple _array_tofile.__doc__ = """ Write an array to a file. Parameters ---------- fd : real file object If fd is provided, must be a real system file as supported by numpy.tofile. May be None, in which case all writing will be done through the `write` method. write : callable A callable that writes bytes to the file. array : Numpy array Must be an underlying data array, not a view. """ def resolve_uri(base, uri): """ Resolve a URI against a base URI. """ if base is None: base = '' resolved = urlparse.urljoin(base, uri) parsed = urlparse.urlparse(resolved) if parsed.path != '' and not parsed.path.startswith('/'): raise ValueError( "Resolved to relative URL") return resolved def relative_uri(source, target): """ Make a relative URI from source to target. """ su = urlparse.urlparse(source) tu = urlparse.urlparse(target) extra = list(tu[3:]) relative = None if tu[0] == '' and tu[1] == '': if tu[2] == su[2]: relative = '' elif not tu[2].startswith('/'): relative = tu[2] elif su[0:2] != tu[0:2]: return target if relative is None: if tu[2] == su[2]: relative = '' else: relative = os.path.relpath(tu[2], os.path.dirname(su[2])) if relative == '.': relative = '' relative = urlparse.urlunparse(["", "", relative] + extra) return relative class _TruncatedReader(object): """ Reads until a given delimiter is found. Only works with RandomAccessFile and InputStream, though as this is a private class, this is not explicitly enforced. 
""" def __init__(self, fd, delimiter, readahead_bytes, delimiter_name=None, include=False, initial_content=b'', exception=True): self._fd = fd self._delimiter = delimiter self._readahead_bytes = readahead_bytes if delimiter_name is None: delimiter_name = delimiter self._delimiter_name = delimiter_name self._include = include self._initial_content = initial_content self._trailing_content = b'' self._exception = exception self._past_end = False def read(self, nbytes=None): if self._past_end: content = self._trailing_content[:nbytes] if nbytes is None: self._trailing_content = b'' else: self._trailing_content = self._trailing_content[nbytes:] return content if nbytes is None: content = self._fd._peek() elif nbytes <= len(self._initial_content): content = self._initial_content[:nbytes] self._initial_content = self._initial_content[nbytes:] return content else: content = self._fd._peek(nbytes - len(self._initial_content) + self._readahead_bytes) if content == b'': if self._exception: raise ValueError("{0} not found".format(self._delimiter_name)) self._past_end = True return content index = re.search(self._delimiter, content) if index is not None: if self._include: index = index.end() else: index = index.start() content = content[:index] self._past_end = True elif nbytes is None and self._exception: # Read the whole file and didn't find the delimiter raise ValueError("{0} not found".format(self._delimiter_name)) else: if nbytes: content = content[:nbytes - len(self._initial_content)] self._fd.fast_forward(len(content)) if self._initial_content: content = self._initial_content + content self._initial_content = b'' if self._past_end and nbytes: self._trailing_content = content[nbytes:] content = content[:nbytes] return content @six.add_metaclass(util.InheritDocstrings) class GenericFile(object): """ Base class for an abstraction layer around a number of different file-like types. Each of its subclasses handles a particular kind of file in the most efficient way possible. This class should not be instantiated directly, but instead the factory function `get_file` should be used to get the correct subclass for the given file-like object. """ def __init__(self, fd, mode, close=False, uri=None): """ Parameters ---------- fd : file-like object The particular kind of file-like object must match the subclass of `GenericFile` being instantiated. mode : str Must be ``"r"`` (read), ``"w"`` (write), or ``"rw"`` (read/write). close : bool, optional When ``True``, close the given `fd` in the ``__exit__`` method, i.e. at the end of the with block. Should be set to ``True`` when this object "owns" the file object. Default: ``False``. uri : str, optional The file path or URI used to open the file. This is used to resolve relative URIs when the file refers to external sources. """ if not _check_bytes(fd, mode): raise ValueError( "File-like object must be opened in binary mode.") self._fd = fd self._mode = mode self._close = close self._blksize = io.DEFAULT_BUFFER_SIZE self._size = None self._uri = uri def __enter__(self): return self def __exit__(self, type, value, traceback): if self._close: if hasattr(self._fd, '__exit__'): self._fd.__exit__(type, value, traceback) else: self._fd.close() @property def block_size(self): return self._blksize @property def mode(self): """ The mode of the file. Will be ``'r'``, ``'w'`` or ``'rw'``. """ return self._mode @property def uri(self): """ The base uri of the file. 
""" return self._uri def read(self, size=-1): """ Read at most size bytes from the file (less if the read hits EOF before obtaining size bytes). If the size argument is negative or omitted, read all data until EOF is reached. The bytes are returned as a `bytes` object. An empty `bytes` object is returned when EOF is encountered immediately. Only available if `readable` returns `True`. """ # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' return self._fd.read(size) def read_block(self): """ Read a "block" from the file. For real filesystem files, the block is the size of a native filesystem block. """ return self.read(self._blksize) def read_blocks(self, size): """ Read ``size`` bytes of data from the file, one block at a time. The result is a generator where each value is a bytes object. """ i = 0 for i in xrange(0, size - self._blksize, self._blksize): yield self.read(self._blksize) if i < size: yield self.read(size - i) if sys.version_info[:2] == (2, 7) and sys.version_info[2] < 4: # pragma: no cover # On Python 2.7.x prior to 2.7.4, the buffer does not support the # new buffer interface, and thus can't be written directly. See # issue #10221. def write(self, content): if isinstance(content, buffer): self._fd.write(bytes(content)) else: self._fd.write(content) else: def write(self, content): self._fd.write(content) write.__doc__ = """ Write a string to the file. There is no return value. Due to buffering, the string may not actually show up in the file until the flush() or close() method is called. Only available if `writable` returns `True`. """ def write_array(self, array): _array_tofile(None, self.write, array) def seek(self, offset, whence=0): """ Set the file's current position. Only available if `seekable` returns `True`. Parameters ---------- offset : integer Offset, in bytes. whence : integer, optional The `whence` argument is optional and defaults to SEEK_SET or 0 (absolute file positioning); other values are SEEK_CUR or 1 (seek relative to the current position) and SEEK_END or 2 (seek relative to the file’s end). """ result = self._fd.seek(offset, whence) self.tell() return result def tell(self): """ Return the file's current position, in bytes. Only available in `seekable` returns `True`. """ return self._fd.tell() def flush(self): """ Flush the internal buffer. """ self._fd.flush() def close(self): """ Close the file. The underlying file-object will only be closed if ``close=True`` was passed to the constructor. """ if self._close: self._fd.close() def truncate(self, size=None): """ Truncate the file to the given size. """ raise NotImplementedError() def writable(self): """ Returns `True` if the file can be written to. """ return 'w' in self.mode def readable(self): """ Returns `True` if the file can be read from. """ return 'r' in self.mode def seekable(self): """ Returns `True` if the file supports random access (`seek` and `tell`). """ return False def can_memmap(self): """ Returns `True` if the file supports memmapping. """ return False def is_closed(self): """ Returns `True` if the underlying file object is closed. """ return self._fd.closed def read_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Reads until a match for a given regular expression is found. Parameters ---------- delimiter : str A regular expression. 
readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Returns ------- content : bytes The content from the current position in the file, up to the delimiter. Includes the delimiter if `include` is ``True``. Raises ------ ValueError : If the delimiter is not found before the end of the file. """ buff = io.BytesIO() reader = self.reader_until( delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) while True: content = reader.read(self.block_size) buff.write(content) if len(content) < self.block_size: break return buff.getvalue() def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Returns a readable file-like object that treats the given delimiter as the end-of-file. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Raises ------ ValueError : If the delimiter is not found before the end of the file. """ raise NotImplementedError() def seek_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): """ Seeks in the file until a match for a given regular expression is found. This is similar to ``read_until``, except the intervening content is not retained. Parameters ---------- delimiter : str A regular expression. readahead_bytes : int The number of bytes to read ahead to make sure the delimiter isn't on a block boundary. delimiter_name : str, optional The name of the delimiter. Used in error messages if the delimiter is not found. If not provided, the raw content of `delimiter` will be used. include : bool, optional When ``True``, include the delimiter in the result. initial_content : bytes, optional Additional content to include at the beginning of the first read. exception : bool, optional If ``True`` (default), raise an exception if the end marker isn't found. Returns ------- content : bytes The content from the current position in the file, up to the delimiter. Includes the delimiter if `include` is ``True``. Raises ------ ValueError : If the delimiter is not found before the end of the file. 
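Example (editor's addition; a hedged sketch, not part of the original
docstring). Seeking past a delimiter in an in-memory file::

    import io
    from asdf import generic_io

    fd = generic_io.get_file(io.BytesIO(b'header---payload'), mode='r')
    assert fd.seek_until(b'---', 5, delimiter_name='separator') is True
    assert fd.read() == b'payload'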
""" reader = self.reader_until( delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) while True: try: content = reader.read(self.block_size) except ValueError: return False if content == b'': return True def fast_forward(self, size): """ Move the file position forward by `size`. """ raise NotImplementedError() def clear(self, nbytes): """ Write nbytes of zeros. """ blank_data = b'\0' * self.block_size for i in xrange(0, nbytes, self.block_size): length = min(nbytes - i, self.block_size) self.write(blank_data[:length]) def memmap_array(self, offset, size): """ Memmap a chunk of the file into a `np.core.memmap` object. Parameters ---------- offset : integer The offset, in bytes, in the file. size : integer The size of the data to memmap. Returns ------- array : np.core.memmap """ raise NotImplementedError() def read_into_array(self, size): """ Read a chunk of the file into a uint8 array. Parameters ---------- size : integer The size of the data. Returns ------- array : np.core.memmap """ buff = self.read(size) return np.frombuffer(buff, np.uint8, size, 0) class GenericWrapper(object): """ A wrapper around a `GenericFile` object so that closing only happens in the very outer layer. """ def __init__(self, fd): self._fd = fd def __enter__(self): return self def __exit__(self, type, value, traceback): pass def __getattr__(self, attr): return getattr(self._fd, attr) class RandomAccessFile(GenericFile): """ The base class of file types that support random access. """ def seekable(self): return True def _peek(self, size=-1): cursor = self.tell() content = self.read(size) self.seek(cursor, SEEK_SET) return content def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): return _TruncatedReader( self, delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) def fast_forward(self, size): if size < 0: self.seek(0, SEEK_END) self.seek(size, SEEK_CUR) if sys.platform.startswith('win'): # pragma: no cover def truncate(self, size=None): # ftruncate doesn't work on an open file in Windows. The # best we can do is clear the extra bytes or add extra # bytes to the end. if size is None: size = self.tell() self.seek(0, SEEK_END) file_size = self.tell() if size < file_size: self.seek(size, SEEK_SET) nbytes = file_size - size elif size > file_size: nbytes = size - file_size else: nbytes = 0 block = b'\0' * self.block_size while nbytes > 0: self.write(block[:min(nbytes, self.block_size)]) nbytes -= self.block_size self.seek(size, SEEK_SET) else: def truncate(self, size=None): if size is None: self._fd.truncate() else: self._fd.truncate(size) self.seek(size, SEEK_SET) class RealFile(RandomAccessFile): """ Handles "real" files on a filesystem. 
""" def __init__(self, fd, mode, close=False, uri=None): super(RealFile, self).__init__(fd, mode, close=close, uri=uri) stat = os.fstat(fd.fileno()) if sys.platform.startswith('win'): # pragma: no cover # There appears to be reliable way to get block size on Windows, # so just choose a reasonable default self._blksize = io.DEFAULT_BUFFER_SIZE else: self._blksize = stat.st_blksize self._size = stat.st_size if (uri is None and isinstance(fd.name, six.string_types)): self._uri = util.filepath_to_url(os.path.abspath(fd.name)) def write_array(self, arr): if isinstance(arr, np.memmap) and getattr(arr, 'fd', None) is self: arr.flush() self.fast_forward(len(arr.data)) else: _array_tofile(self._fd, self._fd.write, arr) def can_memmap(self): return True def memmap_array(self, offset, size): if 'w' in self._mode: mode = 'r+' else: mode = 'r' mmap = np.memmap( self._fd, mode=mode, offset=offset, shape=size) mmap.fd = self return mmap def read_into_array(self, size): return _array_fromfile(self._fd, size) class MemoryIO(RandomAccessFile): """ Handles random-access memory buffers, mainly `io.BytesIO` and `StringIO.StringIO`. """ def __init__(self, fd, mode, uri=None): super(MemoryIO, self).__init__(fd, mode, uri=uri) tell = fd.tell() fd.seek(0, 2) self._size = fd.tell() fd.seek(tell, 0) def read_into_array(self, size): buf = self._fd.getvalue() offset = self._fd.tell() result = np.frombuffer(buf, np.uint8, size, offset) # Copy the buffer so the original memory can be released. result = result.copy() self.seek(size, SEEK_CUR) return result class InputStream(GenericFile): """ Handles an input stream, such as stdin. """ def __init__(self, fd, mode='r', close=False, uri=None): super(InputStream, self).__init__(fd, mode, close=close, uri=uri) self._fd = fd self._buffer = b'' def _peek(self, size=-1): if size < 0: self._buffer += self._fd.read() else: len_buffer = len(self._buffer) if len_buffer < size: self._buffer += self._fd.read(size - len_buffer) return self._buffer def read(self, size=-1): # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' len_buffer = len(self._buffer) if len_buffer == 0: return self._fd.read(size) elif size < 0: self._buffer += self._fd.read() buffer = self._buffer self._buffer = b'' return buffer elif len_buffer < size: if len_buffer < size: self._buffer += self._fd.read(size - len(self._buffer)) buffer = self._buffer self._buffer = b'' return buffer else: buffer = self._buffer[:size] self._buffer = self._buffer[size:] return buffer def reader_until(self, delimiter, readahead_bytes, delimiter_name=None, include=True, initial_content=b'', exception=True): return _TruncatedReader( self, delimiter, readahead_bytes, delimiter_name=delimiter_name, include=include, initial_content=initial_content, exception=exception) def fast_forward(self, size): if size >= 0 and len(self.read(size)) != size: raise IOError("Read past end of file") def read_into_array(self, size): try: # See if Numpy can handle this as a real file first... return np.fromfile(self._fd, np.uint8, size) except (IOError, AttributeError): # Else, fall back to reading into memory and then # returning the Numpy array. data = self.read(size) # We need to copy the array, so it is writable result = np.frombuffer(data, np.uint8, size) # When creating an array from a buffer, it is read-only. # If we need a read/write array, we have to copy it. 
if 'w' in self._mode: result = result.copy() return result class OutputStream(GenericFile): """ Handles an output stream, such as stdout. """ def __init__(self, fd, close=False, uri=None): super(OutputStream, self).__init__(fd, 'w', close=close, uri=uri) self._fd = fd def fast_forward(self, size): if size < 0: return self.clear(size) class HTTPConnection(RandomAccessFile): """ Uses a persistent HTTP connection to request specific ranges of the file and obtain its structure without transferring it in its entirety. It creates a temporary file on the local filesystem and copies blocks into it as needed. The `_blocks` array is a bitfield that keeps track of which blocks we have. """ # TODO: Handle HTTPS connection def __init__(self, connection, size, path, uri, first_chunk): self._mode = 'r' self._blksize = io.DEFAULT_BUFFER_SIZE # The underlying HTTPConnection object doesn't track closed # status, so we do that here. self._closed = False self._fd = connection self._path = path self._uri = uri # A bitmap of the blocks that we've already read and cached # locally self._blocks = np.zeros( int(math.ceil(size / self._blksize / 8)), np.uint8) local_file = tempfile.TemporaryFile() self._local = RealFile(local_file, 'rw', close=True) self._local.truncate(size) self._local.seek(0) self._local.write(first_chunk) self._local.seek(0) self._blocks[0] = 1 # The size of the entire file self._size = size self._nreads = 0 # Some methods just short-circuit to the local copy self.seek = self._local.seek self.tell = self._local.tell def __exit__(self, type, value, traceback): if not self._closed: self._local.close() if hasattr(self._fd, '__exit__'): self._fd.__exit__(type, value, traceback) else: self._fd.close() self._closed = True def close(self): if not self._closed: self._local.close() self._fd.close() self._closed = True def is_closed(self): return self._closed def _get_range(self, start, end): """ Ensure the range of bytes has been copied to the local cache. """ if start >= self._size: return end = min(end, self._size) blocks = self._blocks block_size = self.block_size def has_block(x): return blocks[x >> 3] & (1 << (x & 0x7)) def mark_block(x): blocks[x >> 3] |= (1 << (x & 0x7)) block_start = start // block_size block_end = end // block_size + 1 pos = self._local.tell() try: # Between block_start and block_end, some blocks may be # already loaded. We want to load all of the missing # blocks in as few requests as possible. 
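# Editor's example (a hedged sketch, not part of the original asdf source).
# The local block cache above keeps one bit per block; these are the same
# bit operations performed by has_block()/mark_block(), shown for block 11
# of a 16-block bitmap.
import numpy as np

blocks = np.zeros(2, np.uint8)
x = 11
blocks[x >> 3] |= (1 << (x & 0x7))                          # mark block 11
assert blocks[x >> 3] & (1 << (x & 0x7))                    # block 11 cached
assert not (blocks[(x + 1) >> 3] & (1 << ((x + 1) & 0x7)))  # block 12 not yet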
a = block_start while a < block_end: # Skip over whole groups of blocks at a time while a < block_end and blocks[a >> 3] == 0xff: a = ((a >> 3) + 1) << 3 while a < block_end and has_block(a): a += 1 if a >= block_end: break b = a + 1 # Skip over whole groups of blocks at a time while b < block_end and blocks[b >> 3] == 0x0: b = ((b >> 3) + 1) << 3 while b < block_end and not has_block(b): b += 1 if b > block_end: b = block_end if a * block_size >= self._size: return headers = { 'Range': 'bytes={0}-{1}'.format( a * block_size, (b * block_size) - 1)} self._fd.request('GET', self._path, headers=headers) response = self._fd.getresponse() if response.status != 206: raise IOError("HTTP failed: {0} {1}".format( response.status, response.reason)) # Now copy over to the temporary file, block-by-block self._local.seek(a * block_size, os.SEEK_SET) for i in xrange(a, b): chunk = response.read(block_size) self._local.write(chunk) mark_block(i) response.close() self._nreads += 1 a = b finally: self._local.seek(pos, os.SEEK_SET) def read(self, size=-1): if self._closed: raise IOError("read from closed connection") pos = self._local.tell() # Adjust size so it doesn't go beyond the end of the file if size < 0 or pos + size > self._size: size = self._size - pos # On Python 3, reading 0 bytes from a socket causes it to stop # working, so avoid doing that at all costs. if size == 0: return b'' self._get_range(pos, pos + size) return self._local.read(size) def read_into_array(self, size): if self._closed: raise IOError("read from closed connection") pos = self._local.tell() if pos + size > self._size: raise IOError("Read past end of file.") self._get_range(pos, pos + size) return self._local.memmap_array(pos, size) def _make_http_connection(init, mode, uri=None): """ Creates a HTTPConnection instance if the HTTP server supports Range requests, otherwise falls back to a generic InputStream. """ from six.moves import http_client parsed = urlparse.urlparse(init) connection = http_client.HTTPConnection(parsed.netloc) connection.connect() block_size = io.DEFAULT_BUFFER_SIZE # We request a range of the whole file ("0-") to check if the # server understands that header entry, and also to get the # size of the entire file headers = {'Range': 'bytes=0-'} connection.request('GET', parsed.path, headers=headers) response = connection.getresponse() if response.status // 100 != 2: raise IOError("HTTP failed: {0} {1}".format( response.status, response.reason)) # Status 206 means a range was returned. If it's anything else # that indicates the server probably doesn't support Range # headers. if (response.status != 206 or response.getheader('accept-ranges', None) != 'bytes' or response.getheader('content-range', None) is None or response.getheader('content-length', None) is None): # Fall back to a regular input stream, but we don't # need to open a new connection. response.close = connection.close return InputStream(response, mode, uri=uri or init, close=True) # Since we'll be requesting chunks, we can't read at all with the # current request (because we can't abort it), so just close and # start over size = int(response.getheader('content-length')) first_chunk = response.read(block_size) response.close() return HTTPConnection(connection, size, parsed.path, uri or init, first_chunk) def get_file(init, mode='r', uri=None): """ Returns a `GenericFile` instance suitable for wrapping the given object `init`. If passed an already open file-like object, it must be opened for reading/writing in binary mode. 
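Example (editor's addition; a hedged sketch, not part of the original
docstring)::

    import io
    from asdf import generic_io

    fd = generic_io.get_file(io.BytesIO(b'#ASDF 1.0.0'), mode='rw')
    assert fd.read(5) == b'#ASDF'
    assert fd.seekable()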
It is the caller's responsibility to close it. Parameters ---------- init : object `init` may be: - A `bytes` or `unicode` file path or ``file:`` or ``http:`` url. - A Python 2 `file` object. - An `io.IOBase` object (the default file object on Python 3). - A ducktyped object that looks like a file object. If `mode` is ``"r"``, it must have a ``read`` method. If `mode` is ``"w"``, it must have a ``write`` method. If `mode` is ``"rw"`` it must have the ``read``, ``write``, ``tell`` and ``seek`` methods. - A `GenericFile` instance, in which case it is wrapped in a `GenericWrapper` instance, so that the file is closed when only when the final layer is unwrapped. mode : str Must be one of ``"r"``, ``"w"`` or ``"rw"``. uri : str Sets the base URI of the file object. This will be used to resolve any relative URIs contained in the file. This is redundant if `init` is a `bytes` or `unicode` object (since it will be the uri), and it may be determined automatically if `init` refers to a regular filesystem file. It is not required if URI resolution is not used in the file. Returns ------- fd : GenericFile Raises ------ ValueError, TypeError, IOError """ if mode not in ('r', 'w', 'rw'): raise ValueError("mode must be 'r', 'w' or 'rw'") if init in (sys.__stdout__, sys.__stdin__, sys.__stderr__): if six.PY3: init = init.buffer else: init = os.fdopen(init.fileno(), init.mode + 'b') if isinstance(init, (GenericFile, GenericWrapper)): if mode not in init.mode: raise ValueError( "File is opened as '{0}', but '{1}' was requested".format( init.mode, mode)) return GenericWrapper(init) elif isinstance(init, six.string_types): parsed = urlparse.urlparse(init) if parsed.scheme == 'http': if 'w' in mode: raise ValueError( "HTTP connections can not be opened for writing") return _make_http_connection(init, mode, uri=uri) elif parsed.scheme in _local_file_schemes: if mode == 'rw': realmode = 'r+b' else: realmode = mode + 'b' realpath = url2pathname(parsed.path) if mode == 'w': fd = atomicfile.atomic_open(realpath, realmode) else: fd = open(realpath, realmode) fd = fd.__enter__() return RealFile(fd, mode, close=True, uri=uri) elif isinstance(init, io.BytesIO): return MemoryIO(init, mode, uri=uri) elif isinstance(init, io.StringIO): raise TypeError( "io.StringIO objects are not supported. 
Use io.BytesIO instead.") elif six.PY2 and isinstance(init, file): # pragma: no cover if init.mode[0] not in mode: raise ValueError( "File is opened as '{0}', but '{1}' was requested".format( init.mode, mode)) try: init.tell() except IOError: if mode == 'w': return OutputStream(init, uri=uri) elif mode == 'r': return InputStream(init, mode, uri=uri) else: raise ValueError( "File '{0}' could not be opened in 'rw' mode".format(init)) else: return RealFile(init, mode, uri=uri) elif isinstance(init, io.IOBase): if (('r' in mode and not init.readable()) or ('w' in mode and not init.writable())): raise ValueError( "File is opened as '{0}', but '{1}' was requested".format( init.mode, mode)) if init.seekable(): if isinstance(init, (io.BufferedReader, io.BufferedWriter, io.BufferedRandom)): init2 = init.raw else: init2 = init if isinstance(init2, io.RawIOBase): result = RealFile(init2, mode, uri=uri) else: result = MemoryIO(init2, mode, uri=uri) result._secondary_fd = init return result else: if mode == 'w': return OutputStream(init, uri=uri) elif mode == 'r': return InputStream(init, mode, uri=uri) else: raise ValueError( "File '{0}' could not be opened in 'rw' mode".format(init)) elif mode == 'w' and ( hasattr(init, 'write') and hasattr(init, 'seek') and hasattr(init, 'tell')): return MemoryIO(init, mode, uri=uri) elif mode == 'r' and ( hasattr(init, 'read') and hasattr(init, 'seek') and hasattr(init, 'tell')): return MemoryIO(init, mode, uri=uri) elif mode == 'rw' and ( hasattr(init, 'read') and hasattr(init, 'write') and hasattr(init, 'seek') and hasattr(init, 'tell')): return MemoryIO(init, mode, uri=uri) elif mode == 'w' and hasattr(init, 'write'): return OutputStream(init, uri=uri) elif mode == 'r' and hasattr(init, 'read'): return InputStream(init, mode, uri=uri) raise ValueError("Can't handle '{0}' as a file for mode '{1}'".format( init, mode)) asdf-1.3.3/asdf/treeutil.py0000644000175000017500000001153013246003441015102 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ Utility functions for managing tree-like data structures. """ from __future__ import absolute_import, division, unicode_literals, print_function import inspect import six from .tagged import tag_object def walk(top, callback): """ Walking through a tree of objects, calling a given function at each node. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. callback : callable A function to call at each node in the tree. The callback is called on an instance after all of its children have been visited (depth-first order). Returns ------- tree : object The modified tree. """ for x in iter_tree(top): callback(x) def iter_tree(top): """ Iterate over all nodes in a tree, in depth-first order. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. callback : callable A function to call at each node in the tree. The callback is called on an instance after all of its children have been visited (depth-first order). Returns ------- tree : object The modified tree. 
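Example for ``walk`` and ``iter_tree`` (editor's addition; a hedged sketch,
not part of the original docstrings)::

    from asdf import treeutil

    tree = {'a': [1, 2, {'b': 3}], 'c': 4}
    leaves = [node for node in treeutil.iter_tree(tree)
              if isinstance(node, int)]
    assert sorted(leaves) == [1, 2, 3, 4]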
""" seen = set() def recurse(tree): tree_id = id(tree) if tree_id in seen: return if isinstance(tree, (list, tuple)): seen.add(tree_id) for val in tree: for sub in recurse(val): yield sub seen.remove(tree_id) elif isinstance(tree, dict): seen.add(tree_id) for val in six.itervalues(tree): for sub in recurse(val): yield sub seen.remove(tree_id) yield tree return recurse(top) def walk_and_modify(top, callback): """Modify a tree by walking it with a callback function. It also has the effect of doing a deep copy. Parameters ---------- top : object The root of the tree. May be a dict, list or other Python object. callback : callable A function to call at each node in the tree. It takes either one or two arguments: - an instance from the tere - a json id (optional) It may return a different instance in order to modify the tree. The json id is the context under which any relative URLs should be resolved. It may be `None` if no ids are in the file The callback is called on an instance after all of its children have been visited (depth-first order). Returns ------- tree : object The modified tree. """ # For speed reasons, there are two different versions of the inner # function seen = set() def recurse(tree): id_tree = id(tree) if id_tree in seen: return tree if isinstance(tree, dict): result = tree.__class__() seen.add(id_tree) for key, val in six.iteritems(tree): val = recurse(val) if val is not None: result[key] = val seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) elif isinstance(tree, (list, tuple)): seen.add(id_tree) result = tree.__class__( [recurse(val) for val in tree]) seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) else: result = tree result = callback(result) return result def recurse_with_json_ids(tree, json_id): id_tree = id(tree) if id_tree in seen: return tree if isinstance(tree, dict): if 'id' in tree: json_id = tree['id'] result = tree.__class__() seen.add(id_tree) for key, val in six.iteritems(tree): val = recurse_with_json_ids(val, json_id) if val is not None: result[key] = val seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) elif isinstance(tree, (list, tuple)): seen.add(id_tree) result = tree.__class__( [recurse_with_json_ids(val, json_id) for val in tree]) seen.remove(id_tree) if hasattr(tree, '_tag'): result = tag_object(tree._tag, result) else: result = tree result = callback(result, json_id) return result if callback.__code__.co_argcount == 2: return recurse_with_json_ids(top, None) else: return recurse(top) asdf-1.3.3/asdf/tagged.py0000644000175000017500000000761513246003441014511 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- """ This file manages a transient representation of the tree made up of simple Python data types (lists, dicts, scalars) wrapped inside of `Tagged` subclasses, which add a ``tag`` attribute to hold the associated YAML tag. Below "basic data types" refers to the basic built-in data types defined in the core YAML specification. "Custom data types" are specialized tags that are added by ASDF or third-parties that are not in the YAML specification. When YAML is loaded from disk, we want to first validate it using JSON schema, which only understands basic Python data types, not the ``Nodes`` that ``pyyaml`` uses as its intermediate representation. 
However, basic Python data types do not preserve the tag information from the YAML file that we need later to convert elements to custom data types. Therefore, the approach here is to wrap those basic types inside of `Tagged` objects long enough to run through the jsonschema validator, and then convert to custom data types and throwing away the tag annotations in the process. Upon writing, the custom data types are first converted to basic Python data types wrapped in `Tagged` objects. The tags assigned to the ``Tagged`` objects are then used to write tags to the YAML file. All of this is an implementation detail of the our custom YAML loader and dumper (``yamlutil.AsdfLoader`` and ``yamlutil.AsdfDumper``) and is not intended to be exposed to the end user. """ from __future__ import absolute_import, division, unicode_literals, print_function import six from .compat import UserDict, UserList, UserString __all__ = ['tag_object', 'get_tag'] class Tagged(object): """ Base class of classes that wrap a given object and store a tag with it. """ pass class TaggedDict(Tagged, UserDict, dict): """ A Python dict with a tag attached. """ flow_style = None property_order = None def __init__(self, data=None, tag=None): if data is None: data = {} self.data = data self._tag = tag def __eq__(self, other): return (isinstance(other, TaggedDict) and self.data == other.data and self._tag == other._tag) class TaggedList(Tagged, UserList, list): """ A Python list with a tag attached. """ flow_style = None def __init__(self, data=None, tag=None): if data is None: data = [] self.data = data self._tag = tag def __eq__(self, other): return (isinstance(other, TaggedList) and self.data == other.data and self._tag == other._tag) class TaggedString(Tagged, UserString, six.text_type): """ A Python list with a tag attached. """ style = None def __eq__(self, other): return (isinstance(other, TaggedString) and six.text_type.__eq__(self, other) and self._tag == other._tag) def tag_object(tag, instance, ctx=None): """ Tag an object by wrapping it in a ``Tagged`` instance. """ if isinstance(instance, Tagged): instance._tag = tag elif isinstance(instance, dict): instance = TaggedDict(instance, tag) elif isinstance(instance, list): instance = TaggedList(instance, tag) elif isinstance(instance, six.string_types): instance = TaggedString(instance) instance._tag = tag else: from . import AsdfFile, yamlutil if ctx is None: ctx = AsdfFile() try: instance = yamlutil.custom_tree_to_tagged_tree(instance, ctx) except TypeError: raise TypeError("Don't know how to tag a {0}".format(type(instance))) instance._tag = tag return instance def get_tag(instance): """ Get the tag associated with the instance, if there is one. """ return getattr(instance, '_tag', None) asdf-1.3.3/asdf/stream.py0000644000175000017500000000365213246003441014546 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from .tags.core import ndarray class Stream(ndarray.NDArrayType): """ Used to put a streamed array into the tree. Examples -------- Save a double-precision array with 1024 columns, one row at a time:: >>> from asdf import AsdfFile, Stream >>> import numpy as np >>> ff = AsdfFile() >>> ff.tree['streamed'] = Stream([1024], np.float64) >>> with open('test.asdf', 'wb') as fd: ... ff.write_to(fd) ... for i in range(200): ... nbytes = fd.write( ... 
np.array([i] * 1024, np.float64).tostring()) """ name = None types = [] def __init__(self, shape, dtype, strides=None): self._shape = shape self._datatype, self._byteorder = ndarray.numpy_dtype_to_asdf_datatype(dtype) self._strides = strides self._array = None def _make_array(self): self._array = None @classmethod def reserve_blocks(cls, data, ctx): if isinstance(data, Stream): yield ctx.blocks.get_streamed_block() @classmethod def from_tree(cls, data, ctx): return ndarray.NDArrayType.from_tree(data, ctx) @classmethod def to_tree(cls, data, ctx): ctx.blocks.get_streamed_block() result = {} result['source'] = -1 result['shape'] = ['*'] + data._shape result['datatype'] = data._datatype result['byteorder'] = data._byteorder if data._strides is not None: result['strides'] = data._strides return result def __repr__(self): return "Stream({}, {}, strides={})".format( self._shape, self._datatype, self._strides) def __str__(self): return str(self.__repr__()) asdf-1.3.3/asdf/compat/0000755000175000017500000000000013246031665014167 5ustar dandan00000000000000asdf-1.3.3/asdf/compat/user_collections_py3/0000755000175000017500000000000013246031665020336 5ustar dandan00000000000000asdf-1.3.3/asdf/compat/user_collections_py3/UserString.py0000644000175000017500000001307413246003441023011 0ustar dandan00000000000000#! /usr/bin/env python ## vim:ts=4:et:nowrap """A user-defined wrapper around string objects """ # This file has been ported from the standard library version in # Python 2.7 to be Python 3.x compatible. As a result it is # probably not Python 2.x compatible any more. import sys import collections __all__ = ["UserString"] class UserString(collections.Sequence): def __init__(self, seq): if isinstance(seq, str): self.data = seq elif isinstance(seq, UserString): self.data = seq.data[:] else: self.data = str(seq) def __str__(self): return str(self.data) def __repr__(self): return repr(self.data) def __int__(self): return int(self.data) def __long__(self): return long(self.data) def __float__(self): return float(self.data) def __complex__(self): return complex(self.data) def __hash__(self): return hash(self.data) def __cmp__(self, string): if isinstance(string, UserString): return cmp(self.data, string.data) else: return cmp(self.data, string) def __contains__(self, char): return char in self.data def __len__(self): return len(self.data) def __getitem__(self, index): return self.__class__(self.data[index]) def __getslice__(self, start, end): start = max(start, 0); end = max(end, 0) return self.__class__(self.data[start:end]) def __add__(self, other): if isinstance(other, UserString): return self.__class__(self.data + other.data) elif isinstance(other, str): return self.__class__(self.data + other) else: return self.__class__(self.data + str(other)) def __radd__(self, other): if isinstance(other, str): return self.__class__(other + self.data) else: return self.__class__(str(other) + self.data) def __mul__(self, n): return self.__class__(self.data*n) __rmul__ = __mul__ def __mod__(self, args): return self.__class__(self.data % args) # the following methods are defined in alphabetical order: def capitalize(self): return self.__class__(self.data.capitalize()) def center(self, width, *args): return self.__class__(self.data.center(width, *args)) def count(self, sub, start=0, end=sys.maxsize): return self.data.count(sub, start, end) def decode(self, encoding=None, errors=None): # XXX improve this? 
if encoding: if errors: return self.__class__(self.data.decode(encoding, errors)) else: return self.__class__(self.data.decode(encoding)) else: return self.__class__(self.data.decode()) def encode(self, encoding=None, errors=None): # XXX improve this? if encoding: if errors: return self.__class__(self.data.encode(encoding, errors)) else: return self.__class__(self.data.encode(encoding)) else: return self.__class__(self.data.encode()) def endswith(self, suffix, start=0, end=sys.maxsize): return self.data.endswith(suffix, start, end) def expandtabs(self, tabsize=8): return self.__class__(self.data.expandtabs(tabsize)) def find(self, sub, start=0, end=sys.maxsize): return self.data.find(sub, start, end) def index(self, sub, start=0, end=sys.maxsize): return self.data.index(sub, start, end) def isalpha(self): return self.data.isalpha() def isalnum(self): return self.data.isalnum() def isdecimal(self): return self.data.isdecimal() def isdigit(self): return self.data.isdigit() def islower(self): return self.data.islower() def isnumeric(self): return self.data.isnumeric() def isspace(self): return self.data.isspace() def istitle(self): return self.data.istitle() def isupper(self): return self.data.isupper() def join(self, seq): return self.data.join(seq) def ljust(self, width, *args): return self.__class__(self.data.ljust(width, *args)) def lower(self): return self.__class__(self.data.lower()) def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) def partition(self, sep): return self.data.partition(sep) def replace(self, old, new, maxsplit=-1): return self.__class__(self.data.replace(old, new, maxsplit)) def rfind(self, sub, start=0, end=sys.maxsize): return self.data.rfind(sub, start, end) def rindex(self, sub, start=0, end=sys.maxsize): return self.data.rindex(sub, start, end) def rjust(self, width, *args): return self.__class__(self.data.rjust(width, *args)) def rpartition(self, sep): return self.data.rpartition(sep) def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars)) def split(self, sep=None, maxsplit=-1): return self.data.split(sep, maxsplit) def rsplit(self, sep=None, maxsplit=-1): return self.data.rsplit(sep, maxsplit) def splitlines(self, keepends=0): return self.data.splitlines(keepends) def startswith(self, prefix, start=0, end=sys.maxsize): return self.data.startswith(prefix, start, end) def strip(self, chars=None): return self.__class__(self.data.strip(chars)) def swapcase(self): return self.__class__(self.data.swapcase()) def title(self): return self.__class__(self.data.title()) def translate(self, *args): return self.__class__(self.data.translate(*args)) def upper(self): return self.__class__(self.data.upper()) def zfill(self, width): return self.__class__(self.data.zfill(width)) asdf-1.3.3/asdf/compat/user_collections_py3/UserDict.py0000644000175000017500000000546113246003441022427 0ustar dandan00000000000000"""A more or less complete user-defined wrapper around dictionary objects.""" # This file has been ported from the standard library version in # Python 2.7 to be Python 3.x compatible. As a result it is # probably not Python 2.x compatible any more. 
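# Editor's example (a hedged sketch, not part of the original asdf source).
# The ported wrapper behaves like a builtin dict but stores its contents in
# the ``data`` attribute, which asdf.tagged.TaggedDict relies on. It is
# normally imported through ``asdf.compat``, as in the package __init__:
#
#     from asdf.compat import UserDict
#
#     d = UserDict({'a': 1})
#     d['b'] = 2
#     assert d.data == {'a': 1, 'b': 2}
#     assert d.get('missing', 'default') == 'default'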
class UserDict(object): def __init__(self, dict=None, **kwargs): self.data = {} if dict is not None: self.update(dict) if len(kwargs): self.update(kwargs) def __repr__(self): return repr(self.data) def __cmp__(self, dict): if isinstance(dict, UserDict): return cmp(self.data, dict.data) else: return cmp(self.data, dict) __hash__ = None # Avoid Py3k warning def __len__(self): return len(self.data) def __getitem__(self, key): if key in self.data: return self.data[key] if hasattr(self.__class__, "__missing__"): return self.__class__.__missing__(self, key) raise KeyError(key) def __setitem__(self, key, item): self.data[key] = item def __delitem__(self, key): del self.data[key] def clear(self): self.data.clear() def copy(self): if self.__class__ is UserDict: return UserDict(self.data.copy()) import copy data = self.data try: self.data = {} c = copy.copy(self) finally: self.data = data c.update(self) return c def keys(self): return self.data.keys() def items(self): return self.data.items() def iteritems(self): return self.data.iteritems() def iterkeys(self): return self.data.iterkeys() def itervalues(self): return self.data.itervalues() def values(self): return self.data.values() def has_key(self, key): return key in self.data def update(self, dict=None, **kwargs): if dict is None: pass elif isinstance(dict, UserDict): self.data.update(dict.data) elif isinstance(dict, type({})) or not hasattr(dict, 'items'): self.data.update(dict) else: for k, v in dict.items(): self[k] = v if len(kwargs): self.data.update(kwargs) def get(self, key, failobj=None): if key not in self: return failobj return self[key] def setdefault(self, key, failobj=None): if key not in self: self[key] = failobj return self[key] def pop(self, key, *args): return self.data.pop(key, *args) def popitem(self): return self.data.popitem() def __contains__(self, key): return key in self.data @classmethod def fromkeys(cls, iterable, value=None): d = cls() for key in iterable: d[key] = value return d def __iter__(self): return iter(self.data) asdf-1.3.3/asdf/compat/user_collections_py3/__init__.py0000644000175000017500000000000013246003441022424 0ustar dandan00000000000000asdf-1.3.3/asdf/compat/user_collections_py3/UserList.py0000644000175000017500000000735213246003441022460 0ustar dandan00000000000000"""A more or less complete user-defined wrapper around list objects.""" # This file has been ported from the standard library version in # Python 2.7 to be Python 3.x compatible. As a result it is # probably not Python 2.x compatible any more. import collections class UserList(collections.MutableSequence): def __init__(self, initlist=None): self.data = [] if initlist is not None: # XXX should this accept an arbitrary sequence? 
if type(initlist) == type(self.data): self.data[:] = initlist elif isinstance(initlist, UserList): self.data[:] = initlist.data[:] else: self.data = list(initlist) def __repr__(self): return repr(self.data) def __lt__(self, other): return self.data < self.__cast(other) def __le__(self, other): return self.data <= self.__cast(other) def __eq__(self, other): return self.data == self.__cast(other) def __ne__(self, other): return self.data != self.__cast(other) def __gt__(self, other): return self.data > self.__cast(other) def __ge__(self, other): return self.data >= self.__cast(other) def __cast(self, other): if isinstance(other, UserList): return other.data else: return other def __cmp__(self, other): return cmp(self.data, self.__cast(other)) __hash__ = None # Mutable sequence, so not hashable def __contains__(self, item): return item in self.data def __len__(self): return len(self.data) def __getitem__(self, i): return self.data[i] def __setitem__(self, i, item): self.data[i] = item def __delitem__(self, i): del self.data[i] def __getslice__(self, i, j): i = max(i, 0); j = max(j, 0) return self.__class__(self.data[i:j]) def __setslice__(self, i, j, other): i = max(i, 0); j = max(j, 0) if isinstance(other, UserList): self.data[i:j] = other.data elif isinstance(other, type(self.data)): self.data[i:j] = other else: self.data[i:j] = list(other) def __delslice__(self, i, j): i = max(i, 0); j = max(j, 0) del self.data[i:j] def __add__(self, other): if isinstance(other, UserList): return self.__class__(self.data + other.data) elif isinstance(other, type(self.data)): return self.__class__(self.data + other) else: return self.__class__(self.data + list(other)) def __radd__(self, other): if isinstance(other, UserList): return self.__class__(other.data + self.data) elif isinstance(other, type(self.data)): return self.__class__(other + self.data) else: return self.__class__(list(other) + self.data) def __iadd__(self, other): if isinstance(other, UserList): self.data += other.data elif isinstance(other, type(self.data)): self.data += other else: self.data += list(other) return self def __mul__(self, n): return self.__class__(self.data*n) __rmul__ = __mul__ def __imul__(self, n): self.data *= n return self def append(self, item): self.data.append(item) def insert(self, i, item): self.data.insert(i, item) def pop(self, i=-1): return self.data.pop(i) def remove(self, item): self.data.remove(item) def count(self, item): return self.data.count(item) def index(self, item, *args): return self.data.index(item, *args) def reverse(self): self.data.reverse() def sort(self, *args, **kwds): self.data.sort(*args, **kwds) def extend(self, other): if isinstance(other, UserList): self.data.extend(other.data) else: self.data.extend(other) asdf-1.3.3/asdf/compat/functools_backport.py0000644000175000017500000001516313246003441020437 0ustar dandan00000000000000from collections import namedtuple from functools import update_wrapper from threading import RLock _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]) class _HashedSeq(list): __slots__ = 'hashvalue' def __init__(self, tup, hash=hash): self[:] = tup self.hashvalue = hash(tup) def __hash__(self): return self.hashvalue _fasttypes = set([int, str, frozenset, type(None)]) def _make_key(args, kwds, typed, kwd_mark = (object(),), fasttypes = _fasttypes, sorted=sorted, tuple=tuple, type=type, len=len): 'Make a cache key from optionally typed positional and keyword arguments' key = args if kwds: sorted_items = sorted(kwds.items()) key += kwd_mark 
for item in sorted_items: key += item if typed: key += tuple(type(v) for v in args) if kwds: key += tuple(type(v) for k, v in sorted_items) elif len(key) == 1 and type(key[0]) in fasttypes: return key[0] return _HashedSeq(key) def lru_cache(maxsize=100, typed=False): """Least-recently-used cache decorator. If *maxsize* is set to None, the LRU features are disabled and the cache can grow without bound. If *typed* is True, arguments of different types will be cached separately. For example, f(3.0) and f(3) will be treated as distinct calls with distinct results. Arguments to the cached function must be hashable. View the cache statistics named tuple (hits, misses, maxsize, currsize) with f.cache_info(). Clear the cache and statistics with f.cache_clear(). Access the underlying function with f.__wrapped__. See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used """ # Users should only access the lru_cache through its public API: # cache_info, cache_clear, and f.__wrapped__ # The internals of the lru_cache are encapsulated for thread safety and # to allow the implementation to change (including a possible C version). def decorating_function(user_function): cache = dict() stats = [0, 0] # make statistics updateable non-locally HITS, MISSES = 0, 1 # names for the stats fields make_key = _make_key cache_get = cache.get # bound method to lookup key or return None _len = len # localize the global len() function lock = RLock() # because linkedlist updates aren't threadsafe root = [] # root of the circular doubly linked list root[:] = [root, root, None, None] # initialize by pointing to self nonlocal_root = [root] # make updateable non-locally PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields if maxsize == 0: def wrapper(*args, **kwds): # no caching, just do a statistics update after a successful call result = user_function(*args, **kwds) stats[MISSES] += 1 return result elif maxsize is None: def wrapper(*args, **kwds): # simple caching without ordering or size limit key = make_key(args, kwds, typed) result = cache_get(key, root) # root used here as a unique not-found sentinel if result is not root: stats[HITS] += 1 return result result = user_function(*args, **kwds) cache[key] = result stats[MISSES] += 1 return result else: def wrapper(*args, **kwds): # size limited caching that tracks accesses by recency key = make_key(args, kwds, typed) if kwds or typed else args with lock: link = cache_get(key) if link is not None: # record recent use of the key by moving it to the front of the list root, = nonlocal_root link_prev, link_next, key, result = link link_prev[NEXT] = link_next link_next[PREV] = link_prev last = root[PREV] last[NEXT] = root[PREV] = link link[PREV] = last link[NEXT] = root stats[HITS] += 1 return result result = user_function(*args, **kwds) with lock: root, = nonlocal_root if key in cache: # getting here means that this same key was added to the # cache while the lock was released. since the link # update is already done, we need only return the # computed result and update the count of misses. 
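# Editor's example (a hedged sketch, not part of the original asdf source).
# The backported decorator mirrors functools.lru_cache: repeated calls with
# equal arguments are answered from the cache and counted as hits.
#
#     from asdf.compat import lru_cache
#
#     @lru_cache(maxsize=2)
#     def square(x):
#         return x * x
#
#     square(2)
#     square(2)
#     square(3)
#     assert square.cache_info().hits == 1
#     assert square.cache_info().misses == 2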
pass elif _len(cache) >= maxsize: # use the old root to store the new key and result oldroot = root oldroot[KEY] = key oldroot[RESULT] = result # empty the oldest link and make it the new root root = nonlocal_root[0] = oldroot[NEXT] oldkey = root[KEY] oldvalue = root[RESULT] root[KEY] = root[RESULT] = None # now update the cache dictionary for the new links del cache[oldkey] cache[key] = oldroot else: # put result in a new link at the front of the list last = root[PREV] link = [last, root, key, result] last[NEXT] = root[PREV] = cache[key] = link stats[MISSES] += 1 return result def cache_info(): """Report cache statistics""" with lock: return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache)) def cache_clear(): """Clear the cache and cache statistics""" with lock: cache.clear() root = nonlocal_root[0] root[:] = [root, root, None, None] stats[:] = [0, 0] wrapper.__wrapped__ = user_function wrapper.cache_info = cache_info wrapper.cache_clear = cache_clear return update_wrapper(wrapper, user_function) return decorating_function asdf-1.3.3/asdf/compat/numpycompat.py0000644000175000017500000000033313246003441017103 0ustar dandan00000000000000from __future__ import (absolute_import, division, print_function, unicode_literals) from ..util import minversion __all__ = ['NUMPY_LT_1_7'] NUMPY_LT_1_7 = not minversion('numpy', '1.7.0') asdf-1.3.3/asdf/compat/__init__.py0000644000175000017500000000125513246003441016272 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import six if six.PY2: from UserDict import UserDict from UserList import UserList from UserString import UserString elif six.PY3: from .user_collections_py3.UserDict import UserDict from .user_collections_py3.UserList import UserList from .user_collections_py3.UserString import UserString if six.PY2: from .functools_backport import lru_cache elif six.PY3: try: from functools import lru_cache except ImportError: from .functools_backport import lru_cache asdf-1.3.3/asdf/constants.py0000644000175000017500000000121113246003441015254 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np ASDF_MAGIC = b'#ASDF' BLOCK_MAGIC = b'\xd3BLK' BLOCK_HEADER_BOILERPLATE_SIZE = 6 ASDF_STANDARD_COMMENT = b'ASDF_STANDARD' INDEX_HEADER = b'#ASDF BLOCK INDEX' # The maximum number of blocks supported MAX_BLOCKS = 2 ** 16 MAX_BLOCKS_DIGITS = int(np.ceil(np.log10(MAX_BLOCKS) + 1)) YAML_TAG_PREFIX = 'tag:yaml.org,2002:' YAML_END_MARKER_REGEX = br'\r?\n\.\.\.((\r?\n)|$)' STSCI_SCHEMA_URI_BASE = 'http://stsci.edu/schemas/' BLOCK_FLAG_STREAMED = 0x1 asdf-1.3.3/asdf/block.py0000644000175000017500000012271613246003441014350 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function from collections import namedtuple import copy import hashlib import io import os import re import struct import weakref import numpy as np import six from six.moves.urllib import parse as urlparse import yaml from . import compression as mcompression from .compat.numpycompat import NUMPY_LT_1_7 from . import constants from . import generic_io from . import stream from . import treeutil from . import util from . 
import yamlutil class BlockManager(object): """ Manages the `Block`s associated with a ASDF file. """ def __init__(self, asdffile, copy_arrays=False): self._asdffile = weakref.ref(asdffile) self._internal_blocks = [] self._external_blocks = [] self._inline_blocks = [] self._streamed_blocks = [] self._block_type_mapping = { 'internal': self._internal_blocks, 'external': self._external_blocks, 'inline': self._inline_blocks, 'streamed': self._streamed_blocks } self._data_to_block_mapping = {} self._validate_checksums = False self._memmap = not copy_arrays def __len__(self): """ Return the total number of blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ return sum(len(x) for x in self._block_type_mapping.values()) def add(self, block): """ Add an internal block to the manager. """ block_set = self._block_type_mapping.get(block.array_storage, None) if block_set is not None: if block not in block_set: block_set.append(block) else: raise ValueError( "Unknown array storage type {0}".format(block.array_storage)) if block.array_storage == 'streamed' and len(self._streamed_blocks) > 1: raise ValueError("Can not add second streaming block") if block._data is not None: self._data_to_block_mapping[id(block._data)] = block def remove(self, block): """ Remove a block from the manager. """ block_set = self._block_type_mapping.get(block.array_storage, None) if block_set is not None: if block in block_set: block_set.remove(block) if block._data is not None: if id(block._data) in self._data_to_block_mapping: del self._data_to_block_mapping[id(block._data)] else: raise ValueError( "Unknown array storage type {0}".format(block.array_storage)) def set_array_storage(self, block, array_storage): """ Set the array storage type of the given block. Parameters ---------- block : Block instance array_storage : str Must be one of: - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. - ``streamed``: The special streamed inline block that appears at the end of the file. """ if array_storage not in ['internal', 'external', 'streamed', 'inline']: raise ValueError( "array_storage must be one of 'internal', 'external', " "'streamed' or 'inline'") if block.array_storage != array_storage: if block in self.blocks: self.remove(block) block._array_storage = array_storage self.add(block) if array_storage == 'streamed': block.output_compression = None @property def blocks(self): """ An iterator over all blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. """ for block_set in self._block_type_mapping.values(): for block in block_set: yield block @property def internal_blocks(self): """ An iterator over all internal blocks being managed. This may not include all of the blocks in an open file, since their reading may have been deferred. Call `finish_reading_internal_blocks` to find the positions and header information of all blocks in the file. 
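# Minimal sketch of how the storage types above are selected through the block
# manager attached to an ``asdf.AsdfFile`` (the output file name is hypothetical).
import numpy as np
import asdf

af = asdf.AsdfFile({'data': np.arange(100)})
arr = af.tree['data']
af.blocks.set_array_storage(af.blocks[arr], 'inline')   # keep the data as YAML text
af.write_to('inline_example.asdf')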
""" for block_set in (self._internal_blocks, self._streamed_blocks): for block in block_set: yield block @property def streamed_block(self): """ The streamed block (always the last internal block in a file), or `None` if a streamed block is not present. """ self.finish_reading_internal_blocks() if len(self._streamed_blocks): return self._streamed_blocks[0] @property def external_blocks(self): """ An iterator over all external blocks being managed. """ for block in self._external_blocks: yield block @property def inline_blocks(self): """ An iterator over all inline blocks being managed. """ for block in self._inline_blocks: yield block def has_blocks_with_offset(self): """ Returns `True` if any of the internal blocks currently have an offset assigned. """ for block in self.internal_blocks: if block.offset is not None: return True return False def _sort_blocks_by_offset(self): def sorter(x): if x.offset is None: raise ValueError('Block is missing offset') else: return x.offset self._internal_blocks.sort(key=sorter) def _read_next_internal_block(self, fd, past_magic=False): # This assumes the file pointer is at the beginning of the # block, (or beginning + 4 if past_magic is True) block = Block(memmap=self._memmap).read( fd, past_magic=past_magic, validate_checksum=self._validate_checksums) if block is not None: self.add(block) return block def read_internal_blocks(self, fd, past_magic=False, validate_checksums=False): """ Read internal blocks present in the file. If the file is seekable, only the first block will be read, and the reading of all others will be lazily deferred until an the loading of an array requests it. Parameters ---------- fd : GenericFile The file to read from. past_magic : bool, optional If `True`, the file position is immediately after the block magic token. If `False` (default), the file position is exactly at the beginning of the block magic token. validate_checksums : bool, optional If `True`, validate the blocks against their checksums. """ self._validate_checksums = validate_checksums while True: block = self._read_next_internal_block(fd, past_magic=past_magic) if block is None: break past_magic = False # If the file handle is seekable, we only read the first # block and defer reading the rest until later. if fd.seekable(): break def finish_reading_internal_blocks(self): """ Read all remaining internal blocks present in the file, if any. This is called before updating a file, since updating requires knowledge of all internal blocks in the file. """ if len(self._internal_blocks): for i, block in enumerate(self._internal_blocks): if isinstance(block, UnloadedBlock): block.load() last_block = self._internal_blocks[-1] # Read all of the remaining blocks in the file, if any if (last_block._fd is not None and last_block._fd.seekable()): last_block._fd.seek(last_block.end_offset) while True: last_block = self._read_next_internal_block( last_block._fd, False) if last_block is None: break def write_internal_blocks_serial(self, fd, pad_blocks=False): """ Write all blocks to disk serially. Parameters ---------- fd : generic_io.GenericFile The file to write internal blocks to. The file position should be after the tree. 
""" for block in self.internal_blocks: if block.output_compression: block.offset = fd.tell() block.write(fd) else: if block.input_compression: block.update_size() padding = util.calculate_padding( block.size, pad_blocks, fd.block_size) block.allocated = block._size + padding block.offset = fd.tell() block.write(fd) fd.fast_forward(block.allocated - block._size) def write_internal_blocks_random_access(self, fd): """ Write all blocks to disk at their specified offsets. All internal blocks must have an offset assigned at this point. Parameters ---------- fd : generic_io.GenericFile The file to write internal blocks to. The file position should be after the tree. """ self._sort_blocks_by_offset() iter = self.internal_blocks last_block = next(iter) # We need to explicitly clear anything between the tree # and the first block, otherwise there may be other block # markers left over which will throw off block indexing. # We don't need to do this between each block. fd.clear(last_block.offset - fd.tell()) for block in iter: last_block.allocated = ((block.offset - last_block.offset) - last_block.header_size) fd.seek(last_block.offset) last_block.write(fd) last_block = block last_block.allocated = last_block.size fd.seek(last_block.offset) last_block.write(fd) fd.truncate(last_block.end_offset) def write_external_blocks(self, uri, pad_blocks=False): """ Write all blocks to disk serially. Parameters ---------- uri : str The base uri of the external blocks """ from . import asdf for i, block in enumerate(self.external_blocks): if uri is None: raise ValueError( "Can't write external blocks, since URI of main file is " "unknown.") subfd = self.get_external_uri(uri, i) asdffile = asdf.AsdfFile() block = copy.copy(block) block._array_storage = 'internal' asdffile.blocks.add(block) block._used = True asdffile.write_to(subfd, pad_blocks=pad_blocks) def write_block_index(self, fd, ctx): """ Write the block index. Parameters ---------- fd : GenericFile The file to write to. The file pointer should be at the end of the file. """ if len(self._internal_blocks) and not len(self._streamed_blocks): fd.write(constants.INDEX_HEADER) fd.write(b'\n') offsets = [x.offset for x in self.internal_blocks] yaml_version = tuple( int(x) for x in ctx.version_map['YAML_VERSION'].split('.')) yaml.dump( offsets, Dumper=yamlutil._yaml_base_dumper, stream=fd, explicit_start=True, explicit_end=True, version=yaml_version, allow_unicode=True, encoding='utf-8') _re_index_content = re.compile( br'^' + constants.INDEX_HEADER + br'\r?\n%YAML.*\.\.\.\r?\n?$') _re_index_misc = re.compile(br'^[\n\r\x20-\x7f]+$') def read_block_index(self, fd, ctx): """ Read the block index. Parameters ---------- fd : GenericFile The file to read from. It must be seekable. """ # This reads the block index by reading backward from the end # of the file. This tries to be as conservative as possible, # since not reading an index isn't a deal breaker -- # everything can still be read from the file, only slower. # Importantly, it must remain "transactionally clean", and not # create any blocks until we're sure the block index makes # sense. if not fd.seekable(): return if not len(self._internal_blocks): return first_block = self._internal_blocks[0] first_block_end = first_block.end_offset fd.seek(0, generic_io.SEEK_END) file_size = block_end = fd.tell() # We want to read on filesystem block boundaries. We use # "block_end - 5" here because we need to read at least 5 # bytes in the first block. 
block_start = ((block_end - 5) // fd.block_size) * fd.block_size buff_size = block_end - block_start content = b'' fd.seek(block_start, generic_io.SEEK_SET) buff = fd.read(buff_size) # Extra '\0' bytes are allowed after the ..., mainly to # workaround poor truncation support on Windows buff = buff.rstrip(b'\0') content = buff # We need an explicit YAML end marker, or there's no # block index for ending in (b'...', b'...\r\n', b'...\n'): if content.endswith(ending): break else: return # Read blocks in reverse order from the end of the file while True: # Look for the index header idx = content.find(constants.INDEX_HEADER) if idx != -1: content = content[idx:] index_start = block_start + idx break else: # If the rest of it starts to look like binary # values, bail... if not self._re_index_misc.match(buff): return if block_start <= first_block_end: return block_end = block_start block_start = max(block_end - fd.block_size, first_block_end) fd.seek(block_start, generic_io.SEEK_SET) buff_size = block_end - block_start buff = fd.read(buff_size) content = buff + content yaml_content = content[content.find(b'\n') + 1:] offsets = yaml.load(yaml_content, Loader=yamlutil._yaml_base_loader) # Make sure the indices look sane if not isinstance(offsets, list) or len(offsets) == 0: return last_offset = 0 for x in offsets: if (not isinstance(x, six.integer_types) or x > file_size or x < 0 or x <= last_offset + Block._header.size): return last_offset = x # We always read the first block, so we can confirm that the # first entry in the block index matches the first block if offsets[0] != first_block.offset: return if len(offsets) == 1: # If there's only one block in the index, we've already # loaded the first block, so just return: we have nothing # left to do return # One last sanity check: Read the last block in the index and # make sure it makes sense. fd.seek(offsets[-1], generic_io.SEEK_SET) try: block = Block(memmap=self._memmap).read(fd) except (ValueError, IOError): return # Now see if the end of the last block leads right into the index if (block.end_offset != index_start): return # It seems we're good to go, so instantiate the UnloadedBlock # objects for offset in offsets[1:-1]: self._internal_blocks.append( UnloadedBlock(fd, offset, memmap=self._memmap)) # We already read the last block in the file -- no need to read it again self._internal_blocks.append(block) def get_external_filename(self, filename, index): """ Given a main filename and an index number, return a new file name for referencing an external block. """ filename = os.path.splitext(filename)[0] return filename + '{0:04d}.asdf'.format(index) def get_external_uri(self, uri, index): """ Given a main URI and an index number, return a new URI for saving an external block. 
""" if uri is None: uri = '' parts = list(urlparse.urlparse(uri)) path = parts[2] dirname, filename = os.path.split(path) filename = self.get_external_filename(filename, index) path = os.path.join(dirname, filename) parts[2] = path return urlparse.urlunparse(parts) def _find_used_blocks(self, tree, ctx): reserved_blocks = set() for node in treeutil.iter_tree(tree): hook = ctx.type_index.get_hook_for_type( 'reserve_blocks', type(node), ctx.version_string) if hook is not None: for block in hook(node, ctx): reserved_blocks.add(block) for block in list(self.blocks): if (getattr(block, '_used', 0) == 0 and block not in reserved_blocks): self.remove(block) def _handle_global_block_settings(self, ctx, block): all_array_storage = getattr(ctx, '_all_array_storage', None) if all_array_storage: self.set_array_storage(block, all_array_storage) all_array_compression = getattr(ctx, '_all_array_compression', 'input') block.output_compression = all_array_compression auto_inline = getattr(ctx, '_auto_inline', None) if auto_inline: if np.product(block.data.shape) < auto_inline: self.set_array_storage(block, 'inline') def finalize(self, ctx): """ At this point, we have a complete set of blocks for the file, with no extras. Here, they are reindexed, and possibly reorganized. """ # TODO: Should this reset the state (what's external and what # isn't) afterword? self._find_used_blocks(ctx.tree, ctx) for block in list(self.blocks): self._handle_global_block_settings(ctx, block) def get_block(self, source): """ Given a "source identifier", return a block. Parameters ---------- source : any If an integer, refers to the index of an internal block. If a string, is a uri to an external block. Returns ------- buffer : buffer """ # If an "int", it is the index of an internal block if isinstance(source, six.integer_types): if source == -1: if len(self._streamed_blocks): return self._streamed_blocks[0] # If we don't have a streamed block, fall through so # we can read all of the blocks, ultimately arriving # at the last one, which, if all goes well is a # streamed block. # First, look in the blocks we've already read elif source >= 0: if source < len(self._internal_blocks): return self._internal_blocks[source] else: raise ValueError("Invalid source id {0}".format(source)) # If we have a streamed block or we already know we have # no blocks, reading any further isn't going to yield any # new blocks. if len(self._streamed_blocks) or len(self._internal_blocks) == 0: raise ValueError("Block '{0}' not found.".format(source)) # If the desired block hasn't already been read, and the # file is seekable, and we have at least one internal # block, then we can move the file pointer to the end of # the last known internal block, and start looking for # more internal blocks. This is "deferred block loading". 
last_block = self._internal_blocks[-1] if (last_block._fd is not None and last_block._fd.seekable()): last_block._fd.seek(last_block.end_offset) while True: next_block = self._read_next_internal_block( last_block._fd, False) if next_block is None: break if len(self._internal_blocks) - 1 == source: return next_block last_block = next_block if (source == -1 and last_block.array_storage == 'streamed'): return last_block raise ValueError("Block '{0}' not found.".format(source)) elif isinstance(source, six.string_types): asdffile = self._asdffile().open_external( source, do_not_fill_defaults=True) block = asdffile.blocks._internal_blocks[0] self.set_array_storage(block, 'external') # Handle the case of inline data elif isinstance(source, list): block = Block(data=np.array(source), array_storage='inline') else: raise TypeError("Unknown source '{0}'".format(source)) return block def get_source(self, block): """ Get a source identifier for a given block. Parameters ---------- block : Block Returns ------- source_id : str May be an integer for an internal block, or a URI for an external block. """ for i, internal_block in enumerate(self.internal_blocks): if block == internal_block: if internal_block.array_storage == 'streamed': return -1 return i for i, external_block in enumerate(self.external_blocks): if block == external_block: if self._asdffile().uri is None: raise ValueError( "Can't write external blocks, since URI of main file is " "unknown.") parts = list(urlparse.urlparse(self._asdffile().uri)) path = parts[2] filename = os.path.basename(path) return self.get_external_filename(filename, i) raise ValueError("block not found.") def find_or_create_block_for_array(self, arr, ctx): """ For a given array, looks for an existing block containing its underlying data. If not found, adds a new block to the block list. Returns the index in the block list to the array. Parameters ---------- arr : numpy.ndarray Returns ------- block : Block """ from .tags.core import ndarray if (isinstance(arr, ndarray.NDArrayType) and arr.block is not None): if arr.block in self.blocks: return arr.block else: arr._block = None base = util.get_array_base(arr) block = self._data_to_block_mapping.get(id(base)) if block is not None: return block block = Block(base) self.add(block) self._handle_global_block_settings(ctx, block) return block def get_streamed_block(self): """ Get the streamed block, which is always the last one. A streamed block, on writing, does not manage data of its own, but the user is expected to stream it to disk directly. """ block = self.streamed_block if block is None: block = Block(array_storage='streamed') self.add(block) return block def add_inline(self, array): """ Add an inline block for ``array`` to the block set. """ block = Block(array, array_storage='inline') self.add(block) return block def __getitem__(self, arr): return self.find_or_create_block_for_array(arr, object()) def close(self): for block in self.blocks: block.close() class Block(object): """ Represents a single block in a ASDF file. This is an implementation detail and should not be instantiated directly. Instead, should only be created through the `BlockManager`. 
""" _header = util.BinaryStruct([ ('flags', 'I'), ('compression', '4s'), ('allocated_size', 'Q'), ('used_size', 'Q'), ('data_size', 'Q'), ('checksum', '16s') ]) def __init__(self, data=None, uri=None, array_storage='internal', memmap=True): if isinstance(data, np.ndarray) and not data.flags.c_contiguous: self._data = np.ascontiguousarray(data) else: self._data = data self._uri = uri self._array_storage = array_storage self._fd = None self._offset = None self._input_compression = None self._output_compression = 'input' self._checksum = None self._should_memmap = memmap self._memmapped = False self.update_size() self._allocated = self._size def __repr__(self): return ''.format( self._array_storage[:3], self._offset, self._allocated, self._size) def __len__(self): return self._size @property def offset(self): return self._offset @offset.setter def offset(self, offset): self._offset = offset @property def allocated(self): return self._allocated @allocated.setter def allocated(self, allocated): self._allocated = allocated @property def header_size(self): return self._header.size + constants.BLOCK_HEADER_BOILERPLATE_SIZE @property def data_offset(self): return self._offset + self.header_size @property def size(self): return self._size + self.header_size @property def end_offset(self): """ The offset of the end of the allocated space for the block, and where the next block should begin. """ return self.offset + self.header_size + self.allocated def override_byteorder(self, byteorder): return byteorder @property def array_storage(self): return self._array_storage @property def input_compression(self): """ The compression codec used to read the block. """ return self._input_compression @input_compression.setter def input_compression(self, compression): self._input_compression = mcompression.validate(compression) @property def output_compression(self): """ The compression codec used to write the block. :return: """ if self._output_compression == 'input': return self._input_compression return self._output_compression @output_compression.setter def output_compression(self, compression): self._output_compression = mcompression.validate(compression) @property def checksum(self): return self._checksum def _set_checksum(self, checksum): if checksum == b'\0' * 16: self._checksum = None else: self._checksum = checksum def _calculate_checksum(self, data): m = hashlib.new('md5') m.update(self.data) return m.digest() def validate_checksum(self): """ Validate the content of the block against the current checksum. Returns ------- valid : bool `True` if the content is valid against the current checksum or there is no current checksum. Otherwise, `False`. """ if self._checksum: checksum = self._calculate_checksum(self.data) if checksum != self._checksum: return False return True def update_checksum(self): """ Update the checksum based on the current data contents. """ self._checksum = self._calculate_checksum(self.data) def update_size(self): """ Recalculate the on-disk size of the block. This causes any compression steps to run. It should only be called when updating the file in-place, otherwise the work is redundant. 
""" if self._data is not None: if six.PY2: # pragma: no cover self._data_size = len(self._data.data) else: self._data_size = self._data.data.nbytes if not self.output_compression: self._size = self._data_size else: self._size = mcompression.get_compressed_size( self._data, self.output_compression) else: self._data_size = self._size = 0 def read(self, fd, past_magic=False, validate_checksum=False): """ Read a Block from the given Python file-like object. If the file is seekable, the reading or memmapping of the actual data is postponed until an array requests it. If the file is a stream, the data will be read into memory immediately. Parameters ---------- fd : GenericFile past_magic : bool, optional If `True`, the file position is immediately after the block magic token. If `False` (default), the file position is exactly at the beginning of the block magic token. validate_checksum : bool, optional If `True`, validate the data against the checksum, and raise a `ValueError` if the data doesn't match. """ offset = None if fd.seekable(): offset = fd.tell() if not past_magic: buff = fd.read(len(constants.BLOCK_MAGIC)) if len(buff) < 4: return None if buff not in (constants.BLOCK_MAGIC, constants.INDEX_HEADER[:len(buff)]): raise ValueError( "Bad magic number in block. " "This may indicate an internal inconsistency about the " "sizes of the blocks in the file.") if buff == constants.INDEX_HEADER[:len(buff)]: return None elif offset is not None: offset -= 4 buff = fd.read(2) header_size, = struct.unpack(b'>H', buff) if header_size < self._header.size: raise ValueError( "Header size must be >= {0}".format(self._header.size)) buff = fd.read(header_size) header = self._header.unpack(buff) # This is used by the documentation system, but nowhere else. self._flags = header['flags'] self.input_compression = header['compression'] self._set_checksum(header['checksum']) if (self.input_compression is None and header['used_size'] != header['data_size']): raise ValueError( "used_size and data_size must be equal when no compression is used.") if (header['flags'] & constants.BLOCK_FLAG_STREAMED and self.input_compression is not None): raise ValueError( "Compression set on a streamed block.") if fd.seekable(): # If the file is seekable, we can delay reading the actual # data until later. self._fd = fd self._header_size = header_size self._offset = offset if header['flags'] & constants.BLOCK_FLAG_STREAMED: # Support streaming blocks fd.fast_forward(-1) self._array_storage = 'streamed' self._data_size = self._size = self._allocated = \ (fd.tell() - self.data_offset) + 1 else: fd.fast_forward(header['allocated_size']) self._allocated = header['allocated_size'] self._size = header['used_size'] self._data_size = header['data_size'] else: # If the file is a stream, we need to get the data now. 
if header['flags'] & constants.BLOCK_FLAG_STREAMED: # Support streaming blocks self._array_storage = 'streamed' self._data = fd.read_into_array(-1) self._data_size = self._size = self._allocated = len(self._data) else: self._data_size = header['data_size'] self._size = header['used_size'] self._allocated = header['allocated_size'] self._data = self._read_data(fd, self._size, self._data_size) fd.fast_forward(self._allocated - self._size) fd.close() if validate_checksum and not self.validate_checksum(): raise ValueError( "Block at {0} does not match given checksum".format( self._offset)) return self def _read_data(self, fd, used_size, data_size): if not self.input_compression: return fd.read_into_array(used_size) else: return mcompression.decompress( fd, used_size, data_size, self.input_compression) def write(self, fd): """ Write an internal block to the given Python file-like object. """ self._header_size = self._header.size flags = 0 data_size = used_size = allocated_size = 0 if self._array_storage == 'streamed': flags |= constants.BLOCK_FLAG_STREAMED elif self._data is not None: self.update_checksum() data_size = self._data.nbytes if not fd.seekable() and self.output_compression: buff = io.BytesIO() mcompression.compress(buff, self._data, self.output_compression) self.allocated = self._size = buff.tell() allocated_size = self.allocated used_size = self._size self.input_compression = self.output_compression assert allocated_size >= used_size if self.checksum is not None: checksum = self.checksum else: checksum = b'\0' * 16 fd.write(constants.BLOCK_MAGIC) fd.write(struct.pack(b'>H', self._header_size)) fd.write(self._header.pack( flags=flags, compression=mcompression.to_compression_header( self.output_compression), allocated_size=allocated_size, used_size=used_size, data_size=data_size, checksum=checksum)) if self._data is not None: if self.output_compression: if not fd.seekable(): fd.write(buff.getvalue()) else: # If the file is seekable, we write the # compressed data directly to it, then go back # and write the resulting size in the block # header. start = fd.tell() mcompression.compress( fd, self._data, self.output_compression) end = fd.tell() self.allocated = self._size = end - start fd.seek(self.offset + 6) self._header.update( fd, allocated_size=self.allocated, used_size=self._size) fd.seek(end) else: assert used_size == data_size fd.write_array(self._data) @property def data(self): """ Get the data for the block, as a numpy array. """ if self._data is None: if self._fd.is_closed(): raise IOError( "ASDF file has already been closed. " "Can not get the data.") # Be nice and reset the file position after we're done curpos = self._fd.tell() try: memmap = self._fd.can_memmap() and not self.input_compression if self._should_memmap and memmap: self._data = self._fd.memmap_array( self.data_offset, self._size) self._memmapped = True else: self._fd.seek(self.data_offset) self._data = self._read_data( self._fd, self._size, self._data_size) finally: self._fd.seek(curpos) return self._data def close(self): if self._memmapped and self._data is not None: if NUMPY_LT_1_7: # pragma: no cover try: self._data.flush() except ValueError: pass else: self._data.flush() if self._data._mmap is not None: self._data._mmap.close() self._data = None class UnloadedBlock(object): """ Represents an indexed, but not yet loaded, internal block. All that is known about it is its offset. It converts itself to a full-fledged block whenever the underlying data or more detail is requested. 
""" def __init__(self, fd, offset, memmap=True): self._fd = fd self._offset = offset self._data = None self._uri = None self._array_storage = 'internal' self._input_compression = None self._output_compression = 'input' self._checksum = None self._should_memmap = memmap self._memmapped = False def __len__(self): self.load() return len(self) def close(self): pass @property def array_storage(self): return 'internal' @property def offset(self): return self._offset def __getattr__(self, attr): self.load() return getattr(self, attr) def load(self): self._fd.seek(self._offset, generic_io.SEEK_SET) self.__class__ = Block self.read(self._fd) def calculate_updated_layout(blocks, tree_size, pad_blocks, block_size): """ Calculates a block layout that will try to use as many blocks as possible in their original locations, though at this point the algorithm is fairly naive. The result will be stored in the offsets of the blocks. Parameters ---------- blocks : Blocks instance tree_size : int The amount of space to reserve for the tree at the beginning. Returns ------- Returns `False` if no good layout can be found and one is best off rewriting the file serially, otherwise, returns `True`. """ def unfix_block(i): # If this algorithm gets more sophisticated we could carefully # move memmapped blocks around without clobbering other ones. # TODO: Copy to a tmpfile on disk and memmap it from there. entry = fixed[i] copy = entry.block.data.copy() entry.block.close() entry.block._data = copy del fixed[i] free.append(entry.block) def fix_block(block, offset): block.offset = offset fixed.append(Entry(block.offset, block.offset + block.size, block)) fixed.sort() Entry = namedtuple("Entry", ['start', 'end', 'block']) fixed = [] free = [] for block in blocks._internal_blocks: if block.offset is not None: block.update_size() fixed.append( Entry(block.offset, block.offset + block.size, block)) else: free.append(block) if not len(fixed): return False fixed.sort() # Make enough room at the beginning for the tree, by popping off # blocks at the beginning while len(fixed) and fixed[0].start < tree_size: unfix_block(0) if not len(fixed): return False # This algorithm is pretty basic at this point -- it just looks # for the first open spot big enough for the free block to fit. while len(free): block = free.pop() last_end = tree_size for entry in fixed: if entry.start - last_end >= block.size: fix_block(block, last_end) break last_end = entry.end else: padding = util.calculate_padding( entry.block.size, pad_blocks, block_size) fix_block(block, last_end + padding) if blocks.streamed_block is not None: padding = util.calculate_padding( fixed[-1].block.size, pad_blocks, block_size) blocks.streamed_block.offset = fixed[-1].end + padding blocks._sort_blocks_by_offset() return True asdf-1.3.3/asdf/util.py0000644000175000017500000002556213246003441014234 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import inspect import math import struct import types import six from six.moves.urllib.parse import urljoin from six.moves.urllib.request import pathname2url from six.moves.urllib import parse as urlparse from six.moves import zip as izip import numpy as np def human_list(l, separator="and"): """ Formats a list for human readability. Parameters ---------- l : sequence A sequence of strings separator : string, optional The word to use between the last two entries. Default: ``"and"``. 
Returns ------- formatted_list : string Examples -------- >>> human_list(["vanilla", "strawberry", "chocolate"], "or") 'vanilla, strawberry or chocolate' """ if len(l) == 1: return l[0] else: return ', '.join(l[:-1]) + ' ' + separator + ' ' + l[-1] def get_array_base(arr): """ For a given Numpy array, finds the base array that "owns" the actual data. """ base = arr while isinstance(base.base, np.ndarray): base = base.base return base def get_base_uri(uri): """ For a given URI, return the part without any fragment. """ parts = urlparse.urlparse(uri) return urlparse.urlunparse(list(parts[:5]) + ['']) def filepath_to_url(path): """ For a given local file path, return a file:// url. """ return urljoin('file:', pathname2url(path)) def iter_subclasses(cls): """ Returns all subclasses of a class. """ for x in cls.__subclasses__(): yield x for y in iter_subclasses(x): yield y def calculate_padding(content_size, pad_blocks, block_size): """ Calculates the amount of extra space to add to a block given the user's request for the amount of extra space. Care is given so that the total of size of the block with padding is evenly divisible by block size. Parameters ---------- content_size : int The size of the actual content pad_blocks : float or bool If `False`, add no padding (always return 0). If `True`, add a default amount of padding of 10% If a float, it is a factor to multiple content_size by to get the new total size. block_size : int The filesystem block size to use. Returns ------- nbytes : int The number of extra bytes to add for padding. """ if not pad_blocks: return 0 if pad_blocks is True: pad_blocks = 1.1 new_size = content_size * pad_blocks new_size = int((math.ceil( float(new_size) / block_size) + 1) * block_size) return max(new_size - content_size, 0) class BinaryStruct(object): """ A wrapper around the Python stdlib struct module to define a binary struct more like a dictionary than a tuple. """ def __init__(self, descr, endian='>'): """ Parameters ---------- descr : list of tuple Each entry is a pair ``(name, format)``, where ``format`` is one of the format types understood by `struct`. endian : str, optional The endianness of the struct. Must be ``>`` or ``<``. """ self._fmt = [endian] self._offsets = {} self._names = [] i = 0 for name, fmt in descr: self._fmt.append(fmt) self._offsets[name] = (i, (endian + fmt).encode('ascii')) self._names.append(name) i += struct.calcsize(fmt.encode('ascii')) self._fmt = ''.join(self._fmt).encode('ascii') self._size = struct.calcsize(self._fmt) @property def size(self): """ Return the size of the struct. """ return self._size def pack(self, **kwargs): """ Pack the given arguments, which are given as kwargs, and return the binary struct. """ fields = [0] * len(self._names) for key, val in six.iteritems(kwargs): if key not in self._offsets: raise KeyError("No header field '{0}'".format(key)) i = self._names.index(key) fields[i] = val return struct.pack(self._fmt, *fields) def unpack(self, buff): """ Unpack the given binary buffer into the fields. The result is a dictionary mapping field names to values. """ args = struct.unpack_from(self._fmt, buff[:self._size]) return dict(izip(self._names, args)) def update(self, fd, **kwargs): """ Update part of the struct in-place. Parameters ---------- fd : generic_io.GenericIO instance A writable, seekable file descriptor, currently seeked to the beginning of the struct. **kwargs : values The values to update on the struct. 
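# Sketch of the BinaryStruct API described above (the field names here are made
# up for illustration; the ASDF block header is the real in-tree user).
from asdf.util import BinaryStruct

s = BinaryStruct([('flags', 'I'), ('size', 'Q')])
packed = s.pack(flags=1, size=42)   # big-endian by default
print(s.unpack(packed))             # {'flags': 1, 'size': 42}
print(s.size)                       # 12 bytes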
""" updates = [] for key, val in six.iteritems(kwargs): if key not in self._offsets: raise KeyError("No header field '{0}'".format(key)) updates.append((self._offsets[key], val)) updates.sort() start = fd.tell() for ((offset, datatype), val) in updates: fd.seek(start + offset) fd.write(struct.pack(datatype, val)) class HashableDict(dict): """ A simple wrapper around dict to make it hashable. This is sure to be slow, but for small dictionaries it shouldn't matter. """ def __hash__(self): return hash(frozenset(self.items())) def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. This ends up working like ``from module import object`` but is easier to deal with than the `__import__` builtin and supports digging into submodules. Parameters ---------- name : `str` A dotted path to a Python object--that is, the name of a function, class, or other object in a module with the full path to that module, including parent modules, separated by dots. Also known as the fully qualified name of the object. Examples -------- >>> resolve_name('asdf.util.resolve_name') Raises ------ `ImportError` If the module or named object is not found. """ # Note: On python 2 these must be str objects and not unicode parts = [str(part) for part in name.split('.')] if len(parts) == 1: # No dots in the name--just a straight up module import cursor = 1 attr_name = str('') # Must not be unicode on Python 2 else: cursor = len(parts) - 1 attr_name = parts[-1] module_name = parts[:cursor] while cursor > 0: try: ret = __import__(str('.'.join(module_name)), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret def minversion(module, version, inclusive=True, version_path='__version__'): """ Returns `True` if the specified Python module satisfies a minimum version requirement, and `False` if not. By default this uses `pkg_resources.parse_version` to do the version comparison if available. Otherwise it falls back on `distutils.version.LooseVersion`. Parameters ---------- module : module or `str` An imported module of which to check the version, or the name of that module (in which case an import of that module is attempted-- if this fails `False` is returned). version : `str` The version as a string that this module must have at a minimum (e.g. ``'0.12'``). inclusive : `bool` The specified version meets the requirement inclusively (i.e. ``>=``) as opposed to strictly greater than (default: `True`). version_path : `str` A dotted attribute path to follow in the module for the version. Defaults to just ``'__version__'``, which should work for most Python modules. """ if isinstance(module, types.ModuleType): module_name = module.__name__ elif isinstance(module, six.string_types): module_name = module try: module = resolve_name(module_name) except ImportError: return False else: raise ValueError('module argument must be an actual imported ' 'module, or the import name of the module; ' 'got {0!r}'.format(module)) if '.' 
not in version_path: have_version = getattr(module, version_path) else: have_version = resolve_name('.'.join([module.__name__, version_path])) try: from pkg_resources import parse_version except ImportError: from distutils.version import LooseVersion as parse_version if inclusive: return parse_version(have_version) >= parse_version(version) else: return parse_version(have_version) > parse_version(version) class InheritDocstrings(type): """ This metaclass makes methods of a class automatically have their docstrings filled in from the methods they override in the base class. If the class uses multiple inheritance, the docstring will be chosen from the first class in the bases list, in the same way as methods are normally resolved in Python. If this results in selecting the wrong docstring, the docstring will need to be explicitly included on the method. For example:: >>> from asdf.util import InheritDocstrings >>> import six >>> @six.add_metaclass(InheritDocstrings) ... class A(object): ... def wiggle(self): ... "Wiggle the thingamajig" ... pass >>> class B(A): ... def wiggle(self): ... pass >>> B.wiggle.__doc__ u'Wiggle the thingamajig' """ def __init__(cls, name, bases, dct): def is_public_member(key): return ( (key.startswith('__') and key.endswith('__') and len(key) > 4) or not key.startswith('_')) for key, val in six.iteritems(dct): if (inspect.isfunction(val) and is_public_member(key) and val.__doc__ is None): for base in cls.__mro__[1:]: super_method = getattr(base, key, None) if super_method is not None: val.__doc__ = super_method.__doc__ break super(InheritDocstrings, cls).__init__(name, bases, dct) asdf-1.3.3/asdf/compression.py0000644000175000017500000001714613246003441015617 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import struct import numpy as np import six DEFAULT_BLOCK_SIZE = 1 << 22 #: Decompressed block size in bytes, 4MiB def validate(compression): """ Validate the compression string. Parameters ---------- compression : str, bytes or None Returns ------- compression : str or None In canonical form. 
Raises ------ ValueError """ if not compression or compression == b'\0\0\0\0': return None if isinstance(compression, bytes): compression = compression.decode('ascii') compression = compression.strip('\0') if compression not in ('zlib', 'bzp2', 'lz4', 'input'): raise ValueError( "Supported compression types are: 'zlib', 'bzp2', 'lz4', or 'input'") return compression class Lz4Compressor(object): def __init__(self, block_api): self._api = block_api def compress(self, data): output = self._api.compress(data, mode='high_compression') header = struct.pack('!I', len(output)) return header + output class Lz4Decompressor(object): def __init__(self, block_api): self._api = block_api self._size = 0 self._pos = 0 self._buffer = b'' def decompress(self, data): if not self._size: data = self._buffer + data if len(data) < 4: self._buffer += data return b'' self._size = struct.unpack('!I', data[:4])[0] data = data[4:] self._buffer = bytearray(self._size) if self._pos + len(data) < self._size: self._buffer[self._pos:self._pos + len(data)] = data self._pos += len(data) return b'' else: offset = self._size - self._pos self._buffer[self._pos:] = data[:offset] data = data[offset:] self._size = 0 self._pos = 0 output = self._api.decompress(self._buffer) self._buffer = b'' return output + self.decompress(data) def _get_decoder(compression): if compression == 'zlib': try: import zlib except ImportError: raise ImportError( "Your Python does not have the zlib library, " "therefore the compressed block in this ASDF file " "can not be decompressed.") return zlib.decompressobj() elif compression == 'bzp2': try: import bz2 except ImportError: raise ImportError( "Your Python does not have the bz2 library, " "therefore the compressed block in this ASDF file " "can not be decompressed.") return bz2.BZ2Decompressor() elif compression == 'lz4': try: import lz4.block except ImportError: raise ImportError( "lz4 library in not installed in your Python environment, " "therefore the compressed block in this ASDF file " "can not be decompressed.") return Lz4Decompressor(lz4.block) else: raise ValueError( "Unknown compression type: '{0}'".format(compression)) def _get_encoder(compression): if compression == 'zlib': try: import zlib except ImportError: raise ImportError( "Your Python does not have the zlib library, " "therefore the block in this ASDF file " "can not be compressed.") return zlib.compressobj() elif compression == 'bzp2': try: import bz2 except ImportError: raise ImportError( "Your Python does not have the bz2 library, " "therefore the block in this ASDF file " "can not be compressed.") return bz2.BZ2Compressor() elif compression == 'lz4': try: import lz4.block except ImportError: raise ImportError( "lz4 library in not installed in your Python environment, " "therefore the block in this ASDF file " "can not be compressed.") return Lz4Compressor(lz4.block) else: raise ValueError( "Unknown compression type: '{0}'".format(compression)) def to_compression_header(compression): """ Converts a compression string to the four byte field in a block header. """ if not compression: return b'' if isinstance(compression, six.text_type): return compression.encode('ascii') return compression def decompress(fd, used_size, data_size, compression): """ Decompress binary data in a file Parameters ---------- fd : generic_io.GenericIO object The file to read the compressed data from. used_size : int The size of the compressed data data_size : int The size of the uncompressed data compression : str The compression type used. 
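# Sketch of how a compression codec is chosen from the public API, assuming the
# ``all_array_compression`` option of ``AsdfFile.write_to`` (file name and array
# are hypothetical); each internal block is then compressed with that codec.
import numpy as np
import asdf

af = asdf.AsdfFile({'big': np.zeros((256, 256))})
af.write_to('compressed.asdf', all_array_compression='zlib')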
Returns ------- array : numpy.array A flat uint8 containing the decompressed data. """ buffer = np.empty((data_size,), np.uint8) compression = validate(compression) decoder = _get_decoder(compression) i = 0 for block in fd.read_blocks(used_size): decoded = decoder.decompress(block) if i + len(decoded) > data_size: raise ValueError("Decompressed data too long") buffer.data[i:i+len(decoded)] = decoded i += len(decoded) if hasattr(decoder, 'flush'): decoded = decoder.flush() if i + len(decoded) > data_size: raise ValueError("Decompressed data too long") elif i + len(decoded) < data_size: raise ValueError("Decompressed data too short") buffer[i:i+len(decoded)] = decoded return buffer def compress(fd, data, compression, block_size=DEFAULT_BLOCK_SIZE): """ Compress array data and write to a file. Parameters ---------- fd : generic_io.GenericIO object The file to write to. data : buffer The buffer of uncompressed data. compression : str The type of compression to use. block_size : int, optional Input data will be split into blocks of this size (in bytes) before compression. """ compression = validate(compression) encoder = _get_encoder(compression) # We can have numpy arrays here. While compress() will work with them, # it is impossible to split them into fixed size blocks without converting # them to bytes. if isinstance(data, np.ndarray): data = data.tobytes() for i in range(0, len(data), block_size): fd.write(encoder.compress(data[i:i+block_size])) if hasattr(encoder, "flush"): fd.write(encoder.flush()) def get_compressed_size(data, compression, block_size=DEFAULT_BLOCK_SIZE): """ Returns the number of bytes required when the given data is compressed. Parameters ---------- data : buffer compression : str The type of compression to use. block_size : int, optional Input data will be split into blocks of this size (in bytes) before the compression. Returns ------- bytes : int """ compression = validate(compression) encoder = _get_encoder(compression) l = 0 for i in range(0, len(data), block_size): l += len(encoder.compress(data[i:i+block_size])) if hasattr(encoder, "flush"): l += len(encoder.flush()) return l asdf-1.3.3/asdf/conftest.py0000644000175000017500000000453513246003441015101 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os # this contains imports plugins that configure py.test for asdf tests. # by importing them here in conftest.py they are discoverable by py.test # no matter how it is invoked within the source tree. from astropy import __version__ as astropy_version from astropy.tests.pytest_plugins import * import pytest import six # This is to figure out the affiliated package version, rather than # using Astropy's from . 
import version from .tests.httpserver import HTTPServer, RangeHTTPServer packagename = os.path.basename(os.path.dirname(__file__)) TESTED_VERSIONS[packagename] = version.version # Uncomment the following line to treat all DeprecationWarnings as exceptions kwargs = {} if astropy_version >= '3.0': kwargs['modules_to_ignore_on_import'] = ['astropy.tests.disable_internet'] enable_deprecations_as_exceptions(**kwargs) try: PYTEST_HEADER_MODULES['Astropy'] = 'astropy' PYTEST_HEADER_MODULES['jsonschema'] = 'jsonschema' PYTEST_HEADER_MODULES['pyyaml'] = 'yaml' PYTEST_HEADER_MODULES['six'] = 'six' del PYTEST_HEADER_MODULES['h5py'] del PYTEST_HEADER_MODULES['Matplotlib'] del PYTEST_HEADER_MODULES['Scipy'] except (NameError, KeyError): pass @pytest.fixture() def httpserver(request): """ The returned ``httpserver`` provides a threaded HTTP server instance. It serves content from a temporary directory (available as the attribute tmpdir) at randomly assigned URL (available as the attribute url). * ``tmpdir`` - path to the tmpdir that it's serving from (str) * ``url`` - the base url for the server """ server = HTTPServer() request.addfinalizer(server.finalize) return server @pytest.fixture() def rhttpserver(request): """ The returned ``httpserver`` provides a threaded HTTP server instance. It serves content from a temporary directory (available as the attribute tmpdir) at randomly assigned URL (available as the attribute url). The server supports HTTP Range headers. * ``tmpdir`` - path to the tmpdir that it's serving from (str) * ``url`` - the base url for the server """ server = RangeHTTPServer() request.addfinalizer(server.finalize) return server asdf-1.3.3/asdf/tests/0000755000175000017500000000000013246031665014046 5ustar dandan00000000000000asdf-1.3.3/asdf/tests/coveragerc0000644000175000017500000000152513243547254016117 0ustar dandan00000000000000[run] source = {packagename} omit = {packagename}/_astropy_init* {packagename}/conftest* {packagename}/cython_version* {packagename}/setup_package* {packagename}/*/setup_package* {packagename}/*/*/setup_package* {packagename}/tests/* {packagename}/*/tests/* {packagename}/*/*/tests/* {packagename}/version.* {packagename}/compat* {packagename}/extern* [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about packages we have installed except ImportError # Don't complain if tests don't hit assertions raise AssertionError raise NotImplementedError # Don't complain about script hooks def main\(.*\): # Ignore branches that don't pertain to this version of Python pragma: py{ignore_python_version} six.PY{ignore_python_version} asdf-1.3.3/asdf/tests/test_yaml.py0000644000175000017500000001035313246003441016412 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io from collections import OrderedDict import numpy as np import pytest import six import yaml from .. import asdf from .. import tagged from .. import treeutil from . import helpers def test_ordered_dict(tmpdir): # Test that we can write out and read in ordered dicts. 
tree = { "ordered_dict": OrderedDict( [('first', 'foo'), ('second', 'bar'), ('third', 'baz')]), "unordered_dict": { 'first': 'foo', 'second': 'bar', 'third': 'baz' } } def check_asdf(asdf): tree = asdf.tree assert isinstance(tree['ordered_dict'], OrderedDict) assert list(tree['ordered_dict'].keys()) == ['first', 'second', 'third'] assert not isinstance(tree['unordered_dict'], OrderedDict) assert isinstance(tree['unordered_dict'], dict) def check_raw_yaml(content): assert b'OrderedDict' not in content helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf, check_raw_yaml) def test_unicode_write(tmpdir): # We want to write unicode out as regular utf-8-encoded # characters, not as escape sequences tree = { "ɐʇɐp‾ǝpoɔıun": 42, "ascii_only": "this is ascii" } def check_asdf(asdf): assert "ɐʇɐp‾ǝpoɔıun" in asdf.tree assert isinstance(asdf.tree['ascii_only'], six.text_type) def check_raw_yaml(content): # Ensure that unicode is written out as UTF-8 without escape # sequences assert "ɐʇɐp‾ǝpoɔıun".encode('utf-8') in content # Ensure that the unicode "tag" is not used assert b"unicode" not in content helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf, check_raw_yaml) def test_arbitrary_python_object(): # Putting "just any old" Python object in the tree should raise an # exception. class Foo(object): pass tree = {'object': Foo()} buff = io.BytesIO() ff = asdf.AsdfFile(tree) with pytest.raises(yaml.YAMLError): ff.write_to(buff) def test_python_tuple(tmpdir): # We don't want to store tuples as tuples, because that's not a # built-in YAML data type. This test ensures that they are # converted to lists. tree = { "val": (1, 2, 3) } def check_asdf(asdf): assert isinstance(asdf.tree['val'], list) def check_raw_yaml(content): assert b'tuple' not in content helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf, check_raw_yaml) def test_tags_removed_after_load(tmpdir): tree = { "foo": ["bar", (1, 2, None)] } def check_asdf(asdf): for node in treeutil.iter_tree(asdf.tree): if node != asdf.tree: assert not isinstance(node, tagged.Tagged) helpers.assert_roundtrip_tree(tree, tmpdir, check_asdf) def test_explicit_tags(): yaml = """#ASDF {} %YAML 1.1 --- ! foo: ! [1, 2, 3] ... """.format(asdf.versioning.default_version) # Check that fully qualified explicit tags work buff = helpers.yaml_to_asdf(yaml, yaml_headers=False) with asdf.AsdfFile.open(buff) as ff: assert all(ff.tree['foo'] == [1, 2, 3]) def test_yaml_internal_reference(tmpdir): # Test that YAML internal references (anchors and aliases) work, # as well as recursive data structures. 
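# For reference, a minimal PyYAML example (independent of asdf) of the
# anchor/alias syntax that the assertion below looks for; identifiers such as
# id001 are chosen by the dumper.
import yaml

node = {'foo': '2'}
node['bar'] = node                                # self-reference
text = yaml.dump({'first': node, 'second': node})
assert '&id001' in text and '*id001' in text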
d = { 'foo': '2', } d['bar'] = d l = [] l.append(l) tree = { 'first': d, 'second': d, 'list': l } def check_yaml(content): assert b'list:--&id002-*id002' in b''.join(content.split()) helpers.assert_roundtrip_tree(tree, tmpdir, raw_yaml_check_func=check_yaml) def test_yaml_nan_inf(): tree = { 'a': np.nan, 'b': np.inf, 'c': -np.inf } buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert np.isnan(ff.tree['a']) assert np.isinf(ff.tree['b']) assert np.isinf(ff.tree['c']) asdf-1.3.3/asdf/tests/setup_package.py0000644000175000017500000000053013246003441017220 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function def get_package_data(): # pragma: no cover return { str(_PACKAGE_NAME_ + '.tests'): ['coveragerc', 'data/*.yaml', 'data/*.json', 'data/*.fits']} asdf-1.3.3/asdf/tests/test_asdf_schemas.py0000644000175000017500000002026513246003441020073 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- import io import os import re import yaml import pytest import numpy as np import astropy from astropy.tests.helper import catch_warnings import asdf from asdf.tests import helpers, CustomTestType from asdf import asdftypes from asdf import block from asdf import extension from asdf import schema from asdf import treeutil from asdf import util from asdf import versioning try: import gwcs HAS_GWCS = True except ImportError: HAS_GWCS = False class LabelMapperTestType(CustomTestType): version = '1.0.0' name = 'transform/label_mapper' class RegionsSelectorTestType(CustomTestType): version = '1.0.0' name = 'transform/regions_selector' class TestExtension(extension.BuiltinExtension): """This class defines an extension that represents tags whose implementations current reside in other repositories (such as GWCS) but whose schemas are defined in ASDF. This provides a workaround for schema validation testing since we want to pass without warnings, but the fact that these tag classes are not defined within ASDF means that warnings occur unless this extension is used. Eventually these schemas may be moved out of ASDF and into other repositories, or ASDF will potentially provide abstract base classes for the tag implementations. 
""" @property def types(self): return [LabelMapperTestType, RegionsSelectorTestType] @property def tag_mapping(self): return [('tag:stsci.edu:asdf', 'http://stsci.edu/schemas/asdf{tag_suffix}')] def generate_schema_list(): """Returns a generator for all schema files""" src = os.path.join(os.path.dirname(__file__), '../schemas') for root, dirs, files in os.walk(src): for fname in files: if not fname.endswith('.yaml'): continue if os.path.splitext(fname)[0] in ( 'draft-01', 'asdf-schema-1.0.0'): continue yield os.path.join(root, fname) def _assert_warnings(_warnings): if astropy.__version__ < '1.3.3': # Make sure at most only one warning occurred assert len(_warnings) <= 1, helpers.display_warnings(_warnings) # Make sure the warning was the one we expected if len(_warnings) == 1: message = str(_warnings[0].message) target_string = "gwcs and astropy-1.3.3 packages are required" assert message.startswith('Failed to convert'), \ helpers.display_warnings(_warnings) assert target_string in str(_warnings[0].message), \ helpers.display_warnings(_warnings) else: assert len(_warnings) == 0, helpers.display_warnings(_warnings) def _find_standard_version(filename): components = filename[filename.find('schemas') + 1:].split(os.path.sep) tag = 'tag:{}:{}'.format(components[1], '/'.join(components[2:])) name, version = asdftypes.split_tag_version(tag.replace('.yaml', '')) for sv in versioning.supported_versions: map_version = versioning.get_version_map(sv)['tags'].get(name) if map_version is not None and version == map_version: return sv return versioning.default_version def generate_example_schemas(): """Returns a generator for all examples in schema files""" def find_examples_in_schema(path): """Returns generator for all examples in schema at given path""" with open(path, 'rb') as fd: schema_tree = yaml.load(fd) for node in treeutil.iter_tree(schema_tree): if (isinstance(node, dict) and 'examples' in node and isinstance(node['examples'], list)): for desc, example in node['examples']: yield example for schema_path in generate_schema_list(): for example in find_examples_in_schema(schema_path): yield (schema_path, example) def pytest_generate_tests(metafunc): """This function is used by pytest to parametrize test function inputs Parameters: ----------- metafunc : object returned by pytest to enable test parametrization This function enables parametrization of the following tests: test_validate_schema test_schema_example The 'yield' functionality in pytest for parametrized tests has been deprecated. The @pytest.mark.parametrize decorator is not powerful enough for the kind of programmatic parametrization that we require here. 
""" def get_schema_name(schema_path): """Helper function to return the informative part of a schema path""" path = os.path.normpath(schema_path) return os.path.sep.join(path.split(os.path.sep)[-3:]) def create_schema_example_id(argval): """Helper function to create test ID for schema example validation""" if argval[0] == '/': # ID for the first argument is just the schema name return get_schema_name(argval) else: # This will cause pytest to create labels of the form: # SCHEMA_NAME-example # If there are multiple examples within a single schema, the # examples will be numbered automatically to distinguish them return "example" if metafunc.function is test_validate_schema: metafunc.parametrize( 'schema_path', generate_schema_list(), # just use the schema name as a test ID instead of full path ids=get_schema_name) elif metafunc.function is test_schema_example: metafunc.parametrize( 'filename,example', generate_example_schemas(), ids=create_schema_example_id) def test_validate_schema(schema_path): """Pytest to check validity of schema file at given path Parameters: ----------- schema_path : name of the schema file to be validated This function is called with a range of parameters by pytest's 'parametrize' utility in order to account for all schema files. """ # Make sure that each schema itself is valid. schema_tree = schema.load_schema(schema_path, resolve_references=True) schema.check_schema(schema_tree) def test_schema_example(filename, example): """Pytest to check validity of a specific example within schema file Parameters: ----------- filename : name of the schema file containing example to be tested example: string representing example This function is called with a range of parameters by pytest's 'parametrize' utility in order to account for all examples in all schema files. """ if not HAS_GWCS and re.search(r'frame-\d\.\d\.\d\.yaml', filename): return pytest.skip standard_version = _find_standard_version(filename) # Make sure that the examples in the schema files (and thus the # ASDF standard document) are valid. buff = helpers.yaml_to_asdf( 'example: ' + example.strip(), standard_version=standard_version) ff = asdf.AsdfFile( uri=util.filepath_to_url(os.path.abspath(filename)), extensions=TestExtension()) # Fake an external file ff2 = asdf.AsdfFile({'data': np.empty((1024*1024*8), dtype=np.uint8)}) ff._external_asdf_by_uri[ util.filepath_to_url( os.path.abspath( os.path.join( os.path.dirname(filename), 'external.asdf')))] = ff2 # Add some dummy blocks so that the ndarray examples work for i in range(3): b = block.Block(np.zeros((1024*1024*8), dtype=np.uint8)) b._used = True ff.blocks.add(b) b._array_storage = "streamed" try: with catch_warnings() as w: ff._open_impl(ff, buff) # Do not tolerate any warnings that occur during schema validation, # other than a few that we expect to occur under certain circumstances _assert_warnings(w) except: print("From file:", filename) raise # Just test we can write it out. A roundtrip test # wouldn't always yield the correct result, so those have # to be covered by "real" unit tests. 
if b'external.asdf' not in buff.getvalue(): buff = io.BytesIO() ff.write_to(buff) asdf-1.3.3/asdf/tests/test_generic_io.py0000644000175000017500000006046213246031624017565 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import sys import pytest import six import six.moves.urllib.request as urllib_request import numpy as np from .. import asdf from .. import generic_io from .. import util from . import helpers def _get_small_tree(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } return tree def _get_large_tree(): # These are designed to be big enough so they don't fit in a # single block, but not so big that RAM/disk space for the tests # is enormous. x = np.random.rand(256, 256) y = np.random.rand(16, 16, 16) tree = { 'science_data': x, 'more': y } return tree @pytest.fixture(params=[_get_small_tree, _get_large_tree]) def tree(request): return request.param() def _roundtrip(tree, get_write_fd, get_read_fd, write_options={}, read_options={}): with get_write_fd() as fd: asdf.AsdfFile(tree).write_to(fd, **write_options) # Work around the fact that generic_io's get_file doesn't have a way of # determining whether or not the underlying file handle should be # closed as part of the exit handler if (six.PY3 and isinstance(fd._fd, io.FileIO)) or \ (six.PY2 and isinstance(fd._fd, file)): fd._fd.close() with get_read_fd() as fd: ff = asdf.AsdfFile.open(fd, **read_options) helpers.assert_tree_match(tree, ff.tree) return ff def test_mode_fail(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with pytest.raises(ValueError): generic_io.get_file(path, mode="r+") def test_open(tmpdir): from .. import open path = os.path.join(str(tmpdir), 'test.asdf') # Simply tests the high-level "open" function ff = asdf.AsdfFile(_get_small_tree()) ff.write_to(path) with open(path) as ff2: helpers.assert_tree_match(ff2.tree, ff.tree) def test_path(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(path, mode='w') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) return f def get_read_fd(): f = generic_io.get_file(path, mode='r') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) # This is to check for a "feature" in Python 3.x that reading zero # bytes from a socket causes it to stop. We have code in generic_io.py # to workaround it. 
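    # (Hedged aside on the symptom being guarded against: on some
    #  socket-backed streams a zero-byte read returning b'' can be misread
    #  as end-of-stream by naive loops; generic_io contains the workaround,
    #  and the zero-byte read below merely exercises it.)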
f.read(0) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 next(ff.blocks.internal_blocks).data assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) def test_open2(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(open(path, 'wb'), mode='w') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) f._close = True return f def get_read_fd(): f = generic_io.get_file(open(path, 'rb'), mode='r') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) f._close = True return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) def test_open_fail(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='w') def test_open_fail2(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with io.open(path, 'w') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='w') if six.PY3: def test_open_fail3(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: fd.write("\n\n\n") with open(path, 'r') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='r') def test_open_fail4(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') with open(path, 'w') as fd: fd.write("\n\n\n") with io.open(path, 'r') as fd: with pytest.raises(ValueError): generic_io.get_file(fd, mode='r') def test_io_open(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): f = generic_io.get_file(io.open(path, 'wb'), mode='w') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) f._close = True return f def get_read_fd(): f = generic_io.get_file(io.open(path, 'r+b'), mode='rw') assert isinstance(f, generic_io.RealFile) assert f._uri == util.filepath_to_url(path) f._close = True return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) ff.tree['science_data'][0] = 42 def test_bytes_io(tree): buff = io.BytesIO() def get_write_fd(): f = generic_io.get_file(buff, mode='w') assert isinstance(f, generic_io.MemoryIO) return f def get_read_fd(): buff.seek(0) f = generic_io.get_file(buff, mode='rw') assert isinstance(f, generic_io.MemoryIO) return f with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] = 42 def test_streams(tree): buff = io.BytesIO() def get_write_fd(): return generic_io.OutputStream(buff) def get_read_fd(): buff.seek(0) return generic_io.InputStream(buff, 'rw') with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(ff.blocks) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] = 42 def test_streams2(): buff = io.BytesIO(b'\0' * 60) buff.seek(0) fd = generic_io.InputStream(buff, 'r') x = fd._peek(10) x = fd.read() assert len(x) == 60 @helpers.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall 
prevents test") def test_urlopen(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): return generic_io.get_file( urllib_request.urlopen( httpserver.url + "test.asdf")) with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) @helpers.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_http_connection(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): fd = generic_io.get_file(httpserver.url + "test.asdf") assert isinstance(fd, generic_io.InputStream) # This is to check for a "feature" in Python 3.x that reading zero # bytes from a socket causes it to stop. We have code in generic_io.py # to workaround it. fd.read(0) return fd with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert not isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] == 42 @helpers.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_http_connection_range(tree, rhttpserver): path = os.path.join(rhttpserver.tmpdir, 'test.asdf') connection = [None] def get_write_fd(): return generic_io.get_file(open(path, 'wb'), mode='w') def get_read_fd(): fd = generic_io.get_file(rhttpserver.url + "test.asdf") assert isinstance(fd, generic_io.HTTPConnection) connection[0] = fd return fd with _roundtrip(tree, get_write_fd, get_read_fd) as ff: assert len(list(ff.blocks.internal_blocks)) == 2 assert isinstance(next(ff.blocks.internal_blocks)._data, np.core.memmap) assert isinstance(next(ff.blocks.internal_blocks)._data, np.ndarray) ff.tree['science_data'][0] == 42 def test_exploded_filesystem(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): return generic_io.get_file(path, mode='r') with _roundtrip(tree, get_write_fd, get_read_fd, write_options={'all_array_storage': 'external'}) as ff: assert len(list(ff.blocks.internal_blocks)) == 0 assert len(list(ff.blocks.external_blocks)) == 2 def test_exploded_filesystem_fail(tree, tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): fd = io.BytesIO() with open(path, mode='rb') as fd2: fd.write(fd2.read()) fd.seek(0) return fd with get_write_fd() as fd: asdf.AsdfFile(tree).write_to(fd, all_array_storage='external') with get_read_fd() as fd: with asdf.AsdfFile.open(fd) as ff: with pytest.raises(ValueError): helpers.assert_tree_match(tree, ff.tree) @helpers.remote_data @pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows firewall prevents test") def test_exploded_http(tree, httpserver): path = os.path.join(httpserver.tmpdir, 'test.asdf') def get_write_fd(): return generic_io.get_file(path, mode='w') def get_read_fd(): return generic_io.get_file(httpserver.url + "test.asdf") with _roundtrip(tree, get_write_fd, get_read_fd, write_options={'all_array_storage': 'external'}) as ff: assert 
len(list(ff.blocks.internal_blocks)) == 0 assert len(list(ff.blocks.external_blocks)) == 2 def test_exploded_stream_write(): # Writing an exploded file to an output stream should fail, since # we can't write "files" alongside it. tree = _get_small_tree() ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.write_to(io.BytesIO(), all_array_storage='external') def test_exploded_stream_read(tmpdir): # Reading from an exploded input file should fail, but only once # the data block is accessed. This behavior is important so that # the tree can still be accessed even if the data is missing. tree = _get_small_tree() path = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(tree) ff.write_to(path, all_array_storage='external') with open(path, 'rb') as fd: # This should work, so we can get the tree content x = generic_io.InputStream(fd, 'r') with asdf.AsdfFile.open(x) as ff: pass # It's only on trying to get at the block data that the error # occurs. with pytest.raises(ValueError): ff.tree['science_data'][:] def test_unicode_open(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') tree = _get_small_tree() ff = asdf.AsdfFile(tree) ff.write_to(path) with io.open(path, 'rt', encoding="utf-8") as fd: with pytest.raises(ValueError): with asdf.AsdfFile.open(fd): pass def test_invalid_obj(tmpdir): with pytest.raises(ValueError): generic_io.get_file(42) path = os.path.join(str(tmpdir), 'test.asdf') with generic_io.get_file(path, 'w') as fd: with pytest.raises(ValueError): fd2 = generic_io.get_file(fd, 'r') with pytest.raises(ValueError): fd2 = generic_io.get_file("http://www.google.com", "w") with pytest.raises(TypeError): fd2 = generic_io.get_file(io.StringIO()) with open(path, 'rb') as fd: with pytest.raises(ValueError): fd2 = generic_io.get_file(fd, 'w') with io.open(path, 'rb') as fd: with pytest.raises(ValueError): fd2 = generic_io.get_file(fd, 'w') with generic_io.get_file(sys.__stdout__, 'w'): pass def test_nonseekable_file(tmpdir): if six.PY2: base = file else: base = io.IOBase class FileWrapper(base): def tell(self): raise IOError() def seekable(self): return False def readable(self): return True def writable(self): return True with FileWrapper(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: assert isinstance(generic_io.get_file(fd, 'w'), generic_io.OutputStream) with pytest.raises(ValueError): generic_io.get_file(fd, 'rw') with FileWrapper(os.path.join(str(tmpdir), 'test.asdf'), 'rb') as fd: assert isinstance(generic_io.get_file(fd, 'r'), generic_io.InputStream) def test_relative_uri(): assert generic_io.relative_uri( 'http://www.google.com', 'file://local') == 'file://local' def test_arbitrary_file_object(): class Wrapper(object): def __init__(self, init): self._fd = init class Random(object): def seek(self, *args): return self._fd.seek(*args) def tell(self, *args): return self._fd.tell(*args) class Reader(Wrapper): def read(self, *args): return self._fd.read(*args) class RandomReader(Reader, Random): pass class Writer(Wrapper): def write(self, *args): return self._fd.write(*args) class RandomWriter(Writer, Random): pass class All(Reader, Writer, Random): pass buff = io.BytesIO() assert isinstance( generic_io.get_file(Reader(buff), 'r'), generic_io.InputStream) assert isinstance( generic_io.get_file(Writer(buff), 'w'), generic_io.OutputStream) assert isinstance( generic_io.get_file(RandomReader(buff), 'r'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(RandomWriter(buff), 'w'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'rw'), 
generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'r'), generic_io.MemoryIO) assert isinstance( generic_io.get_file(All(buff), 'w'), generic_io.MemoryIO) with pytest.raises(ValueError): generic_io.get_file(Reader(buff), 'w') with pytest.raises(ValueError): generic_io.get_file(Writer(buff), 'r') def test_check_bytes(tmpdir): with io.open(os.path.join(str(tmpdir), 'test.asdf'), 'w', encoding='utf-8') as fd: assert generic_io._check_bytes(fd, 'r') is False assert generic_io._check_bytes(fd, 'rw') is False assert generic_io._check_bytes(fd, 'w') is False with io.open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: assert generic_io._check_bytes(fd, 'r') is True assert generic_io._check_bytes(fd, 'rw') is True assert generic_io._check_bytes(fd, 'w') is True def test_truncated_reader(): """ Tests several edge cases for _TruncatedReader.read() Includes regression test for https://github.com/spacetelescope/asdf/pull/181 """ # TODO: Should probably break this up into multiple test cases fd = generic_io.RandomAccessFile(io.BytesIO(), 'rw') content = b'a' * 100 + b'b' fd.write(content) fd.seek(0) # Simple cases where the delimiter is not found at all tr = generic_io._TruncatedReader(fd, b'x', 1) with pytest.raises(ValueError): tr.read() fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1) assert tr.read(100) == content[:100] assert tr.read(1) == content[100:] with pytest.raises(ValueError): tr.read() fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, exception=False) assert tr.read() == content # No delimiter but with 'initial_content' init = b'abcd' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read(100) == (init + content)[:100] assert tr.read() == (init + content)[100:] fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read() == init + content fd.seek(0) tr = generic_io._TruncatedReader(fd, b'x', 1, initial_content=init, exception=False) assert tr.read(2) == init[:2] assert tr.read() == init[2:] + content # Some tests of a single character delimiter # Add some trailing data after the delimiter fd.seek(0, 2) fd.write(b'ffff') # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1) assert tr.read(100) == content[:100] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1) assert tr.read() == content[:100] # Delimiter included fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read() == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(101) == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(102) == content[:101] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, b'b', 1, include=True) assert tr.read(100) == content[:100] assert tr.read(1) == content[100:101] assert tr.read() == b'' # Longer delimiter with variable length content = b'a' * 100 + b'\n...\n' + b'ffffff' delimiter = br'\r?\n\.\.\.((\r?\n)|$)' readahead = 7 fd = generic_io.RandomAccessFile(io.BytesIO(), 'rw') fd.write(content) # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read() == content[:100] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(100) == content[:100] assert tr.read() == b'' # (read just up to 
the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read() == content[99:100] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read(2) == content[99:100] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead) assert tr.read(99) == content[:99] assert tr.read(50) == content[99:100] assert tr.read() == b'' # Same as the previous set of tests, but including the delimiter fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read() == content[:105] assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(105) == content[:105] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read() == content[99:105] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read(2) == content[99:101] assert tr.read() == content[101:105] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True) assert tr.read(99) == content[:99] assert tr.read(50) == content[99:105] assert tr.read() == b'' # Same sequence of tests but with some 'initial_content' init = b'abcd' # Delimiter not included in read fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read() == (init + content[:100]) assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(100) == (init + content[:96]) assert tr.read() == content[96:100] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read() == content[95:100] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(6) == content[95:100] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(50) == content[95:100] assert tr.read() == b'' # Same as the previous set of tests, but including the delimiter fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read() == (init + content[:105]) assert tr.read() == b'' fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(105) == (init + content[:101]) assert tr.read() == content[101:105] assert tr.read() == b'' # (read just up to the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(103) == (init + content[:99]) assert tr.read() == content[99:105] assert tr.read() == b'' # (read partway into the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, 
delimiter, readahead, include=True, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(6) == content[95:101] assert tr.read() == content[101:105] assert tr.read() == b'' # (read well past the delimiter) fd.seek(0) tr = generic_io._TruncatedReader(fd, delimiter, readahead, include=True, initial_content=init) assert tr.read(99) == (init + content[:95]) assert tr.read(50) == content[95:105] assert tr.read() == b'' def test_is_asdf(tmpdir): # test fits astropy = pytest.importorskip('astropy') from astropy.io import fits hdul = fits.HDUList() phdu= fits.PrimaryHDU() imhdu= fits.ImageHDU(data=np.arange(24).reshape((4,6))) hdul.append(phdu) hdul.append(imhdu) path = os.path.join(str(tmpdir), 'test.fits') hdul.writeto(path) assert not asdf.is_asdf_file(path) assert asdf.is_asdf_file(asdf.AsdfFile()) asdf-1.3.3/asdf/tests/test_fits_embed.py0000644000175000017500000002167213246003441017557 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import copy import os import sys import pytest import numpy as np from numpy.testing import assert_array_equal astropy = pytest.importorskip('astropy') from astropy.io import fits from astropy.tests.helper import catch_warnings from .. import asdf from .. import fits_embed from .. import open as asdf_open from .helpers import assert_tree_match, yaml_to_asdf, display_warnings TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data') def create_asdf_in_fits(): """Test fixture to create AsdfInFits object to use for testing""" hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float))) tree = { 'model': { 'sci': { 'data': hdulist[0].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist[1].data, 'wcs': 'WCS info' }, 'err': { 'data': hdulist[2].data, 'wcs': 'WCS info' } } } return fits_embed.AsdfInFits(hdulist, tree) def test_embed_asdf_in_fits_file(tmpdir): hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ')) tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, 'wcs': 'WCS info' }, 'dq': { 'data': hdulist['DQ'].data, 'wcs': 'WCS info' } } } ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to(os.path.join(str(tmpdir), 'test.fits')) ff2 = asdf.AsdfFile(tree) ff2.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with fits.open(os.path.join(str(tmpdir), 'test.fits')) as hdulist2: assert len(hdulist2) == 3 assert [x.name for x in hdulist2] == ['SCI', 'DQ', 'ASDF'] assert_array_equal(hdulist2[0].data, np.arange(512, dtype=np.float)) assert hdulist2['ASDF'].data.tostring().strip().endswith(b"...") with fits_embed.AsdfInFits.open(hdulist2) as ff2: assert_tree_match(tree, ff2.tree) ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to('test.asdf') with asdf.AsdfFile.open('test.asdf') as ff: assert_tree_match(tree, ff.tree) def test_embed_asdf_in_fits_file_anonymous_extensions(tmpdir): # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(os.path.join(str(tmpdir), 'test.fits')) ff2 = asdf.AsdfFile(asdf_in_fits.tree) ff2.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with fits.open(os.path.join(str(tmpdir), 'test.fits')) as hdulist: assert len(hdulist) 
== 4 assert [x.name for x in hdulist] == ['PRIMARY', '', '', 'ASDF'] assert hdulist['ASDF'].data.tostring().strip().endswith(b"...") with fits_embed.AsdfInFits.open(hdulist) as ff2: assert_tree_match(asdf_in_fits.tree, ff2.tree) ff = asdf.AsdfFile(copy.deepcopy(ff2.tree)) ff.write_to('test.asdf') with asdf.AsdfFile.open('test.asdf') as ff: assert_tree_match(asdf_in_fits.tree, ff.tree) def test_create_in_tree_first(tmpdir): tree = { 'model': { 'sci': { 'data': np.arange(512, dtype=np.float), 'wcs': 'WCS info' }, 'dq': { 'data': np.arange(512, dtype=np.float), 'wcs': 'WCS info' }, 'err': { 'data': np.arange(512, dtype=np.float), 'wcs': 'WCS info' } } } hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(tree['model']['sci']['data'])) hdulist.append(fits.ImageHDU(tree['model']['dq']['data'])) hdulist.append(fits.ImageHDU(tree['model']['err']['data'])) tmpfile = os.path.join(str(tmpdir), 'test.fits') with fits_embed.AsdfInFits(hdulist, tree) as ff: ff.write_to(tmpfile) with asdf.AsdfFile(tree) as ff: ff.write_to(os.path.join(str(tmpdir), 'plain.asdf')) with asdf.AsdfFile.open(os.path.join(str(tmpdir), 'plain.asdf')) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=np.float)) # This tests the changes that allow FITS files with ASDF extensions to be # opened directly by the top-level AsdfFile.open API with asdf_open(tmpfile) as ff: assert_array_equal(ff.tree['model']['sci']['data'], np.arange(512, dtype=np.float)) def compare_asdfs(asdf0, asdf1): # Make sure the trees match assert_tree_match(asdf0.tree, asdf1.tree) # Compare the data blocks for key in asdf0.tree['model'].keys(): assert_array_equal( asdf0.tree['model'][key]['data'], asdf1.tree['model'][key]['data']) def test_asdf_in_fits_open(tmpdir): """Test the open method of AsdfInFits""" tmpfile = os.path.join(str(tmpdir), 'test.fits') # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tmpfile) # Test opening the file directly from the URI with fits_embed.AsdfInFits.open(tmpfile) as ff: compare_asdfs(asdf_in_fits, ff) # Test open/close without context handler ff = fits_embed.AsdfInFits.open(tmpfile) compare_asdfs(asdf_in_fits, ff) ff.close() # Test reading in the file from an already-opened file handle with open(tmpfile, 'rb') as handle: with fits_embed.AsdfInFits.open(handle) as ff: compare_asdfs(asdf_in_fits, ff) # Test opening the file as a FITS file first and passing the HDUList with fits.open(tmpfile) as hdulist: with fits_embed.AsdfInFits.open(hdulist) as ff: compare_asdfs(asdf_in_fits, ff) def test_asdf_open(tmpdir): """Test the top-level open method of the asdf module""" tmpfile = os.path.join(str(tmpdir), 'test.fits') # Write the AsdfInFits object out as a FITS file with ASDF extension asdf_in_fits = create_asdf_in_fits() asdf_in_fits.write_to(tmpfile) # Test opening the file directly from the URI with asdf_open(tmpfile) as ff: compare_asdfs(asdf_in_fits, ff) # Test open/close without context handler ff = asdf_open(tmpfile) compare_asdfs(asdf_in_fits, ff) ff.close() # Test reading in the file from an already-opened file handle with open(tmpfile, 'rb') as handle: with asdf_open(handle) as ff: compare_asdfs(asdf_in_fits, ff) # Test opening the file as a FITS file first and passing the HDUList with fits.open(tmpfile) as hdulist: with asdf_open(hdulist) as ff: compare_asdfs(asdf_in_fits, ff) def test_bad_input(tmpdir): """Make sure these functions behave properly with bad input""" text_file = os.path.join(str(tmpdir), 'test.txt') 
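    # Hedged aside: once written below, this plain text file carries neither
    # the ASDF magic ("#ASDF") nor the FITS magic ("SIMPLE"), which is why
    # asdf_open is expected to reject it.  Illustrative check only:
    #
    #   with open(text_file, 'rb') as fh:
    #       assert not fh.read(5).startswith(b'#ASDF')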
with open(text_file, 'w') as fh: fh.write('I <3 ASDF!!!!!') with pytest.raises(ValueError): asdf_open(text_file) @pytest.mark.skipif(sys.platform.startswith('win'), reason='Avoid path manipulation on Windows') def test_version_mismatch_file(): testfile = os.path.join(TEST_DATA_PATH, 'version_mismatch.fits') with catch_warnings() as w: with asdf.AsdfFile.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) # This is the warning that we expect from opening the FITS file assert len(w) == 1 assert str(w[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 7.0.0 found in file " "'file://{}', but latest supported version is 1.0.0".format(testfile)) # Make sure warning does not occur when warning is ignored (default) with catch_warnings() as w: with asdf.AsdfFile.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 0, display_warnings(w) with catch_warnings() as w: with fits_embed.AsdfInFits.open(testfile, ignore_version_mismatch=False) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 1 assert str(w[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 7.0.0 found in file " "'file://{}', but latest supported version is 1.0.0".format(testfile)) # Make sure warning does not occur when warning is ignored (default) with catch_warnings() as w: with fits_embed.AsdfInFits.open(testfile) as fits_handle: assert fits_handle.tree['a'] == complex(0j) assert len(w) == 0, display_warnings(w) asdf-1.3.3/asdf/tests/test_compression.py0000644000175000017500000001154413246003441020014 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import numpy as np import pytest from .. import asdf, compression from .. 
import generic_io from ..tests import helpers def _get_large_tree(): np.random.seed(0) x = np.random.rand(128, 128) tree = { 'science_data': x, } return tree def _get_sparse_tree(): np.random.seed(0) arr = np.zeros((128, 128)) for x, y, z in np.random.rand(64, 3): arr[int(x*127), int(y*127)] = z arr[0, 0] = 5.0 tree = {'science_data': arr} return tree def _roundtrip(tmpdir, tree, compression=None, write_options={}, read_options={}): tmpfile = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['science_data'], compression) ff.write_to(tmpfile, **write_options) with asdf.AsdfFile.open(tmpfile, mode="rw") as ff: ff.update(**write_options) with asdf.AsdfFile.open(tmpfile, **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) # Also test saving to a buffer buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['science_data'], compression) ff.write_to(buff, **write_options) buff.seek(0) with asdf.AsdfFile.open(buff, **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) # Test saving to a non-seekable buffer buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.set_array_compression(tree['science_data'], compression) ff.write_to(generic_io.OutputStream(buff), **write_options) buff.seek(0) with asdf.AsdfFile.open(generic_io.InputStream(buff), **read_options) as ff: helpers.assert_tree_match(tree, ff.tree) return ff def test_invalid_compression(): tree = _get_large_tree() ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.set_array_compression(tree['science_data'], 'foo') with pytest.raises(ValueError): compression._get_decoder('foo') with pytest.raises(ValueError): compression._get_encoder('foo') def test_get_compressed_size(): assert compression.get_compressed_size(b'0' * 1024, 'zlib') < 1024 def test_decompress_too_long_short(): fio = io.BytesIO() compression.compress(fio, b'0' * 1024, 'zlib') size = fio.tell() fio.seek(0) fio.read_blocks = lambda us: [fio.read(us)] compression.decompress(fio, size, 1024, 'zlib') fio.seek(0) with pytest.raises(ValueError): compression.decompress(fio, size, 1025, 'zlib') fio.seek(0) with pytest.raises(ValueError): compression.decompress(fio, size, 1023, 'zlib') def test_zlib(tmpdir): tree = _get_large_tree() _roundtrip(tmpdir, tree, 'zlib') def test_bzp2(tmpdir): tree = _get_large_tree() _roundtrip(tmpdir, tree, 'bzp2') def test_lz4(tmpdir): pytest.importorskip('lz4') tree = _get_large_tree() _roundtrip(tmpdir, tree, 'lz4') def test_recompression(tmpdir): tree = _get_large_tree() tmpfile = os.path.join(str(tmpdir), 'test1.asdf') afile = asdf.AsdfFile(tree) afile.write_to(tmpfile, all_array_compression='zlib') afile.close() afile = asdf.AsdfFile.open(tmpfile) tmpfile = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile, all_array_compression='bzp2') afile.close() afile = asdf.AsdfFile.open(tmpfile) helpers.assert_tree_match(tree, afile.tree) afile.close() def test_input(tmpdir): tree = _get_large_tree() tmpfile = os.path.join(str(tmpdir), 'test1.asdf') afile = asdf.AsdfFile(tree) afile.write_to(tmpfile, all_array_compression='zlib') afile.close() afile = asdf.AsdfFile.open(tmpfile) tmpfile = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile) afile.close() afile = asdf.AsdfFile.open(tmpfile) helpers.assert_tree_match(tree, afile.tree) assert afile.get_array_compression(afile.tree['science_data']) == 'zlib' afile.close() def test_none(tmpdir): tree = _get_large_tree() tmpfile1 = os.path.join(str(tmpdir), 'test1.asdf') afile = asdf.AsdfFile(tree) 
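    # Illustrative aside (filenames hypothetical): the two ways compression
    # is requested in this module are a per-array setting and a global write
    # option, roughly
    #
    #   af = asdf.AsdfFile(tree)
    #   af.set_array_compression(tree['science_data'], 'zlib')  # per-array
    #   af.write_to('per_array.asdf')
    #
    #   asdf.AsdfFile(tree).write_to('global.asdf', all_array_compression='zlib')
    #
    # The write just below deliberately uses neither, so no compression is
    # applied to the first file.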
afile.write_to(tmpfile1) afile.close() afile = asdf.AsdfFile.open(tmpfile1) assert afile.get_array_compression(afile.tree['science_data']) is None tmpfile2 = os.path.join(str(tmpdir), 'test2.asdf') afile.write_to(tmpfile2, all_array_compression='zlib') assert afile.get_array_compression(afile.tree['science_data']) == 'zlib' afile.close() afile = asdf.AsdfFile.open(tmpfile2) afile.write_to(tmpfile1, all_array_compression=None) afile.close() afile = asdf.AsdfFile.open(tmpfile1) helpers.assert_tree_match(tree, afile.tree) assert afile.get_array_compression(afile.tree['science_data']) is None afile.close() asdf-1.3.3/asdf/tests/test_asdftypes.py0000644000175000017500000004550013246003441017454 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import sys import pytest from .. import asdf from .. import asdftypes from .. import extension from .. import util from .. import versioning from . import helpers, CustomTestType from astropy.tests.helper import catch_warnings TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data') def test_custom_tag(): import fractions class FractionType(asdftypes.AsdfType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [fractions.Fraction] @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return fractions.Fraction(tree[0], tree[1]) class FractionExtension(object): @property def types(self): return [FractionType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] class FractionCallable(FractionExtension): @property def tag_mapping(self): def check(tag): prefix = 'tag:nowhere.org:custom' if tag.startswith(prefix): return 'http://nowhere.org/schemas/custom' + tag[len(prefix):] return [check] yaml = """ a: ! 
[2, 3] b: !core/complex-1.0.0 0j """ buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open( buff, extensions=FractionExtension()) as ff: assert ff.tree['a'] == fractions.Fraction(2, 3) buff = io.BytesIO() ff.write_to(buff) buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open( buff, extensions=FractionCallable()) as ff: assert ff.tree['a'] == fractions.Fraction(2, 3) buff = io.BytesIO() ff.write_to(buff) buff.close() def test_version_mismatch(): yaml = """ a: !core/complex-42.0.0 0j """ buff = helpers.yaml_to_asdf(yaml) with catch_warnings() as warning: with asdf.AsdfFile.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 1 assert str(warning[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file, " "but latest supported version is 1.0.0") # Make sure warning is repeatable buff.seek(0) with catch_warnings() as warning: with asdf.AsdfFile.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 1 assert str(warning[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file, " "but latest supported version is 1.0.0") # Make sure the warning does not occur if it is being ignored (default) buff.seek(0) with catch_warnings() as warning: with asdf.AsdfFile.open(buff) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 0, helpers.display_warnings(warning) # If the major and minor match, there should be no warning. yaml = """ a: !core/complex-1.0.1 0j """ buff = helpers.yaml_to_asdf(yaml) with catch_warnings() as warning: with asdf.AsdfFile.open(buff, ignore_version_mismatch=False) as ff: assert isinstance(ff.tree['a'], complex) assert len(warning) == 0 @pytest.mark.skipif(sys.platform.startswith('win'), reason='Avoid path manipulation on Windows') def test_version_mismatch_file(tmpdir): testfile = os.path.join(str(tmpdir), 'mismatch.asdf') yaml = """ a: !core/complex-42.0.0 0j """ buff = helpers.yaml_to_asdf(yaml) with open(testfile, 'wb') as handle: handle.write(buff.read()) with catch_warnings() as w: with asdf.AsdfFile.open(testfile, ignore_version_mismatch=False) as ff: assert ff._fname == "file://{}".format(testfile) assert isinstance(ff.tree['a'], complex) assert len(w) == 1 assert str(w[0].message) == ( "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file " "'file://{}', but latest supported version is 1.0.0".format(testfile)) def test_version_mismatch_with_supported_versions(): """Make sure that defining the supported_versions field does not affect whether or not schema mismatch warnings are triggered.""" class CustomFlow(object): pass class CustomFlowType(CustomTestType): version = '1.1.0' supported_versions = ['1.0.0', '1.1.0'] name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] class CustomFlowExtension(object): @property def types(self): return [CustomFlowType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] yaml = """ flow_thing: ! 
c: 100 d: 3.14 """ buff = helpers.yaml_to_asdf(yaml) with catch_warnings() as w: data = asdf.AsdfFile.open( buff, ignore_version_mismatch=False, extensions=CustomFlowExtension()) assert len(w) == 1, helpers.display_warnings(w) assert str(w[0].message) == ( "'tag:nowhere.org:custom/custom_flow' with version 1.0.0 found in " "file, but latest supported version is 1.1.0") def test_versioned_writing(): from ..tags.core.complex import ComplexType # Create a bogus version map versioning._version_map['42.0.0'] = { 'FILE_FORMAT': '42.0.0', 'YAML_VERSION': '1.1', 'tags': { 'tag:stsci.edu:asdf/core/complex': '42.0.0', 'tag:stscu.edu:asdf/core/asdf': '1.0.0' } } versioning.supported_versions.append(versioning.AsdfVersion('42.0.0')) # Currently this class cannot inherit directly from ComplexType because if # it does it pollutes ASDF's built-in extension and causes later tests that # rely on ComplexType to fail. However, if CustomType is ever implemented # as an abstract base class, then it will be possible to use it as a mix-in # and also inherit from ComplexType. This means the only method/attribute # that will need to be explicitly defined will be 'version'. class FancyComplexType(asdftypes.CustomType): name = ComplexType.name version = (42, 0, 0) types = ComplexType.types @classmethod def to_tree(cls, node, ctx): return ComplexType.to_tree(node, ctx) @classmethod def from_tree(cls, tree, ctx): return ComplexType.from_tree(tree, ctx) class FancyComplexExtension(object): @property def types(self): return [FancyComplexType] @property def tag_mapping(self): return [] @property def url_mapping(self): return [('http://stsci.edu/schemas/asdf/core/complex-42.0.0', util.filepath_to_url(TEST_DATA_PATH) + '/complex-42.0.0.yaml')] tree = {'a': complex(0, -1)} buff = io.BytesIO() ff = asdf.AsdfFile(tree, version="42.0.0", extensions=[FancyComplexExtension()]) ff.write_to(buff) assert b'complex-42.0.0' in buff.getvalue() del versioning._version_map['42.0.0'] versioning.supported_versions.pop() def test_longest_match(): class FancyComplexExtension(object): @property def types(self): return [] @property def tag_mapping(self): return [] @property def url_mapping(self): return [('http://stsci.edu/schemas/asdf/core/', 'FOOBAR/{url_suffix}')] l = extension.AsdfExtensionList( [extension.BuiltinExtension(), FancyComplexExtension()]) assert l.url_mapping( 'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') == 'FOOBAR/asdf-1.0.0' assert l.url_mapping( 'http://stsci.edu/schemas/asdf/transform/transform-1.0.0') != 'FOOBAR/transform-1.0.0' def test_module_versioning(): class NoModuleType(asdftypes.AsdfType): # It seems highly unlikely that this would be a real module requires = ['qkjvqdja'] class HasCorrectPytest(asdftypes.AsdfType): # This means it requires 1.0.0 or greater, so it should succeed requires = ['pytest-1.0.0'] class DoesntHaveCorrectPytest(asdftypes.AsdfType): requires = ['pytest-91984.1.7'] nmt = NoModuleType() hcp = HasCorrectPytest() # perhaps an unfortunate acroynm dhcp = DoesntHaveCorrectPytest() assert nmt.has_required_modules == False assert hcp.has_required_modules == True assert dhcp.has_required_modules == False def test_undefined_tag(): # This tests makes sure that ASDF still returns meaningful structured data # even when it encounters a schema tag that it does not specifically # implement as an extension from numpy import array yaml = """ undefined_data: ! - 5 - {'message': 'there is no tag'} - !core/ndarray-1.0.0 [[1, 2, 3], [4, 5, 6]] - ! 
- !core/ndarray-1.0.0 [[7],[8],[9],[10]] - !core/complex-1.0.0 3.14j """ buff = helpers.yaml_to_asdf(yaml) with catch_warnings() as warning: afile = asdf.AsdfFile.open(buff) missing = afile.tree['undefined_data'] assert missing[0] == 5 assert missing[1] == {'message': 'there is no tag'} assert (missing[2] == array([[1, 2, 3], [4, 5, 6]])).all() assert (missing[3][0] == array([[7],[8],[9],[10]])).all() assert missing[3][1] == 3.14j # There are two undefined tags, so we expect two warnings assert len(warning) == 2 for i, tag in enumerate(["also_undefined-1.3.0", "undefined_tag-1.0.0"]): assert str(warning[i].message) == ( "tag:nowhere.org:custom/{} is not recognized, converting to raw " "Python data structure".format(tag)) # Make sure no warning occurs if explicitly ignored buff.seek(0) with catch_warnings() as warning: afile = asdf.AsdfFile.open(buff, ignore_unrecognized_tag=True) assert len(warning) == 0 def test_newer_tag(): # This test simulates a scenario where newer versions of CustomFlow # provides different keyword parameters that the older schema and tag class # do not account for. We want to test whether ASDF can handle this problem # gracefully and still provide meaningful data as output. The test case is # fairly contrived but we want to test whether ASDF can handle backwards # compatibility even when an explicit tag class for different versions of a # schema is not available. class CustomFlow(object): def __init__(self, c=None, d=None): self.c = c self.d = d class CustomFlowType(asdftypes.CustomType): version = '1.1.0' name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] @classmethod def from_tree(cls, tree, ctx): kwargs = {} for name in tree: kwargs[name] = tree[name] return CustomFlow(**kwargs) @classmethod def to_tree(cls, data, ctx): tree = dict(c=data.c, d=data.d) class CustomFlowExtension(object): @property def types(self): return [CustomFlowType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] new_yaml = """ flow_thing: ! c: 100 d: 3.14 """ new_buff = helpers.yaml_to_asdf(new_yaml) new_data = asdf.AsdfFile.open(new_buff, extensions=CustomFlowExtension()) assert type(new_data.tree['flow_thing']) == CustomFlow old_yaml = """ flow_thing: ! a: 100 b: 3.14 """ old_buff = helpers.yaml_to_asdf(old_yaml) with catch_warnings() as warning: asdf.AsdfFile.open(old_buff, extensions=CustomFlowExtension()) assert len(warning) == 1, helpers.display_warnings(warning) # We expect this warning since it will not be possible to convert version # 1.0.0 of CustomFlow to a CustomType (by design, for testing purposes). 
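    # Hedged note (not asserted here): on such a conversion failure asdf is
    # expected to fall back to the raw tagged data rather than raising, so
    # the node would remain a plain mapping (e.g. roughly
    # tree['flow_thing']['a'] == 100); only the warning text is checked
    # below.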
assert str(warning[0].message).startswith( "Failed to convert " "tag:nowhere.org:custom/custom_flow-1.0.0 to custom type") def test_incompatible_version_check(): class TestType0(asdftypes.CustomType): supported_versions = versioning.AsdfSpec('>=1.2.0') assert TestType0.incompatible_version('1.1.0') == True assert TestType0.incompatible_version('1.2.0') == False assert TestType0.incompatible_version('2.0.1') == False class TestType1(asdftypes.CustomType): supported_versions = versioning.AsdfVersion('1.0.0') assert TestType1.incompatible_version('1.0.0') == False assert TestType1.incompatible_version('1.1.0') == True class TestType2(asdftypes.CustomType): supported_versions = '1.0.0' assert TestType2.incompatible_version('1.0.0') == False assert TestType2.incompatible_version('1.1.0') == True class TestType3(asdftypes.CustomType): # This doesn't make much sense, but it's just for the sake of example supported_versions = ['1.0.0', versioning.AsdfSpec('>=2.0.0')] assert TestType3.incompatible_version('1.0.0') == False assert TestType3.incompatible_version('1.1.0') == True assert TestType3.incompatible_version('2.0.0') == False assert TestType3.incompatible_version('2.0.1') == False class TestType4(asdftypes.CustomType): supported_versions = ['1.0.0', versioning.AsdfVersion('1.1.0')] assert TestType4.incompatible_version('1.0.0') == False assert TestType4.incompatible_version('1.0.1') == True assert TestType4.incompatible_version('1.1.0') == False assert TestType4.incompatible_version('1.1.1') == True class TestType5(asdftypes.CustomType): supported_versions = \ [versioning.AsdfSpec('<1.0.0'), versioning.AsdfSpec('>=2.0.0')] assert TestType5.incompatible_version('0.9.9') == False assert TestType5.incompatible_version('2.0.0') == False assert TestType5.incompatible_version('2.0.1') == False assert TestType5.incompatible_version('1.0.0') == True assert TestType5.incompatible_version('1.1.0') == True with pytest.raises(ValueError): class TestType6(asdftypes.CustomType): supported_versions = 'blue' with pytest.raises(ValueError): class TestType6(asdftypes.CustomType): supported_versions = ['1.1.0', '2.2.0', 'blue'] def test_supported_versions(): class CustomFlow(object): def __init__(self, c=None, d=None): self.c = c self.d = d class CustomFlowType(asdftypes.CustomType): version = '1.1.0' supported_versions = [(1,0,0), versioning.AsdfSpec('>=1.1.0')] name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] @classmethod def from_tree(cls, tree, ctx): # Convert old schema to new CustomFlow type if cls.version == '1.0.0': return CustomFlow(c=tree['a'], d=tree['b']) else: return CustomFlow(**tree) return CustomFlow(**kwargs) @classmethod def to_tree(cls, data, ctx): if cls.version == '1.0.0': tree = dict(a=data.c, b=data.d) else: tree = dict(c=data.c, d=data.d) class CustomFlowExtension(object): @property def types(self): return [CustomFlowType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] new_yaml = """ flow_thing: ! c: 100 d: 3.14 """ old_yaml = """ flow_thing: ! 
a: 100 b: 3.14 """ new_buff = helpers.yaml_to_asdf(new_yaml) new_data = asdf.AsdfFile.open(new_buff, extensions=CustomFlowExtension()) assert type(new_data.tree['flow_thing']) == CustomFlow old_buff = helpers.yaml_to_asdf(old_yaml) old_data = asdf.AsdfFile.open(old_buff, extensions=CustomFlowExtension()) assert type(old_data.tree['flow_thing']) == CustomFlow def test_unsupported_version_warning(): class CustomFlow(object): pass class CustomFlowType(asdftypes.CustomType): version = '1.0.0' supported_versions = [(1,0,0)] name = 'custom_flow' organization = 'nowhere.org' standard = 'custom' types = [CustomFlow] class CustomFlowExtension(object): @property def types(self): return [CustomFlowType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] yaml = """ flow_thing: ! c: 100 d: 3.14 """ buff = helpers.yaml_to_asdf(yaml) with catch_warnings() as _warnings: data = asdf.AsdfFile.open(buff, extensions=CustomFlowExtension()) assert len(_warnings) == 1 assert str(_warnings[0].message) == ( "Version 1.1.0 of tag:nowhere.org:custom/custom_flow is not compatible " "with any existing tag implementations") asdf-1.3.3/asdf/tests/test_reference.py0000644000175000017500000001645713246003441017421 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import numpy as np from numpy.testing import assert_array_equal import pytest from .. import asdf from .. import reference from .. import util from ..tags.core import ndarray from .helpers import assert_tree_match def test_external_reference(tmpdir): exttree = { 'cool_stuff': { 'a': np.array([0, 1, 2], np.float), 'b': np.array([3, 4, 5], np.float) }, 'list_of_stuff': [ 'foobar', 42, np.array([7, 8, 9], np.float) ] } external_path = os.path.join(str(tmpdir), 'external.asdf') ext = asdf.AsdfFile(exttree) ext.write_to(external_path) external_path = os.path.join(str(tmpdir), 'external2.asdf') ff = asdf.AsdfFile(exttree) ff.write_to(external_path) tree = { # The special name "data" here must be an array. This is # included so that such validation can be ignored when we just # have a "$ref". 
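        # (General, illustrative note on the '$ref' syntax used throughout
        #  this tree: the part after '#' is a JSON Pointer into the target
        #  document, so 'external.asdf#/cool_stuff/a' selects
        #  exttree['cool_stuff']['a'], and '~0'/'~1' escape '~'/'/' in keys,
        #  as exercised by test_make_reference further below.)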
'data': { '$ref': 'external.asdf#/cool_stuff/a' }, 'science_data': { '$ref': 'external.asdf#/cool_stuff/a' }, 'science_data2': { '$ref': 'external2.asdf#/cool_stuff/a' }, 'foobar': { '$ref': 'external.asdf#/list_of_stuff/0', }, 'answer': { '$ref': 'external.asdf#/list_of_stuff/1' }, 'array': { '$ref': 'external.asdf#/list_of_stuff/2', }, 'whole_thing': { '$ref': 'external.asdf#' }, 'myself': { '$ref': '#', }, 'internal': { '$ref': '#science_data' } } def do_asserts(ff): assert 'unloaded' in repr(ff.tree['science_data']) assert 'unloaded' in str(ff.tree['science_data']) assert len(ff._external_asdf_by_uri) == 0 assert_array_equal(ff.tree['science_data'], exttree['cool_stuff']['a']) assert len(ff._external_asdf_by_uri) == 1 with pytest.raises((ValueError, RuntimeError)): # Assignment destination is readonly ff.tree['science_data'][0] = 42 assert_array_equal(ff.tree['science_data2'], exttree['cool_stuff']['a']) assert len(ff._external_asdf_by_uri) == 2 assert ff.tree['foobar']() == 'foobar' assert ff.tree['answer']() == 42 assert_array_equal(ff.tree['array'], exttree['list_of_stuff'][2]) assert_tree_match(ff.tree['whole_thing'](), exttree) assert_array_equal( ff.tree['whole_thing']['cool_stuff']['a'], exttree['cool_stuff']['a']) assert_array_equal( ff.tree['myself']['science_data'], exttree['cool_stuff']['a']) # Make sure that referencing oneself doesn't make another call # to disk. assert len(ff._external_asdf_by_uri) == 2 assert_array_equal(ff.tree['internal'], exttree['cool_stuff']['a']) with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: do_asserts(ff) internal_path = os.path.join(str(tmpdir), 'main.asdf') ff.write_to(internal_path) with asdf.AsdfFile.open(internal_path) as ff: do_asserts(ff) with asdf.AsdfFile.open(internal_path) as ff: assert len(ff._external_asdf_by_uri) == 0 ff.resolve_references() assert len(ff._external_asdf_by_uri) == 2 assert isinstance(ff.tree['data'], ndarray.NDArrayType) assert isinstance(ff.tree['science_data'], ndarray.NDArrayType) assert_array_equal(ff.tree['science_data'], exttree['cool_stuff']['a']) assert_array_equal(ff.tree['science_data2'], exttree['cool_stuff']['a']) assert ff.tree['foobar'] == 'foobar' assert ff.tree['answer'] == 42 assert_array_equal(ff.tree['array'], exttree['list_of_stuff'][2]) assert_tree_match(ff.tree['whole_thing'], exttree) assert_array_equal( ff.tree['whole_thing']['cool_stuff']['a'], exttree['cool_stuff']['a']) assert_array_equal( ff.tree['myself']['science_data'], exttree['cool_stuff']['a']) assert_array_equal(ff.tree['internal'], exttree['cool_stuff']['a']) def test_external_reference_invalid(tmpdir): tree = { 'foo': { '$ref': 'fail.asdf' } } ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.resolve_references() ff = asdf.AsdfFile(tree, uri="http://nowhere.com/") with pytest.raises(IOError): ff.resolve_references() ff = asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) with pytest.raises(IOError): ff.resolve_references() def test_external_reference_invalid_fragment(tmpdir): exttree = { 'list_of_stuff': [ 'foobar', 42, np.array([7, 8, 9], np.float) ] } external_path = os.path.join(str(tmpdir), 'external.asdf') ff = asdf.AsdfFile(exttree) ff.write_to(external_path) tree = { 'foo': { '$ref': 'external.asdf#/list_of_stuff/a' } } with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: with pytest.raises(ValueError): ff.resolve_references() tree = { 'foo': { '$ref': 
'external.asdf#/list_of_stuff/3' } } with asdf.AsdfFile(tree, uri=util.filepath_to_url( os.path.join(str(tmpdir), 'main.asdf'))) as ff: with pytest.raises(ValueError): ff.resolve_references() def test_make_reference(tmpdir): exttree = { # Include some ~ and / in the name to make sure that escaping # is working correctly 'f~o~o/': { 'a': np.array([0, 1, 2], np.float), 'b': np.array([3, 4, 5], np.float) } } external_path = os.path.join(str(tmpdir), 'external.asdf') ext = asdf.AsdfFile(exttree) ext.write_to(external_path) with asdf.AsdfFile.open(external_path) as ext: ff = asdf.AsdfFile() ff.tree['ref'] = ext.make_reference(['f~o~o/', 'a']) assert_array_equal(ff.tree['ref'], ext.tree['f~o~o/']['a']) ff.write_to(os.path.join(str(tmpdir), 'source.asdf')) with asdf.AsdfFile.open(os.path.join(str(tmpdir), 'source.asdf')) as ff: assert ff.tree['ref']._uri == 'external.asdf#f~0o~0o~1/a' def test_internal_reference(): tree = { 'foo': 2, 'bar': {'$ref': '#'} } ff = asdf.AsdfFile(tree) ff.find_references() assert isinstance(ff.tree['bar'], reference.Reference) ff.resolve_references() assert ff.tree['bar']['foo'] == 2 tree = { 'foo': 2 } ff = asdf.AsdfFile( tree, uri=util.filepath_to_url(os.path.abspath("test.asdf"))) ff.tree['bar'] = ff.make_reference([]) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) ff = asdf.AsdfFile() content = asdf.AsdfFile()._open_impl(ff, buff, _get_yaml_content=True) assert b"{$ref: ''}" in content asdf-1.3.3/asdf/tests/test_schema.py0000644000175000017500000002741113246003441016713 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import re import warnings from jsonschema import ValidationError import six import yaml import pytest import numpy as np import asdf from asdf import asdftypes from asdf import extension from asdf import resolver from asdf import schema from asdf import util from asdf.tests import helpers from astropy.tests.helper import catch_warnings TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'data') class CustomExtension: """This is the base class that is used for extensions for custom tag classes that exist only for the purposes of testing. """ @property def types(self): return [] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/', util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')] def test_violate_toplevel_schema(): tree = {'fits': 'This does not look like a FITS file'} with pytest.raises(ValidationError): asdf.AsdfFile(tree) ff = asdf.AsdfFile() ff.tree['fits'] = 'This does not look like a FITS file' with pytest.raises(ValidationError): buff = io.BytesIO() ff.write_to(buff) @pytest.mark.importorskip('astropy') def test_tagging_scalars(): yaml = """ unit: !unit/unit-1.0.0 m not_unit: m """ from astropy import units as u buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open(buff) as ff: assert isinstance(ff.tree['unit'], u.UnitBase) assert not isinstance(ff.tree['not_unit'], u.UnitBase) assert isinstance(ff.tree['not_unit'], six.text_type) assert ff.tree == { 'unit': u.m, 'not_unit': 'm' } def test_read_json_schema(): """Pytest to make sure reading JSON schemas succeeds. This was known to fail on Python 3.5 See issue #314 at https://github.com/spacetelescope/asdf/issues/314 for more details. 
""" json_schema = os.path.join(TEST_DATA_PATH, 'example_schema.json') schema_tree = schema.load_schema(json_schema, resolve_references=True) schema.check_schema(schema_tree) def test_load_schema(tmpdir): schema_def = """ %YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "../core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_full_tag(tmpdir): schema_def = """ %YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_load_schema_with_tag_address(tmpdir): schema_def = """ %YAML 1.1 %TAG !asdf! tag:stsci.edu:asdf/ --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" type: object properties: foobar: $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0" required: [foobar] ... """ schema_path = tmpdir.join('nugatory.yaml') schema_path.write(schema_def.encode()) schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) def test_schema_caching(): # Make sure that if we request the same URL, we get the *exact # same* object, to ensure the cache is working. 
s1 = schema.load_schema( 'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') s2 = schema.load_schema( 'http://stsci.edu/schemas/asdf/core/asdf-1.0.0') assert s1 is s2 def test_flow_style(): class CustomFlowStyleType(dict, asdftypes.AsdfType): name = 'custom_flow' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class CustomFlowStyleExtension(CustomExtension): @property def types(self): return [CustomFlowStyleType] tree = { 'custom_flow': CustomFlowStyleType({'a': 42, 'b': 43}) } buff = io.BytesIO() ff = asdf.AsdfFile(tree, extensions=CustomFlowStyleExtension()) ff.write_to(buff) assert b' a: 42\n b: 43' in buff.getvalue() def test_style(): class CustomStyleType(str, asdftypes.AsdfType): name = 'custom_style' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class CustomStyleExtension(CustomExtension): @property def types(self): return [CustomStyleType] tree = { 'custom_style': CustomStyleType("short") } buff = io.BytesIO() ff = asdf.AsdfFile(tree, extensions=CustomStyleExtension()) ff.write_to(buff) assert b'|-\n short\n' in buff.getvalue() def test_property_order(): tree = {'foo': np.ndarray([1, 2, 3])} buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.write_to(buff) ndarray_schema = schema.load_schema( 'http://stsci.edu/schemas/asdf/core/ndarray-1.0.0') property_order = ndarray_schema['anyOf'][1]['propertyOrder'] last_index = 0 for prop in property_order: index = buff.getvalue().find(prop.encode('utf-8') + b':') if index != -1: assert index > last_index last_index = index def test_invalid_nested(): class CustomType(str, asdftypes.AsdfType): name = 'custom' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class CustomTypeExtension(CustomExtension): @property def types(self): return [CustomType] yaml = """ custom: ! foo """ buff = helpers.yaml_to_asdf(yaml) # This should cause a warning but not an error because without explicitly # providing an extension, our custom type will not be recognized and will # simply be converted to a raw type. with catch_warnings() as warning: with asdf.AsdfFile.open(buff): pass assert len(warning) == 1 buff.seek(0) with pytest.raises(ValidationError): with asdf.AsdfFile.open(buff, extensions=[CustomTypeExtension()]): pass # Make sure tags get validated inside of other tags that know # nothing about them. yaml = """ array: !core/ndarray-1.0.0 data: [0, 1, 2] custom: ! foo """ buff = helpers.yaml_to_asdf(yaml) with pytest.raises(ValidationError): with asdf.AsdfFile.open(buff, extensions=[CustomTypeExtension()]): pass def test_invalid_schema(): s = {'type': 'integer'} schema.check_schema(s) s = {'type': 'foobar'} with pytest.raises(ValidationError): schema.check_schema(s) def test_defaults(): s = { 'type': 'object', 'properties': { 'a': { 'type': 'integer', 'default': 42 } } } t = {} cls = schema._create_validator(schema.FILL_DEFAULTS) validator = cls(s) validator.validate(t, _schema=s) assert t['a'] == 42 cls = schema._create_validator(schema.REMOVE_DEFAULTS) validator = cls(s) validator.validate(t, _schema=s) assert t == {} def test_default_check_in_schema(): s = { 'type': 'object', 'properties': { 'a': { 'type': 'integer', 'default': 'foo' } } } with pytest.raises(ValidationError): schema.check_schema(s) def test_fill_and_remove_defaults(): class DefaultType(dict, asdftypes.AsdfType): name = 'default' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' class DefaultTypeExtension(CustomExtension): @property def types(self): return [DefaultType] yaml = """ custom: ! 
b: {} """ buff = helpers.yaml_to_asdf(yaml) with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()]) as ff: assert 'a' in ff.tree['custom'] assert ff.tree['custom']['a'] == 42 assert ff.tree['custom']['b']['c'] == 82 buff.seek(0) with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()], do_not_fill_defaults=True) as ff: assert 'a' not in ff.tree['custom'] assert 'c' not in ff.tree['custom']['b'] ff.fill_defaults() assert 'a' in ff.tree['custom'] assert ff.tree['custom']['a'] == 42 assert 'c' in ff.tree['custom']['b'] assert ff.tree['custom']['b']['c'] == 82 ff.remove_defaults() assert 'a' not in ff.tree['custom'] assert 'c' not in ff.tree['custom']['b'] def test_references_in_schema(): r = resolver.Resolver(CustomExtension().url_mapping, 'url') s = schema.load_schema( os.path.join(TEST_DATA_PATH, 'self_referencing-1.0.0.yaml'), resolver=r, resolve_references=True) assert '$ref' not in repr(s) assert s['anyOf'][1] == s['anyOf'][0] def test_large_literals(): tree = { 'large_int': (1 << 53), } with pytest.raises(ValidationError): asdf.AsdfFile(tree) tree = { 'large_array': np.array([(1 << 53)], np.uint64) } ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) ff.set_array_storage(ff.tree['large_array'], 'inline') buff = io.BytesIO() with pytest.raises(ValidationError): ff.write_to(buff) print(buff.getvalue()) @pytest.mark.importorskip('astropy') def test_type_missing_dependencies(): class MissingType(asdftypes.AsdfType): name = 'missing' organization = 'nowhere.org' version = (1, 1, 0) standard = 'custom' types = ['asdfghjkl12345.foo'] requires = ["ASDFGHJKL12345"] class DefaultTypeExtension(CustomExtension): @property def types(self): return [MissingType] yaml = """ custom: ! b: {foo: 42} """ buff = helpers.yaml_to_asdf(yaml) with catch_warnings() as w: with asdf.AsdfFile.open(buff, extensions=[DefaultTypeExtension()]) as ff: assert ff.tree['custom']['b']['foo'] == 42 assert len(w) == 1 def test_assert_roundtrip_with_extension(tmpdir): called_custom_assert_equal = [False] class CustomType(dict, asdftypes.AsdfType): name = 'custom_flow' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' @classmethod def assert_equal(cls, old, new): called_custom_assert_equal[0] = True class CustomTypeExtension(CustomExtension): @property def types(self): return [CustomType] tree = { 'custom': CustomType({'a': 42, 'b': 43}) } def check(ff): assert isinstance(ff.tree['custom'], CustomType) helpers.assert_roundtrip_tree(tree, tmpdir, extensions=[CustomTypeExtension()]) assert called_custom_assert_equal[0] is True asdf-1.3.3/asdf/tests/helpers.py0000644000175000017500000002205213246031624016056 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import sys import six try: from astropy.coordinates import ICRS except ImportError: ICRS = None # Attempt to maintain backwards compatibility withearlier versions of astropy from astropy import __version__ as astropy_version if astropy_version < '3.0': import astropy from astropy.tests.disable_internet import INTERNET_OFF remote_data = astropy.tests.helper.remote_data else: import pytest from pytest_remotedata.disable_internet import INTERNET_OFF remote_data = pytest.mark.remote_data try: from astropy.coordinates.representation import CartesianRepresentation except ImportError: CartesianRepresentation = None try: from astropy.coordinates.representation import 
CartesianDifferential except ImportError: CartesianDifferential = None from ..asdf import AsdfFile, get_asdf_library_info from .httpserver import RangeHTTPServer from ..extension import _builtin_extension_list from .. import util from .. import versioning from ..tags.core import AsdfObject def assert_tree_match(old_tree, new_tree, ctx=None, funcname='assert_equal', ignore_keys=None): """ Assert that two ASDF trees match. Parameters ---------- old_tree : ASDF tree new_tree : ASDF tree ctx : ASDF file context Used to look up the set of types in effect. funcname : string The name of a method on members of old_tree and new_tree that will be used to compare custom objects. The default of `assert_equal` handles Numpy arrays. ignore_keys : list of str List of keys to ignore """ seen = set() if ignore_keys is None: ignore_keys = ['asdf_library', 'history'] ignore_keys = set(ignore_keys) if ctx is None: version_string = str(versioning.default_version) ctx = _builtin_extension_list else: version_string = ctx.version_string def recurse(old, new): if id(old) in seen or id(new) in seen: return seen.add(id(old)) seen.add(id(new)) old_type = ctx.type_index.from_custom_type(type(old), version_string) new_type = ctx.type_index.from_custom_type(type(new), version_string) if (old_type is not None and new_type is not None and old_type is new_type and hasattr(old_type, funcname)): getattr(old_type, funcname)(old, new) elif isinstance(old, dict) and isinstance(new, dict): assert (set(x for x in old.keys() if x not in ignore_keys) == set(x for x in new.keys() if x not in ignore_keys)) for key in old.keys(): if key not in ignore_keys: recurse(old[key], new[key]) elif isinstance(old, (list, tuple)) and isinstance(new, (list, tuple)): assert len(old) == len(new) for a, b in zip(old, new): recurse(a, b) # The astropy classes CartesianRepresentation, CartesianDifferential, # and ICRS do not define equality in a way that is meaningful for unit # tests. We explicitly compare the fields that we care about in order # to enable our unit testing. It is possible that in the future it will # be necessary or useful to account for fields that are not currently # compared. elif CartesianRepresentation is not None and \ isinstance(old, CartesianRepresentation): assert old.x == new.x and old.y == new.y and old.z == new.z elif CartesianDifferential is not None and \ isinstance(old, CartesianDifferential): assert old.d_x == new.d_x and old.d_y == new.d_y and \ old.d_z == new.d_z elif ICRS is not None and isinstance(old, ICRS): assert old.ra == new.ra and old.dec == new.dec else: assert old == new recurse(old_tree, new_tree) def assert_roundtrip_tree( tree, tmpdir, asdf_check_func=None, raw_yaml_check_func=None, write_options={}, extensions=None): """ Assert that a given tree saves to ASDF and, when loaded back, the tree matches the original tree. tree : ASDF tree tmpdir : str Path to temporary directory to save file asdf_check_func : callable, optional Will be called with the reloaded ASDF file to perform any additional checks. raw_yaml_check_func : callable, optional Will be called with the raw YAML content as a string to perform any additional checks. 
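    Examples
    --------
    A typical invocation from a test function looks roughly like this (the
    tree contents here are purely illustrative)::

        tree = {'data': np.arange(10, dtype=np.float64)}
        assert_roundtrip_tree(tree, tmpdir)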
""" fname = str(tmpdir.join('test.asdf')) # First, test writing/reading a BytesIO buffer buff = io.BytesIO() AsdfFile(tree, extensions=extensions).write_to(buff, **write_options) assert not buff.closed buff.seek(0) with AsdfFile.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert 'asdf_library' in ff.tree assert ff.tree['asdf_library'] == get_asdf_library_info() assert_tree_match(tree, ff.tree, ff) if asdf_check_func: asdf_check_func(ff) buff.seek(0) ff = AsdfFile(extensions=extensions) content = AsdfFile._open_impl(ff, buff, _get_yaml_content=True) buff.close() # We *never* want to get any raw python objects out assert b'!!python' not in content assert b'!core/asdf' in content assert content.startswith(b'%YAML 1.1') if raw_yaml_check_func: raw_yaml_check_func(content) # Then, test writing/reading to a real file ff = AsdfFile(tree, extensions=extensions) ff.write_to(fname, **write_options) with AsdfFile.open(fname, mode='rw', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff) if asdf_check_func: asdf_check_func(ff) # Make sure everything works without a block index write_options['include_block_index'] = False buff = io.BytesIO() AsdfFile(tree, extensions=extensions).write_to(buff, **write_options) assert not buff.closed buff.seek(0) with AsdfFile.open(buff, mode='rw', extensions=extensions) as ff: assert not buff.closed assert isinstance(ff.tree, AsdfObject) assert_tree_match(tree, ff.tree, ff) if asdf_check_func: asdf_check_func(ff) # Now try everything on an HTTP range server if not INTERNET_OFF and not sys.platform.startswith('win'): server = RangeHTTPServer() try: ff = AsdfFile(tree, extensions=extensions) ff.write_to(os.path.join(server.tmpdir, 'test.asdf'), **write_options) with AsdfFile.open(server.url + 'test.asdf', mode='r', extensions=extensions) as ff: assert_tree_match(tree, ff.tree, ff) if asdf_check_func: asdf_check_func(ff) finally: server.finalize() def yaml_to_asdf(yaml_content, yaml_headers=True, standard_version=None): """ Given a string of YAML content, adds the extra pre- and post-amble to make it an ASDF file. Parameters ---------- yaml_content : string yaml_headers : bool, optional When True (default) add the standard ASDF YAML headers. Returns ------- buff : io.BytesIO() A file-like object containing the ASDF-like content. """ if isinstance(yaml_content, six.text_type): yaml_content = yaml_content.encode('utf-8') buff = io.BytesIO() if standard_version is None: standard_version = versioning.default_version if yaml_headers: buff.write("""#ASDF {0} #ASDF_STANDARD {1} %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-{0} """.format('1.0.0', standard_version).encode('ascii')) buff.write(yaml_content) if yaml_headers: buff.write(b"\n...\n") buff.seek(0) return buff def get_file_sizes(dirname): """ Get the file sizes in a directory. Parameters ---------- dirname : string Path to a directory Returns ------- sizes : dict Dictionary of (file, size) pairs. 
""" files = {} for filename in os.listdir(dirname): path = os.path.join(dirname, filename) if os.path.isfile(path): files[filename] = os.stat(path).st_size return files def display_warnings(_warnings): """ Return a string that displays a list of unexpected warnings Parameters ---------- _warnings : iterable List of warnings to be displayed Returns ------- msg : str String containing the warning messages to be displayed """ if len(_warnings) == 0: return "No warnings occurred (was one expected?)" msg = "Unexpected warning(s) occurred:\n" for warning in _warnings: msg += "{}:{}: {}: {}\n".format( warning.filename, warning.lineno, warning.category.__name__, warning.message) return msg asdf-1.3.3/asdf/tests/httpserver.py0000644000175000017500000000331313246003441016615 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os import shutil import multiprocessing import six import tempfile from ..extern.RangeHTTPServer import RangeHTTPRequestHandler __all__ = ['HTTPServer', 'RangeHTTPServer'] def run_server(queue, tmpdir, handler_class): # pragma: no cover """ Runs an HTTP server serving files from given tmpdir in a separate process. When it's ready, it sends a URL to the server over a queue so the main process (the HTTP client) can start making requests of it. """ class HTTPRequestHandler(handler_class): def translate_path(self, path): path = handler_class.translate_path(self, path) path = os.path.join( tmpdir, os.path.relpath(path, os.getcwd())) return path server = six.moves.socketserver.TCPServer( ("127.0.0.1", 0), HTTPRequestHandler) domain, port = server.server_address url = "http://{0}:{1}/".format(domain, port) queue.put(url) server.serve_forever() class HTTPServer(object): handler_class = six.moves.SimpleHTTPServer.SimpleHTTPRequestHandler def __init__(self): self.tmpdir = tempfile.mkdtemp() q = multiprocessing.Queue() self.process = multiprocessing.Process( target=run_server, args=(q, self.tmpdir, self.handler_class)) self.process.start() self.url = q.get() def finalize(self): self.process.terminate() shutil.rmtree(self.tmpdir) class RangeHTTPServer(HTTPServer): handler_class = RangeHTTPRequestHandler asdf-1.3.3/asdf/tests/test_suite.py0000644000175000017500000000553013246003441016602 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import os import sys import pytest from asdf import open as asdf_open from asdf import versioning from .helpers import assert_tree_match, display_warnings from astropy.tests.helper import catch_warnings def get_test_id(reference_file_path): """Helper function to return the informative part of a schema path""" path = os.path.normpath(reference_file_path) return os.path.sep.join(path.split(os.path.sep)[-3:]) def collect_reference_files(): """Function used by pytest to collect ASDF reference files for testing.""" root = os.path.join(os.path.dirname(__file__), '..', "reference_files") for version in versioning.supported_versions: version_dir = os.path.join(root, str(version)) if os.path.exists(version_dir): for filename in os.listdir(version_dir): if filename.endswith(".asdf"): filepath = os.path.join(version_dir, filename) basename, _ = os.path.splitext(filepath) if os.path.exists(basename + ".yaml"): yield filepath def _compare_trees(name_without_ext, 
expect_warnings=False): asdf_path = name_without_ext + ".asdf" yaml_path = name_without_ext + ".yaml" with asdf_open(asdf_path) as af_handle: af_handle.resolve_and_inline() with asdf_open(yaml_path) as ref: def _compare_func(): assert_tree_match(af_handle.tree, ref.tree, funcname='assert_allclose') if expect_warnings: # Make sure to only suppress warnings when they are expected. # However, there's still a chance of missing warnings that we # actually care about here. with catch_warnings(RuntimeWarning) as w: _compare_func() else: _compare_func() @pytest.mark.parametrize( 'reference_file', collect_reference_files(), ids=get_test_id) def test_reference_file(reference_file): basename = os.path.basename(reference_file) name_without_ext, _ = os.path.splitext(reference_file) known_fail = False # We expect warnings from numpy due to the way that complex.yaml is # constructed. We want to make sure we only suppress warnings when they are # expected. expect_warnings = basename == 'complex.asdf' if sys.version_info[:2] == (2, 7): known_fail = (basename in ('complex.asdf')) if sys.maxunicode <= 65535: known_fail = known_fail | (basename in ('unicode_spp.asdf')) try: _compare_trees(name_without_ext, expect_warnings=expect_warnings) except: if known_fail: pytest.xfail() else: raise asdf-1.3.3/asdf/tests/test_low_level.py0000644000175000017500000010006313246003441017436 0ustar dandan00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import numpy as np from numpy.testing import assert_array_equal from astropy.modeling import models import pytest import six from .. import asdf from .. import block from .. import constants from .. import generic_io from .. import treeutil from .. import versioning from ..tests.helpers import assert_tree_match def _get_small_tree(): x = np.arange(0, 10, dtype=np.float) tree = { 'science_data': x, 'subset': x[3:-3], 'skipping': x[::2], 'not_shared': np.arange(10, 0, -1, dtype=np.uint8) } return tree def test_no_yaml_end_marker(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: bar...baz baz: 42 """ path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff): pass buff.seek(0) fd = generic_io.InputStream(buff, 'r') with pytest.raises(ValueError): with asdf.AsdfFile.open(fd): pass with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): with asdf.AsdfFile.open(fd): pass def test_no_final_newline(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: ...bar... baz: 42 ...""" path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with asdf.AsdfFile.open(buff) as ff: assert len(ff.tree) == 2 buff.seek(0) fd = generic_io.InputStream(buff, 'r') with asdf.AsdfFile.open(fd) as ff: assert len(ff.tree) == 2 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with asdf.AsdfFile.open(fd) as ff: assert len(ff.tree) == 2 def test_no_asdf_header(tmpdir): content = b"What? This ain't no ASDF file" path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with pytest.raises(ValueError): asdf.AsdfFile.open(buff) with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with pytest.raises(ValueError): asdf.AsdfFile.open(fd) def test_no_asdf_blocks(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo: bar ... XXXXXXXX """ path = os.path.join(str(tmpdir), 'test.asdf') buff = io.BytesIO(content) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 0 buff.seek(0) fd = generic_io.InputStream(buff, 'r') with asdf.AsdfFile.open(fd) as ff: assert len(ff.blocks) == 0 with open(path, 'wb') as fd: fd.write(content) with open(path, 'rb') as fd: with asdf.AsdfFile.open(fd) as ff: assert len(ff.blocks) == 0 def test_invalid_source(): buff = io.BytesIO() ff = asdf.AsdfFile(_get_small_tree()) ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff2: ff2.blocks.get_block(0) with pytest.raises(ValueError): ff2.blocks.get_block(2) with pytest.raises(IOError): ff2.blocks.get_block("http://127.0.0.1/") with pytest.raises(TypeError): ff2.blocks.get_block(42.0) with pytest.raises(ValueError): ff2.blocks.get_source(42.0) block = ff2.blocks.get_block(0) assert ff2.blocks.get_source(block) == 0 def test_empty_file(): buff = io.BytesIO(b"#ASDF 1.0.0\n") buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 buff = io.BytesIO(b"#ASDF 1.0.0\n#ASDF_STANDARD 1.0.0") buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert ff.tree == {} assert len(ff.blocks) == 0 def test_not_asdf_file(): buff = io.BytesIO(b"SIMPLE") buff.seek(0) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff): pass buff = io.BytesIO(b"SIMPLE\n") buff.seek(0) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff): pass def test_junk_file(): buff = io.BytesIO(b"#ASDF 1.0.0\nFOO") buff.seek(0) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff): pass def test_block_mismatch(): # This is a file with a single small block, followed by something # that has an invalid block magic number. buff = io.BytesIO( b'#ASDF 1.0.0\n\xd3BLK\x00\x28\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0FOOBAR') buff.seek(0) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff): pass def test_block_header_too_small(): # The block header size must be at least 40 buff = io.BytesIO( b'#ASDF 1.0.0\n\xd3BLK\0\0') buff.seek(0) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff): pass if six.PY2: def test_file_already_closed(tmpdir): # Test that referencing specific blocks in another asdf file # works. 
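        # In this scenario the underlying file handle is closed before the
        # array data is lazily loaded, so accessing the block afterwards is
        # expected to raise IOError.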
tree = _get_small_tree() path = os.path.join(str(tmpdir), 'test.asdf') ff = asdf.AsdfFile(tree) ff.write_to(path) with open(path, 'rb') as fd: ff2 = asdf.AsdfFile.open(fd) with pytest.raises(IOError): str(ff2.tree['science_data'][:]) def test_external_block(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.set_array_storage(my_array, 'external') assert ff.get_array_storage(my_array) == 'external' ff.write_to(os.path.join(tmpdir, "test.asdf")) assert 'test0000.asdf' in os.listdir(tmpdir) def test_external_block_non_url(): my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.set_array_storage(my_array, 'external') assert ff.get_array_storage(my_array) == 'external' buff = io.BytesIO() with pytest.raises(ValueError): ff.write_to(buff) def test_invalid_array_storage(): my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) with pytest.raises(ValueError): ff.set_array_storage(my_array, 'foo') b = block.Block() b._array_storage = 'foo' with pytest.raises(ValueError): ff.blocks.add(b) with pytest.raises(ValueError): ff.blocks.remove(b) def test_transfer_array_sources(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.write_to(os.path.join(tmpdir, "test2.asdf")) # write_to should have no effect on getting the original data assert_array_equal(my_array, ff.tree['my_array']) assert ff._fd is None def test_write_to_same(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf")) with asdf.AsdfFile.open( os.path.join(tmpdir, "test.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) ff.tree['extra'] = [0] * 1000 ff.write_to(os.path.join(tmpdir, "test2.asdf")) with asdf.AsdfFile.open( os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: assert_array_equal(my_array, ff.tree['my_array']) def test_pad_blocks(tmpdir): tmpdir = str(tmpdir) # This is the case where the new tree can't fit in the available space my_array = np.ones((8, 8)) * 1 my_array2 = np.ones((42, 5)) * 2 tree = { 'my_array': my_array, 'my_array2': my_array2 } ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, "test.asdf"), pad_blocks=True) with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['my_array'], my_array) assert_array_equal(ff.tree['my_array2'], my_array2) def test_update_expand_tree(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.arange(64) * 1 my_array2 = np.arange(64) * 2 tree = { 'arrays': [ my_array, my_array2, np.arange(3) ] } ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') assert len(list(ff.blocks.inline_blocks)) == 1 ff.write_to(testpath, pad_blocks=True) with asdf.AsdfFile.open(testpath, mode='rw') as ff: assert_array_equal(ff.tree['arrays'][0], my_array) orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 6000 ff.update() with asdf.AsdfFile.open(testpath) as ff: assert orig_offset <= ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' 
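        # 'inline' arrays are stored as literal lists inside the YAML tree
        # rather than as binary blocks, so they are not relocated when
        # update() repacks the blocks.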
assert_array_equal(ff.tree['arrays'][0], my_array) assert_array_equal(ff.tree['arrays'][1], my_array2) # Now, we expand the header only by a little bit ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['arrays'][2], 'inline') ff.write_to(os.path.join(tmpdir, "test2.asdf"), pad_blocks=True) with asdf.AsdfFile.open(os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff: orig_offset = ff.blocks[ff.tree['arrays'][0]].offset ff.tree['extra'] = [0] * 2 ff.update() with asdf.AsdfFile.open(os.path.join(tmpdir, "test2.asdf")) as ff: assert orig_offset == ff.blocks[ff.tree['arrays'][0]].offset assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline' assert_array_equal(ff.tree['arrays'][0], my_array) assert_array_equal(ff.tree['arrays'][1], my_array2) def _get_update_tree(): return { 'arrays': [ np.arange(64) * 1, np.arange(64) * 2, np.arange(64) * 3 ] } def test_update_delete_first_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][0] ff.update() assert os.stat(path).st_size <= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) def test_update_delete_last_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][-1] ff.update() assert os.stat(path).st_size <= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) def test_update_delete_middle_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: del ff.tree['arrays'][1] ff.update() assert len(ff.blocks._internal_blocks) == 2 assert os.stat(path).st_size <= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert len(ff.tree['arrays']) == 2 assert ff.tree['arrays'][0]._source == 0 assert ff.tree['arrays'][1]._source == 1 assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][2]) def test_update_replace_first_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][0] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], 
np.arange(32)) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) def test_update_replace_last_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][2] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], np.arange(32)) def test_update_replace_middle_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'][1] = np.arange(32) ff.update() assert os.stat(path).st_size <= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], np.arange(32)) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) def test_update_add_array(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(32)) ff.update() with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) assert_array_equal(ff.tree['arrays'][3], np.arange(32)) def test_update_add_array_at_end(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') # This is the case where the new tree can't fit in the available space tree = _get_update_tree() ff = asdf.AsdfFile(tree) ff.write_to(path, pad_blocks=True) original_size = os.stat(path).st_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf"), mode="rw") as ff: ff.tree['arrays'].append(np.arange(2048)) ff.update() assert len(ff.blocks) == 4 assert os.stat(path).st_size >= original_size with asdf.AsdfFile.open(os.path.join(tmpdir, "test.asdf")) as ff: assert_array_equal(ff.tree['arrays'][0], tree['arrays'][0]) assert_array_equal(ff.tree['arrays'][1], tree['arrays'][1]) assert_array_equal(ff.tree['arrays'][2], tree['arrays'][2]) assert_array_equal(ff.tree['arrays'][3], np.arange(2048)) def test_update_replace_all_arrays(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.ones((64, 64)) * 1 tree = { 'my_array': my_array, } ff = asdf.AsdfFile(tree) ff.write_to(testpath, pad_blocks=True) with asdf.AsdfFile.open(testpath, mode='rw') as ff: ff.tree['my_array'] = np.ones((64, 64)) * 2 ff.update() with asdf.AsdfFile.open(testpath) as 
ff: assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2) def test_update_array_in_place(tmpdir): tmpdir = str(tmpdir) testpath = os.path.join(tmpdir, "test.asdf") # This is the case where the new tree can't fit in the available space my_array = np.ones((64, 64)) * 1 tree = { 'my_array': my_array, } ff = asdf.AsdfFile(tree) ff.write_to(testpath, pad_blocks=True) with asdf.AsdfFile.open(testpath, mode='rw') as ff: array = np.asarray(ff.tree['my_array']) array *= 2 ff.update() with asdf.AsdfFile.open(testpath) as ff: assert_array_equal(ff.tree['my_array'], np.ones((64, 64)) * 2) def test_init_from_asdffile(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff2 = asdf.AsdfFile(ff) assert ff.tree['my_array'] is ff2.tree['my_array'] assert_array_equal(ff.tree['my_array'], ff2.tree['my_array']) assert ff.blocks[my_array] != ff2.blocks[my_array] ff2.tree['my_array'] = None assert_array_equal(ff.tree['my_array'], my_array) ff.write_to(os.path.join(tmpdir, 'test.asdf')) with asdf.AsdfFile().open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = asdf.AsdfFile(ff) assert not ff.tree['my_array'] is ff2.tree['my_array'] assert_array_equal(ff.tree['my_array'], ff2.tree['my_array']) assert ff.blocks[my_array] != ff2.blocks[my_array] ff2.tree['my_array'] = None assert_array_equal(ff.tree['my_array'], my_array) def test_update_exceptions(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.AsdfFile().open(path) as ff: with pytest.raises(IOError): ff.update() ff = asdf.AsdfFile(tree) buff = io.BytesIO() ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff, mode='rw') as ff: ff.update() with pytest.raises(ValueError): asdf.AsdfFile().update() def test_get_data_from_closed_file(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.AsdfFile().open(path) as ff: pass with pytest.raises(IOError): assert_array_equal(my_array, ff.tree['my_array']) def test_seek_until_on_block_boundary(): # Create content where the first block begins on a # file-reading-block boundary. content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo : bar ... 
""" content += (b'\0' * (io.DEFAULT_BUFFER_SIZE - 2) + constants.BLOCK_MAGIC + b'\0\x30' + b'\0' * 50) buff = io.BytesIO(content) ff = asdf.AsdfFile.open(buff) assert len(ff.blocks) == 1 buff.seek(0) fd = generic_io.InputStream(buff, 'r') ff = asdf.AsdfFile.open(fd) assert len(ff.blocks) == 1 def test_checksum(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64, dtype=np.int64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.AsdfFile.open(path, validate_checksums=True) as ff: assert type(ff.blocks._internal_blocks[0].checksum) == bytes assert ff.blocks._internal_blocks[0].checksum == \ b'\xcaM\\\xb8t_L|\x00\n+\x01\xf1\xcfP1' def test_checksum_update(tmpdir): tmpdir = str(tmpdir) path = os.path.join(tmpdir, 'test.asdf') my_array = np.arange(0, 64, dtype=np.int64).reshape((8, 8)) tree = {'my_array': my_array} ff = asdf.AsdfFile(tree) ff.write_to(path) with asdf.AsdfFile.open(path, mode='rw') as ff: ff.tree['my_array'][7, 7] = 0.0 # update() should update the checksum, even if the data itself # is memmapped and isn't expressly re-written. ff.update() with asdf.AsdfFile.open(path, validate_checksums=True) as ff: assert ff.blocks._internal_blocks[0].checksum == \ b'T\xaf~[\x90\x8a\x88^\xc2B\x96D,N\xadL' def test_atomic_write(tmpdir): tmpfile = os.path.join(str(tmpdir), 'test.asdf') tree = _get_small_tree() ff = asdf.AsdfFile(tree) ff.write_to(tmpfile) with asdf.AsdfFile.open(tmpfile) as ff: ff.write_to(tmpfile) def test_overwrite(tmpdir): # This is intended to reproduce the following issue: # https://github.com/spacetelescope/asdf/issues/100 tmpfile = os.path.join(str(tmpdir), 'test.asdf') aff = models.AffineTransformation2D(matrix=[[1, 2], [3, 4]]) f = asdf.AsdfFile() f.tree['model'] = aff f.write_to(tmpfile) model = f.tree['model'] ff = asdf.AsdfFile() ff.tree['model'] = model ff.write_to(tmpfile) def test_walk_and_modify_remove_keys(): tree = { 'foo': 42, 'bar': 43 } def func(x): if x == 42: return None return x tree2 = treeutil.walk_and_modify(tree, func) assert 'foo' not in tree2 assert 'bar' in tree2 def test_copy(tmpdir): tmpdir = str(tmpdir) my_array = np.random.rand(8, 8) tree = {'my_array': my_array, 'foo': {'bar': 'baz'}} ff = asdf.AsdfFile(tree) ff.write_to(os.path.join(tmpdir, 'test.asdf')) with asdf.AsdfFile.open(os.path.join(tmpdir, 'test.asdf')) as ff: ff2 = ff.copy() ff2.tree['my_array'] *= 2 ff2.tree['foo']['bar'] = 'boo' assert np.all(ff2.tree['my_array'] == ff.tree['my_array'] * 2) assert ff.tree['foo']['bar'] == 'baz' assert_array_equal(ff2.tree['my_array'], ff2.tree['my_array']) def test_deferred_block_loading(): buff = io.BytesIO() ff = asdf.AsdfFile(_get_small_tree()) ff.write_to(buff, include_block_index=False) buff.seek(0) with asdf.AsdfFile.open(buff) as ff2: assert len([x for x in ff2.blocks.blocks if isinstance(x, block.Block)]) == 1 x = ff2.tree['science_data'] * 2 x = ff2.tree['not_shared'] * 2 assert len([x for x in ff2.blocks.blocks if isinstance(x, block.Block)]) == 2 with pytest.raises(ValueError): ff2.blocks.get_block(2) def test_block_index(): buff = io.BytesIO() arrays = [] for i in range(100): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff2: assert isinstance(ff2.blocks._internal_blocks[0], block.Block) assert len(ff2.blocks._internal_blocks) == 100 for i in range(2, 99): assert isinstance(ff2.blocks._internal_blocks[i], block.UnloadedBlock) assert 
isinstance(ff2.blocks._internal_blocks[99], block.Block) # Force the loading of one array x = ff2.tree['arrays'][50] * 2 for i in range(2, 99): if i == 50: assert isinstance(ff2.blocks._internal_blocks[i], block.Block) else: assert isinstance(ff2.blocks._internal_blocks[i], block.UnloadedBlock) def test_large_block_index(): # This test is designed to test reading of a block index that is # larger than a single file system block, which is why we create # io.DEFAULT_BUFFER_SIZE / 4 arrays, and assuming each entry has more # than one digit in its address, we're guaranteed to have an index # larger than a filesystem block. # TODO: It would be nice to find a way to make this test faster. The # real bottleneck here is the enormous YAML section. buff = io.BytesIO() narrays = int(io.DEFAULT_BUFFER_SIZE / 4) arrays = [] for i in range(narrays): arrays.append(np.array([i], np.uint16)) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff2: assert isinstance(ff2.blocks._internal_blocks[0], block.Block) assert len(ff2.blocks._internal_blocks) == narrays def test_no_block_index(): buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) assert constants.INDEX_HEADER not in buff.getvalue() def test_junk_after_index(): buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.write(b"JUNK") buff.seek(0) # This has junk after the block index, so it # should fall back to the skip method, which # only loads the first block. with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_short_file_find_block_index(): # This tests searching for a block index in a file that looks like # it might have an index, in the last filesystem block or so, but # ultimately proves to not have an index. buff = io.BytesIO() ff = asdf.AsdfFile({'arr': np.ndarray([1]), 'arr2': np.ndarray([2])}) ff.write_to(buff, include_block_index=False) buff.write(b'#ASDF BLOCK INDEX\n') buff.write(b'0' * (io.DEFAULT_BUFFER_SIZE * 4)) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_block_index_values(): # This adds a value in the block index that points to something # past the end of the file. In that case, we should just reject # the index altogether. 
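    # For reference, the block index written at the end of the file is a
    # small YAML document listing the byte offset of each block, roughly
    # (offsets illustrative):
    #
    #   #ASDF BLOCK INDEX
    #   %YAML 1.1
    #   ---
    #   - 1234
    #   - 5678
    #   ...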
buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks.append(block.UnloadedBlock(buff, 123456789)) ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_last_block_index(): # This adds a value in the block index that points to something # that isn't a block buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks[-1]._offset -= 4 ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_unordered_block_index(): # This creates a block index that isn't in increasing order buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks = ff.blocks._internal_blocks[::-1] ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_block_index_first_block_value(): # This creates a bogus block index where the offset of the first # block doesn't match what we already know it to be. In this # case, we should reject the whole block index. buff = io.BytesIO() arrays = [] for i in range(10): arrays.append(np.ones((8, 8)) * i) tree = { 'arrays': arrays } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) ff.blocks._internal_blocks[0]._offset -= 4 ff.blocks.write_block_index(buff, ff) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_invalid_block_id(): ff = asdf.AsdfFile() with pytest.raises(ValueError): ff.blocks.get_block(-2) def test_dots_but_no_block_index(): # This puts `...` at the end of the file, so we sort of think # we might have a block index, but as it turns out, we don't # after reading a few chunks from the end of the file. buff = io.BytesIO() tree = { 'array': np.ones((8, 8)) } ff = asdf.AsdfFile(tree) ff.write_to(buff, include_block_index=False) buff.write(b'A' * 64000) buff.write(b'...\n') buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 def test_open_no_memmap(tmpdir): tmpfile = os.path.join(str(tmpdir), 'random.asdf') tree = { 'array': np.random.random((20, 20)) } ff = asdf.AsdfFile(tree) ff.write_to(tmpfile) # Test that by default we use memmapped arrays when possible with asdf.AsdfFile.open(tmpfile) as af: array = af.tree['array'] # Make sure to access the block so that it gets loaded x = array[0] assert array.block._memmapped == True assert isinstance(array.block._data, np.memmap) # Test that if we ask for copy, we do not get memmapped arrays with asdf.AsdfFile.open(tmpfile, copy_arrays=True) as af: array = af.tree['array'] x = array[0] assert array.block._memmapped == False # We can't just check for isinstance(..., np.array) since this will # be true for np.memmap as well assert not isinstance(array.block._data, np.memmap) def test_invalid_version(tmpdir): content = b"""#ASDF 0.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-0.1.0 foo : bar ...""" buff = io.BytesIO(content) with pytest.raises(ValueError): with asdf.AsdfFile.open(buff) as ff: pass def test_valid_version(tmpdir): content = b"""#ASDF 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 foo : bar ...""" buff = io.BytesIO(content) with asdf.AsdfFile.open(buff) as ff: version = ff.file_format_version assert version.major == 1 assert version.minor == 0 assert version.patch == 0 def test_default_version(): # See https://github.com/spacetelescope/asdf/issues/364 version_map = versioning.get_version_map(versioning.default_version) ff = asdf.AsdfFile() assert ff.file_format_version == version_map['FILE_FORMAT'] def test_fd_not_seekable(): data = np.ones(1024) b = block.Block(data=data) fd = io.BytesIO() fd.seekable = lambda: False fd.write_array = lambda arr: fd.write(arr.tobytes()) fd.read_blocks = lambda us: [fd.read(us)] fd.fast_forward = lambda offset: fd.seek(offset, 1) b.output_compression = 'zlib' b.write(fd) fd.seek(0) b = block.Block() b.read(fd) # We lost the information about the underlying array type, # but still can compare the bytes. assert b.data.tobytes() == data.tobytes() def test_top_level_tree(): tree = {'tree': _get_small_tree()} ff = asdf.AsdfFile(tree) assert_tree_match(ff.tree['tree'], ff['tree']) ff2 = asdf.AsdfFile() ff2['tree'] = _get_small_tree() assert_tree_match(ff2.tree['tree'], ff2['tree']) asdf-1.3.3/asdf/tests/data/0000755000175000017500000000000013246031665014757 5ustar dandan00000000000000asdf-1.3.3/asdf/tests/data/custom_flow-1.0.0.yaml0000644000175000017500000000032613243547254020642 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_flow-1.0.0" type: object properties: a: type: number b: type: number flowStyle: blockasdf-1.3.3/asdf/tests/data/custom_style-1.0.0.yaml0000644000175000017500000000023613243547254021033 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_style-1.0.0" type: string style: literal asdf-1.3.3/asdf/tests/data/custom_flow-1.1.0.yaml0000644000175000017500000000032713243547254020644 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom_flow-1.1.0" type: object properties: c: type: number d: type: number flowStyle: block asdf-1.3.3/asdf/tests/data/complex-42.0.0.yaml0000644000175000017500000000021213243547254020027 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/custom-42.0.0" type: stringasdf-1.3.3/asdf/tests/data/version_mismatch.fits0000644000175000017500000001320013243547254021217 0ustar dandan00000000000000SIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 1 / number of array dimensions NAXIS1 = 288 EXTNAME = 'ASDF ' / extension name END #ASDF 1.0.0 #ASDF_STANDARD 1.1.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 a: !core/complex-7.0.0 0j asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.2.2.dev874} ... 
asdf-1.3.3/asdf/tests/data/missing-1.1.0.yaml0000644000175000017500000000021113243547254017744 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/missing-1.1.0" type: objectasdf-1.3.3/asdf/tests/data/custom-1.0.0.yaml0000644000175000017500000000022513243547254017611 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/custom-1.0.0" type: integer default: 42asdf-1.3.3/asdf/tests/data/self_referencing-1.0.0.yaml0000644000175000017500000000026713243547254021605 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/self_referencing-1.0.0" anyOf: - type: object - $ref: "#/anyOf/0" ...asdf-1.3.3/asdf/tests/data/fraction-1.0.0.yaml0000644000175000017500000000043713243547254020111 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/fraction-1.0.0" title: An example custom type for handling fractions tag: "tag:nowhere.org:custom/fraction-1.0.0" type: array items: type: integer minItems: 2 maxItems: 2 ...asdf-1.3.3/asdf/tests/data/default-1.0.0.yaml0000644000175000017500000000042513243547254017725 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/default-1.0.0" type: object properties: a: type: integer default: 42 b: type: object properties: c: type: integer default: 82asdf-1.3.3/asdf/tests/data/example_schema.json0000755000175000017500000000423313243547254020635 0ustar dandan00000000000000{ "date" : { "title" : "[yyyy-mm-ddThh:mm:ss.ssssss] UTC date file created", "type" : "string", "sql_dtype" : "datetime2", "fits_keyword" : "DATE", "description" : "The UTC date and time when the HDU was created, in the form YYYY-MM-DDThh:mm:ss.ssssss, where YYYY shall be the four-digit calendar year number, MM the two-digit month number with January given by 01 and December by 12, and DD the two-digit day of the month. The literal T shall separate the date and time, hh shall be the two-digit hour in the day, mm the two-digit number of minutes after the hour, and ss.ssssss the number of seconds (two digits followed by a fraction accurate to microseconds) after the minute. 
Default values must not be given to any portion of the date/time string, and leading zeros must not be omitted.", "calculation" : "Operating system time in the format of YYYY-MM-DDThh:mm:ss.ssssss", "default_value" : "", "example" : "2015-01-01T00:00:00.000001", "units" : "", "sw_source" : "calculation", "source" : "Science Data Processing (SDP)", "destination" : ["ScienceCommon.date","GuideStar.date"], "level" : "1a", "si" : "Multiple", "section" : "Basic", "mode" : "All", "fits_hdu" : "PRIMARY", "misc" : "" }, "origin" : { "title" : "institution responsible for creating FITS file", "type" : "string", "sql_dtype" : "nvarchar(20)", "fits_keyword" : "ORIGIN", "description" : "Identifies the organization or institution responsible for creating the FITS file.", "calculation" : "", "default_value" : "STSCI", "example" : "STSCI", "units" : "", "sw_source" : "", "source" : "Science Data Processing (SDP)", "destination" : ["ScienceCommon.origin","GuideStar.origin"], "level" : "1a", "si" : "Multiple", "section" : "Basic", "mode" : "All", "fits_hdu" : "PRIMARY", "misc" : "" } } asdf-1.3.3/asdf/tests/__init__.py0000644000175000017500000000115313243547254016162 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This packages contains affiliated package tests. """ from .. import CustomType class CustomTestType(CustomType): """This class is intended to be inherited by custom types that are used purely for the purposes of testing. The methods ``from_tree_tagged`` and ``from_tree`` are implemented solely in order to avoid custom type conversion warnings. """ @classmethod def from_tree_tagged(cls, tree, ctx): return cls.from_tree(tree.data, ctx) @classmethod def from_tree(cls, tree, ctx): return tree asdf-1.3.3/asdf/tests/test_versioning.py0000644000175000017500000001562013246003441017635 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import pytest from itertools import combinations from ..versioning import AsdfVersion, AsdfSpec def test_version_constructor(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion((1,0,0)) ver2 = AsdfVersion([1,0,0]) assert str(ver0) == '1.0.0' assert str(ver1) == '1.0.0' assert str(ver2) == '1.0.0' def test_version_and_version_equality(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion('1.0.0') assert ver0 is not ver1 assert ver0 == ver1 assert ver1 == ver0 assert not (ver0 != ver1) assert not (ver1 != ver0) def test_version_and_string_equality(): version = AsdfVersion('1.0.0') string_ver = '1.0.0' assert version == string_ver assert string_ver == version assert not (version != string_ver) assert not (string_ver != version) def test_version_and_tuple_equality(): version = AsdfVersion('1.0.0') tuple_ver = (1,0,0) assert version == tuple_ver assert tuple_ver == version assert not (version != tuple_ver) assert not (tuple_ver != version) def test_version_and_version_inequality(): ver0 = AsdfVersion('1.0.0') ver1 = AsdfVersion('1.0.1') ver2 = AsdfVersion('1.1.0') ver3 = AsdfVersion('1.1.1') ver4 = AsdfVersion('2.0.0') ver5 = AsdfVersion('2.0.1') ver6 = AsdfVersion('2.1.0') ver7 = AsdfVersion('2.1.1') versions = [ver0, ver1, ver2, ver3, ver4, ver5, ver6, ver7] for x,y in combinations(versions, 2): assert not (x == y) assert x != y assert ver0 < ver1 < ver2 < ver3 < ver4 < ver5 < ver6 < ver7 assert ver7 > ver6 > ver5 > ver4 > ver3 > ver2 > ver1 > ver0 assert (ver0 < ver1 < ver2 < ver4 < ver3 < 
ver5 < ver6 < ver7) == False assert (ver7 > ver6 > ver5 > ver3 > ver4 > ver2 > ver1 > ver0) == False assert ver0 <= ver1 <= ver2 <= ver3 <= ver4 <= ver5 <= ver6 <= ver7 assert ver7 >= ver6 >= ver5 >= ver4 >= ver3 >= ver2 >= ver1 >= ver0 def test_version_and_string_inequality(): version = AsdfVersion('2.0.0') assert version > '1.0.0' assert version > '1.0.1' assert version > '1.1.0' assert version > '1.1.1' assert (version > '2.0.0') == False assert (version < '2.0.0') == False assert version < '2.0.1' assert version < '2.1.0' assert version < '2.1.1' assert version >= '1.0.0' assert version >= '1.0.1' assert version >= '1.1.0' assert version >= '1.1.1' assert version >= '2.0.0' assert version <= '2.0.0' assert version <= '2.0.1' assert version <= '2.1.0' assert version <= '2.1.1' assert '1.0.0' < version assert '1.0.1' < version assert '1.1.0' < version assert '1.1.1' < version assert ('2.0.0' < version) == False assert ('2.0.0' > version) == False assert '2.0.1' > version assert '2.1.0' > version assert '2.1.1' > version assert '1.0.0' <= version assert '1.0.1' <= version assert '1.1.0' <= version assert '1.1.1' <= version assert '2.0.0' <= version assert '2.0.0' >= version assert '2.0.1' >= version assert '2.1.0' >= version assert '2.1.1' >= version def test_version_and_tuple_inequality(): version = AsdfVersion('2.0.0') assert version > (1,0,0) assert version > (1,0,1) assert version > (1,1,0) assert version > (1,1,1) assert (version > (2,0,0)) == False assert (version < (2,0,0)) == False assert version < (2,0,1) assert version < (2,1,0) assert version < (2,1,1) assert version >= (1,0,0) assert version >= (1,0,1) assert version >= (1,1,0) assert version >= (1,1,1) assert version >= (2,0,0) assert version <= (2,0,0) assert version <= (2,0,1) assert version <= (2,1,0) assert version <= (2,1,1) assert (1,0,0) < version assert (1,0,1) < version assert (1,1,0) < version assert (1,1,1) < version assert ((2,0,0) < version) == False assert ((2,0,0) > version) == False assert (2,0,1) > version assert (2,1,0) > version assert (2,1,1) > version assert (1,0,0) <= version assert (1,0,1) <= version assert (1,1,0) <= version assert (1,1,1) <= version assert (2,0,0) <= version assert (2,0,0) >= version assert (2,0,1) >= version assert (2,1,0) >= version assert (2,1,1) >= version def test_spec_version_match(): spec = AsdfSpec('>=1.1.0') assert spec.match(AsdfVersion('1.1.0')) assert spec.match(AsdfVersion('1.2.0')) assert not spec.match(AsdfVersion('1.0.0')) assert not spec.match(AsdfVersion('1.0.9')) def test_spec_version_select(): spec = AsdfSpec('>=1.1.0') versions = [AsdfVersion(x) for x in ['1.0.0', '1.0.9', '1.1.0', '1.2.0']] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_version_filter(): spec = AsdfSpec('>=1.1.0') versions = [AsdfVersion(x) for x in ['1.0.0', '1.0.9', '1.1.0', '1.2.0']] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_string_match(): spec = AsdfSpec('>=1.1.0') assert spec.match('1.1.0') assert spec.match('1.2.0') assert not spec.match('1.0.0') assert not spec.match('1.0.9') def test_spec_string_select(): spec = AsdfSpec('>=1.1.0') versions = ['1.0.0', '1.0.9', '1.1.0', '1.2.0'] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_string_filter(): spec = AsdfSpec('>=1.1.0') versions = ['1.0.0', '1.0.9', '1.1.0', '1.2.0'] for x,y in zip(spec.filter(versions), 
['1.1.0', '1.2.0']): assert x == y def test_spec_tuple_match(): spec = AsdfSpec('>=1.1.0') assert spec.match((1,1,0)) assert spec.match((1,2,0)) assert not spec.match((1,0,0)) assert not spec.match((1,0,9)) def test_spec_tuple_select(): spec = AsdfSpec('>=1.1.0') versions = [(1,0,0), (1,0,9), (1,1,0), (1,2,0)] assert spec.select(versions) == '1.2.0' assert spec.select(versions[:-1]) == '1.1.0' assert spec.select(versions[:-2]) == None def test_spec_tuple_filter(): spec = AsdfSpec('>=1.1.0') versions = [(1,0,0), (1,0,9), (1,1,0), (1,2,0)] for x,y in zip(spec.filter(versions), ['1.1.0', '1.2.0']): assert x == y def test_spec_equal(): """Make sure that equality means match""" spec = AsdfSpec('>=1.2.0') version0 = AsdfVersion('1.1.0') version1 = AsdfVersion('1.3.0') assert spec != version0 assert version0 != spec assert spec == version1 assert version1 == spec assert spec != '1.1.0' assert '1.1.0' != spec assert spec == '1.3.0' assert '1.3.0' == spec assert spec != (1, 1, 0) assert (1, 1, 0) != spec assert spec == (1, 3, 0) assert (1, 3, 0) == spec asdf-1.3.3/asdf/tests/test_stream.py0000644000175000017500000001257713246003441016755 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import io import os import numpy as np from numpy.testing import assert_array_equal import pytest from .. import asdf from .. import generic_io from .. import stream def test_stream(): buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.float64).tostring()) buff.seek(0) with asdf.AsdfFile.open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_write_nothing(): # Test that if you write nothing, you get a zero-length array buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) buff.seek(0) with asdf.AsdfFile().open(buff) as ff: assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (0, 6, 2) def test_stream_twice(): # Test that if you write nothing, you get a zero-length array buff = io.BytesIO() tree = { 'stream': stream.Stream([6, 2], np.uint8), 'stream2': stream.Stream([12, 2], np.uint8) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.uint8).tostring()) buff.seek(0) ff = asdf.AsdfFile().open(buff) assert len(ff.blocks) == 1 assert ff.tree['stream'].shape == (100, 6, 2) assert ff.tree['stream2'].shape == (50, 12, 2) def test_stream_with_nonstream(): buff = io.BytesIO() tree = { 'nonstream': np.array([1, 2, 3, 4], np.int64), 'stream': stream.Stream([6, 2], np.float64) } ff = asdf.AsdfFile(tree) ff.write_to(buff) for i in range(100): buff.write(np.array([i] * 12, np.float64).tostring()) buff.seek(0) with asdf.AsdfFile().open(buff) as ff: assert len(ff.blocks) == 1 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) assert len(ff.blocks) == 2 for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_real_file(tmpdir): path = os.path.join(str(tmpdir), 'test.asdf') tree = { 'nonstream': np.array([1, 2, 3, 4], np.int64), 'stream': stream.Stream([6, 2], np.float64) } with open(path, 'wb') as fd: ff = asdf.AsdfFile(tree) 
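        # Pattern shown by this test (as grounded in the assertions below, not
        # a general API statement): write_to() serializes the tree and any
        # fixed-size blocks first, and the raw bytes appended to the file
        # afterwards are picked up as rows of the single streamed block when
        # the file is reopened (hence the (100, 6, 2) shape checked later).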
ff.write_to(fd) for i in range(100): fd.write(np.array([i] * 12, np.float64).tostring()) with asdf.AsdfFile().open(path) as ff: assert len(ff.blocks) == 1 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) assert len(ff.blocks) == 2 for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_stream_to_stream(): tree = { 'nonstream': np.array([1, 2, 3, 4], np.int64), 'stream': stream.Stream([6, 2], np.float64) } buff = io.BytesIO() fd = generic_io.OutputStream(buff) ff = asdf.AsdfFile(tree) ff.write_to(fd) for i in range(100): fd.write(np.array([i] * 12, np.float64).tostring()) buff.seek(0) with asdf.AsdfFile().open(generic_io.InputStream(buff, 'r')) as ff: assert len(ff.blocks) == 2 assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64)) assert ff.tree['stream'].shape == (100, 6, 2) for i, row in enumerate(ff.tree['stream']): assert np.all(row == i) def test_array_to_stream(tmpdir): tree = { 'stream': np.array([1, 2, 3, 4], np.int64), } buff = io.BytesIO() ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['stream'], 'streamed') ff.write_to(buff) buff.write(np.array([5, 6, 7, 8], np.int64).tostring()) buff.seek(0) ff = asdf.AsdfFile().open(generic_io.InputStream(buff)) assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8]) buff.seek(0) ff2 = asdf.AsdfFile(ff) ff2.write_to(buff) assert b"shape: ['*']" in buff.getvalue() with open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd: ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['stream'], 'streamed') ff.write_to(fd) fd.write(np.array([5, 6, 7, 8], np.int64).tostring()) with asdf.AsdfFile().open(os.path.join(str(tmpdir), 'test.asdf')) as ff: assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8]) ff2 = asdf.AsdfFile(ff) ff2.write_to(buff) assert b"shape: ['*']" in buff.getvalue() def test_too_many_streams(): tree = { 'stream1': np.array([1, 2, 3, 4], np.int64), 'stream2': np.array([1, 2, 3, 4], np.int64) } ff = asdf.AsdfFile(tree) ff.set_array_storage(tree['stream1'], 'streamed') with pytest.raises(ValueError): ff.set_array_storage(tree['stream2'], 'streamed') def test_stream_repr_and_str(): tree = { 'stream': stream.Stream([16], np.int64) } ff = asdf.AsdfFile(tree) repr(ff.tree['stream']) str(ff.tree['stream']) asdf-1.3.3/asdf/__init__.py0000644000175000017500000000253413246003441015010 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function """ asdf: Python library for reading and writing Advanced Scientific Data Format (ASDF) files """ # Affiliated packages may add whatever they like to this file, but # should keep this content at the top. # ---------------------------------------------------------------------------- from ._internal_init import * # ---------------------------------------------------------------------------- if _ASDF_SETUP_ is False: __all__ = ['AsdfFile', 'CustomType', 'AsdfExtension', 'Stream', 'open', 'test', 'commands', 'ValidationError'] try: import yaml as _ except ImportError: raise ImportError("asdf requires pyyaml") try: import jsonschema as _ except ImportError: raise ImportError("asdf requires jsonschema") try: import numpy as _ except ImportError: raise ImportError("asdf requires numpy") from .asdf import AsdfFile from .asdftypes import CustomType from .extension import AsdfExtension from .stream import Stream from . 
import commands from jsonschema import ValidationError class ValidationError(ValidationError): pass open = AsdfFile.open asdf-1.3.3/asdf/yamlutil.py0000644000175000017500000002525713246003441015120 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import numpy as np import six import yaml import warnings from collections import OrderedDict from . constants import YAML_TAG_PREFIX from . import schema from . import tagged from . import treeutil from . import asdftypes from . import versioning from . import util if getattr(yaml, '__with_libyaml__', None): # pragma: no cover _yaml_base_dumper = yaml.CSafeDumper _yaml_base_loader = yaml.CSafeLoader else: # pragma: no cover _yaml_base_dumper = yaml.SafeDumper _yaml_base_loader = yaml.SafeLoader # ---------------------------------------------------------------------- # Custom loader/dumpers _yaml_base_type_map = { yaml.MappingNode: lambda node, loader: loader.construct_mapping(node, deep=True), yaml.SequenceNode: lambda node, loader: loader.construct_sequence(node, deep=True), yaml.ScalarNode: lambda node, loader: loader.construct_scalar(node) } def _yaml_to_base_type(node, loader): """ Converts a PyYAML node type to a basic Python data type. Parameters ---------- node : yaml.Node The node is converted to a basic Python type using the following: - MappingNode -> dict - SequenceNode -> list - ScalarNode -> str, int, float etc. loader : yaml.Loader Returns ------- basic : object Basic Python data type. """ def unknown_type_exception(node, loader): raise TypeError("Don't know how to implicitly construct '{0}'".format( type(node))) return _yaml_base_type_map.get( type(node), unknown_type_exception)(node, loader) class AsdfDumper(_yaml_base_dumper): """ A specialized YAML dumper that understands "tagged basic Python data types" as implemented in the `tagged` module. 
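
    The ``represent_data`` method propagates the ``_tag`` attribute of a
    tagged object onto the node returned by the base dumper, so that the
    custom YAML tag is preserved in the serialized output.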
""" def represent_data(self, data): node = super(AsdfDumper, self).represent_data(data) tag_name = getattr(data, '_tag', None) if tag_name is not None: node.tag = tag_name return node _flow_style_map = { 'flow': True, 'block': False } def represent_sequence(dumper, sequence): flow_style = _flow_style_map.get(sequence.flow_style, None) sequence = sequence.data return super(AsdfDumper, dumper).represent_sequence( None, sequence, flow_style) def represent_mapping(dumper, mapping): flow_style = _flow_style_map.get(mapping.flow_style, None) node = super(AsdfDumper, dumper).represent_mapping( None, mapping.data, flow_style) if mapping.property_order: values = node.value new_mapping = {} for key, val in values: new_mapping[key.value] = (key, val) new_values = [] for key in mapping.property_order: if key in mapping: new_values.append(new_mapping[key]) property_order = set(mapping.property_order) for key, val in values: if key.value not in property_order: new_values.append((key, val)) node.value = new_values return node _style_map = { 'inline': '"', 'folded': '>', 'literal': '|' } def represent_scalar(dumper, value): style = _style_map.get(value.style, None) return super(AsdfDumper, dumper).represent_scalar( None, value.data, style) AsdfDumper.add_representer(tagged.TaggedList, represent_sequence) AsdfDumper.add_representer(tagged.TaggedDict, represent_mapping) AsdfDumper.add_representer(tagged.TaggedString, represent_scalar) class AsdfLoader(_yaml_base_loader): """ A specialized YAML loader that can construct "tagged basic Python data types" as implemented in the `tagged` module. """ ignore_version_mismatch = False def construct_object(self, node, deep=False): tag = node.tag if node.tag in self.yaml_constructors: return super(AsdfLoader, self).construct_object(node, deep=False) data = _yaml_to_base_type(node, self) tag = self.ctx.type_index.fix_yaml_tag( self.ctx, tag, self.ignore_version_mismatch) data = tagged.tag_object(tag, data) return data # ---------------------------------------------------------------------- # Handle omap (ordered mappings) YAML_OMAP_TAG = YAML_TAG_PREFIX + 'omap' # Add support for loading YAML !!omap objects as OrderedDicts and dumping # OrderedDict in the omap format as well. def ordereddict_constructor(loader, node): try: omap = loader.construct_yaml_omap(node) return OrderedDict(*omap) except yaml.constructor.ConstructorError: return list(*loader.construct_yaml_seq(node)) def represent_ordered_mapping(dumper, tag, data): # TODO: Again, adjust for preferred flow style, and other stylistic details # NOTE: For block style this uses the compact omap notation, but for flow style # it does not. 
# TODO: Need to see if I can figure out a mechanism so that classes that # use this representer can specify which values should use flow style values = [] node = yaml.SequenceNode(tag, values, flow_style=dumper.default_flow_style) if dumper.alias_key is not None: dumper.represented_objects[dumper.alias_key] = node for key, value in data.items(): key_item = dumper.represent_data(key) value_item = dumper.represent_data(value) node_item = yaml.MappingNode(YAML_OMAP_TAG, [(key_item, value_item)], flow_style=False) values.append(node_item) return node def represent_ordereddict(dumper, data): return represent_ordered_mapping(dumper, YAML_OMAP_TAG, data) AsdfLoader.add_constructor(YAML_OMAP_TAG, ordereddict_constructor) AsdfDumper.add_representer(OrderedDict, represent_ordereddict) # ---------------------------------------------------------------------- # Handle numpy scalars for scalar_type in util.iter_subclasses(np.floating): AsdfDumper.add_representer(scalar_type, AsdfDumper.represent_float) for scalar_type in util.iter_subclasses(np.integer): AsdfDumper.add_representer(scalar_type, AsdfDumper.represent_int) # ---------------------------------------------------------------------- # Unicode fix on Python 2 if six.PY2: # pragma: no cover # This dumps Python unicode strings as regular YAML strings rather # than !!python/unicode. See http://pyyaml.org/ticket/11 def _unicode_representer(dumper, value): return dumper.represent_scalar("tag:yaml.org,2002:str", value) AsdfDumper.add_representer(unicode, _unicode_representer) AsdfLoader.add_constructor('tag:yaml.org,2002:str', AsdfLoader.construct_scalar) def custom_tree_to_tagged_tree(tree, ctx): """ Convert a tree, possibly containing custom data types that aren't directly representable in YAML, to a tree of basic data types, annotated with tags. """ def walker(node): tag = ctx.type_index.from_custom_type(type(node), ctx.version_string) if tag is not None: return tag.to_tree_tagged(node, ctx) return node return treeutil.walk_and_modify(tree, walker) def tagged_tree_to_custom_tree(tree, ctx, force_raw_types=False): """ Convert a tree containing only basic data types, annotated with tags, to a tree containing custom data types. """ def walker(node): if force_raw_types: return node tag_name = getattr(node, '_tag', None) if tag_name is None: return node tag_type = ctx.type_index.from_yaml_tag(ctx, tag_name) # This means the tag did not correspond to any type in our type index. if tag_type is None: if not ctx._ignore_unrecognized_tag: warnings.warn("{} is not recognized, converting to raw Python " "data structure".format(tag_name)) return node real_tag = ctx.type_index.get_real_tag(tag_name) real_tag_name, real_tag_version = asdftypes.split_tag_version(real_tag) # This means that there is an explicit description of versions that are # compatible with the associated tag class implementation, but the # version we found does not fit that description. if tag_type.incompatible_version(real_tag_version): warnings.warn("Version {} of {} is not compatible with any " "existing tag implementations".format( real_tag_version, real_tag_name)) return node # If a tag class does not explicitly list compatible versions, then all # versions of the corresponding schema are assumed to be compatible. # Therefore we need to check to make sure whether the conversion is # actually successful, and just return a raw Python data type if it is # not. 
try: return tag_type.from_tree_tagged(node, ctx) except TypeError as err: warnings.warn("Failed to convert {} to custom type (detail: {}). " "Using raw Python data structure instead".format(real_tag, err)) return node return treeutil.walk_and_modify(tree, walker) def load_tree(stream, ctx, ignore_version_mismatch=False): """ Load YAML, returning a tree of objects. Parameters ---------- stream : readable file-like object Stream containing the raw YAML content. """ class AsdfLoaderTmp(AsdfLoader): pass AsdfLoaderTmp.ctx = ctx AsdfLoaderTmp.ignore_version_mismatch = ignore_version_mismatch return yaml.load(stream, Loader=AsdfLoaderTmp) def dump_tree(tree, fd, ctx): """ Dump a tree of objects, possibly containing custom types, to YAML. Parameters ---------- tree : object Tree of objects, possibly containing custom data types. fd : asdf.generic_io.GenericFile A file object to dump the serialized YAML to. ctx : Context The writing context. """ class AsdfDumperTmp(AsdfDumper): pass AsdfDumperTmp.ctx = ctx tags = None if hasattr(tree, 'yaml_tag'): tag = tree.yaml_tag tag = tag[:tag.index('/core/asdf') + 1] if tag.strip(): tags = {'!': tag} tree = custom_tree_to_tagged_tree(tree, ctx) schema.validate(tree, ctx) schema.remove_defaults(tree, ctx) yaml_version = tuple( int(x) for x in ctx.version_map['YAML_VERSION'].split('.')) yaml.dump_all( [tree], stream=fd, Dumper=AsdfDumperTmp, explicit_start=True, explicit_end=True, version=yaml_version, allow_unicode=True, encoding='utf-8', tags=tags) asdf-1.3.3/setup.py0000755000175000017500000001055013246031624013500 0ustar dandan00000000000000#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import glob import os import sys import ah_bootstrap from setuptools import setup #A dirty hack to get around some early import/configurations ambiguities if sys.version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASDF_SETUP_ = True from astropy_helpers.setup_helpers import ( register_commands, get_debug_option, get_package_info) from astropy_helpers.git_helpers import get_git_devstr from astropy_helpers.version_helpers import generate_version_py # Get some values from the setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read(['setup.cfg']) metadata = dict(conf.items('metadata')) PACKAGENAME = metadata.get('package_name', 'packagename') DESCRIPTION = metadata.get('description', 'package description') AUTHOR = metadata.get('author', '') AUTHOR_EMAIL = metadata.get('author_email', '') LICENSE = metadata.get('license', 'unknown') URL = metadata.get('url', '') # Get the long description from the package's docstring __import__('asdf') package = sys.modules['asdf'] LONG_DESCRIPTION = package.__doc__ # Store the package name in a built-in variable so it's easy # to get from other parts of the setup infrastructure builtins._PACKAGE_NAME_ = 'asdf' # VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) VERSION = '1.3.3' # Indicates if this version is a release version RELEASE = 'dev' not in VERSION if not RELEASE: VERSION += get_git_devstr(False) # Get root of asdf-standard documents ASDF_STANDARD_ROOT = os.environ.get('ASDF_STANDARD_ROOT', 'asdf-standard') # Populate the dict of setup command overrides; this should be done before # invoking any other functionality from distutils since it can potentially # modify distutils' behavior. 
cmdclassd = register_commands('asdf', VERSION, RELEASE) # Freeze build information in version.py generate_version_py('asdf', VERSION, RELEASE, get_debug_option('asdf')) # Treat everything in scripts except README.rst as a script to be installed scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) if os.path.basename(fname) != 'README.rst'] # Get configuration information from all of the various subpackages. # See the docstring for setup_helpers.update_package_files for more # details. package_info = get_package_info() # Add the project-global data package_info['package_data'].setdefault('asdf', []).append('data/*') # The schemas come from a git submodule, so we deal with them here schema_root = os.path.join(ASDF_STANDARD_ROOT, "schemas") package_info['package_dir']['asdf.schemas'] = schema_root package_info['packages'].append('asdf.schemas') # The reference files come from a git submodule, so we deal with them here reference_file_root = os.path.join( ASDF_STANDARD_ROOT, "reference_files") package_info['package_dir']['asdf.reference_files'] = reference_file_root for dirname in os.listdir(reference_file_root): package_info['package_dir']['asdf.reference_files.' + dirname] = os.path.join( reference_file_root, dirname) package_info['packages'].append('asdf.reference_files') #Define entry points for command-line scripts entry_points = {} entry_points['console_scripts'] = [ 'asdftool = asdf.commands.main:main', ] # Add the dependencies which are not strictly needed but enable otherwise skipped tests extra_requires = [] if os.getenv('CI'): extra_requires.extend(['lz4>=0.10']) # Note that requires and provides should not be included in the call to # ``setup``, since these are now deprecated. See this link for more details: # https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM setup(name=PACKAGENAME, version=VERSION, description=DESCRIPTION, scripts=scripts, python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*', install_requires=[ 'semantic_version>=2.3.1', 'pyyaml>=3.10', 'jsonschema>=2.3.0', 'six>=1.9.0', 'pytest>=2.7.2', 'numpy>=1.8', 'astropy>=1.3', ] + extra_requires, author=AUTHOR, author_email=AUTHOR_EMAIL, license=LICENSE, url=URL, long_description=LONG_DESCRIPTION, cmdclass=cmdclassd, zip_safe=False, entry_points=entry_points, **package_info ) asdf-1.3.3/licenses/0000755000175000017500000000000013246031665013574 5ustar dandan00000000000000asdf-1.3.3/licenses/LICENSE.rst0000644000175000017500000000274113243547254015417 0ustar dandan00000000000000Copyright (c) 2014, Space Telescope Science Institute All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. asdf-1.3.3/licenses/README.rst0000644000175000017500000000024213243547254015264 0ustar dandan00000000000000Licenses ======== This directory holds license and credit information for the affiliated package, works the affiliated package is derived from, and/or datasets. asdf-1.3.3/PKG-INFO0000644000175000017500000000066113246031665013067 0ustar dandan00000000000000Metadata-Version: 1.2 Name: asdf Version: 1.3.3 Summary: Python tools to handle ASDF files Home-page: http://github.com/spacetelescope/asdf Author: Erik Bray, Michael Droettboom Author-email: mdroe@stsci.edu License: BSD Description-Content-Type: UNKNOWN Description: Advanced Scienctific Data Format (ASDF) is a next generation interchange format for scientific data Platform: UNKNOWN Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.* asdf-1.3.3/asdf-standard/0000755000175000017500000000000013246031665014502 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/0000755000175000017500000000000013246031665016125 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/0000755000175000017500000000000013246031665020026 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/yaml-schema/0000755000175000017500000000000013246031665022226 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/yaml-schema/draft-01.yaml0000644000175000017500000000573113243564222024433 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://json-schema.org/draft-04/schema" id: "http://stsci.edu/schemas/yaml-schema/draft-01" title: YAML Schema description: | A metaschema extending JSON Schema's metaschema to add support for some YAML-specific constructions. allOf: - $ref: "http://json-schema.org/draft-04/schema" - type: object properties: tag: description: | A fully-qualified YAML tag name that should be associated with the object type returned by the YAML parser; for example, the object must be an instance of the class registered with the parser to create instances of objects with this tag. Implementation of this validator is optional and depends on details of the YAML parser. type: string minLength: 6 propertyOrder: description: | Specifies the default order of the properties when writing out. Any keys not listed in propertyOrder will be in arbitrary order at the end. type: array items: type: string flowStyle: description: | Specifies the default serialization style to use for an array or object. YAML supports multiple styles for arrays/sequences and objects/maps, called "block style" and "flow style". For example:: Block style: !!map Clark : Evans Ingy : döt Net Oren : Ben-Kiki Flow style: !!map { Clark: Evans, Ingy: döt Net, Oren: Ben-Kiki } This property gives a hint to the tool outputting the YAML which style to use. If not provided, the library is free to use whatever heuristics it wishes to determine the output style. This property does not enforce any particular style on YAML being parsed. 
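        # Usage sketch (illustrative, not part of this metaschema): a schema
        # describing a short coordinate pair could declare
        #   type: array
        #   flowStyle: flow
        # so that writers emit it inline, e.g. "[ra, dec]", rather than as a
        # block sequence.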
type: string enum: [block, flow] style: description: | Specifies the default serialization style to use for a string. YAML supports multiple styles for strings:: Inline style: "First line\nSecond line" Literal style: | First line Second line Folded style: > First line Second line This property gives a hint to the tool outputting the YAML which style to use. If not provided, the library is free to use whatever heuristics it wishes to determine the output style. This property does not enforce any particular style on YAML being parsed. type: string enum: [inline, literal, folded] examples: description: | A list of examples to help document the schema. Each pair is a prose description followed by a string containing YAML content. type: array items: type: array items: - type: string - type: string ... asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/0000755000175000017500000000000013246031665020743 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/0000755000175000017500000000000013246031665021537 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.1.0.yaml0000644000175000017500000000117013243564222026041 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/spectral_frame-1.1.0" title: > Represents a spectral frame. allOf: - type: object properties: reference_position: description: | The position of the reference frame. enum: [geocenter, barycenter, heliocenter] default: geocenter axes_names: minItems: 1 maxItems: 1 axes_order: minItems: 1 maxItems: 1 unit: minItems: 1 maxItems: 1 - $ref: frame-1.1.0 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.0.0.yaml0000644000175000017500000001316713243564222024154 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/frame-1.0.0" title: | The base class of all coordinate frames. description: | These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. Most of these coordinate frames are defined in [IERS conventions](http://www.iers.org/IERS/EN/Publications/TechnicalNotes/tn36.html). examples: - - | A celestial frame in the FK4 reference frame. - | !wcs/celestial_frame-1.0.0 axes_names: [ra, dec] name: CelestialFrame reference_frame: type: FK4 equinox: !time/time-1.0.0 '2010-01-01 00:00:00.000' obstime: !time/time-1.0.0 '2015-01-01 00:00:00.000' unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] type: object properties: name: description: | A user-friendly name for the frame. type: string axes_order: description: | The order of the axes. type: array items: type: integer axes_names: description: | The name of each axis in this frame. type: array items: anyOf: - type: string - type: 'null' reference_frame: description: | The reference frame. type: object properties: type: description: | The reference frame type. Some reference frame types require additional properties, listed next to each reference frame type below. The reference frames types are: - `ICRS` - `FK5`: `equinox`. - `FK4`: `equinox` and optionally `obstime`. - `FK4_noeterms`: `equinox` and optionally `obstime`. - `galactic` - `galactocentric`: `galcen_distance`, `galcen_ra`, `galcen_dec`, `z_sun` and `roll`. - `GCRS`: `obstime`, `obsgeoloc`, and `obsgeovel`. - `CIRS`: `obstime`. - `ITRS`: `obstime`. 
- `precessed_geocentric`: `obstime`, `obsgeoloc`, and `obsgeovel`. enum: [ICRS, FK5, FK4, FK4_noeterms, galactic, galactocentric, GCRS, CIRS, ITRS, precessed_geocentric] default: ICRS equinox: description: | The equinox of the reference frame. Required when `reference_frame` one of: `FK5`, `FK4`, `FK4_noeterms` $ref: ../time/time-1.0.0 obstime: description: | The observation time of the reference frame, used to determine the location of the Earth. Required when `reference_frame` is one of: `FK4`, `FK4_noeterms`, `GCRS`, `CIRS`, `ITRS` If not provided, it defaults to the same value as `equinox`. $ref: ../time/time-1.0.0 galcen_distance: description: | The distance from the Sun to the Galactic center. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: pc galcen_ra: description: | The Right Ascension (RA) of the Galactic center in the ICRS frame. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: deg galcen_dec: description: | The Declination (DEC) of the Galactic center in the ICRS frame. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: deg z_sun: description: | The distance from the sun to the galactic midplane. Required when `reference_frame` is `galactocentric`. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: pc roll: description: | The angle to rotate about the final x-axis, relative to the orientation for `galactic`. Required when `reference_frame` is `galactocentric`. type: array items: - type: number - $ref: ../unit/unit-1.0.0 default: deg obsgeoloc: description: | 3-vector giving the position of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: - type: array items: type: number minItems: 3 maxItems: 3 - $ref: ../unit/unit-1.0.0 default: m default: - [0, 0, 0] obsgeovel: description: | 3-vector giving the velocity of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: - type: array items: type: number minItems: 3 maxItems: 3 - $ref: ../unit/unit-1.0.0 default: m/s default: - [0, 0, 0] required: [type] unit: description: | Units for each axis. type: array items: $ref: ../unit/unit-1.0.0 required: [name] additionalProperties: trueasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/frame-1.1.0.yaml0000644000175000017500000001250513243564222024150 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/frame-1.1.0" title: | The base class of all coordinate frames. description: | These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. Most of these coordinate frames are defined in [IERS conventions](http://www.iers.org/IERS/EN/Publications/TechnicalNotes/tn36.html). examples: - - | A celestial frame in the FK4 reference frame. 
- | !wcs/celestial_frame-1.1.0 axes_names: [ra, dec] name: CelestialFrame reference_frame: type: FK4 equinox: !time/time-1.1.0 '2010-01-01 00:00:00.000' obstime: !time/time-1.1.0 '2015-01-01 00:00:00.000' unit: [!unit/unit-1.0.0 deg, !unit/unit-1.0.0 deg] type: object properties: name: description: | A user-friendly name for the frame. type: string axes_order: description: | The order of the axes. type: array items: type: integer axes_names: description: | The name of each axis in this frame. type: array items: anyOf: - type: string - type: 'null' reference_frame: description: | The reference frame. type: object properties: type: description: | The reference frame type. Some reference frame types require additional properties, listed next to each reference frame type below. The reference frames types are: - `ICRS` - `FK5`: `equinox`. - `FK4`: `equinox` and optionally `obstime`. - `FK4_noeterms`: `equinox` and optionally `obstime`. - `galactic` - `galactocentric`: `galcen_distance`, `galcen_ra`, `galcen_dec`, `z_sun` and `roll`. - `GCRS`: `obstime`, `obsgeoloc`, and `obsgeovel`. - `CIRS`: `obstime`. - `ITRS`: `obstime`. - `precessed_geocentric`: `obstime`, `obsgeoloc`, and `obsgeovel`. enum: [ICRS, FK5, FK4, FK4_noeterms, galactic, galactocentric, GCRS, CIRS, ITRS, precessed_geocentric] default: ICRS equinox: description: | The equinox of the reference frame. Required when `reference_frame` one of: `FK5`, `FK4`, `FK4_noeterms` $ref: ../time/time-1.1.0 obstime: description: | The observation time of the reference frame, used to determine the location of the Earth. Required when `reference_frame` is one of: `FK4`, `FK4_noeterms`, `GCRS`, `CIRS`, `ITRS` If not provided, it defaults to the same value as `equinox`. $ref: ../time/time-1.1.0 galcen_distance: description: | The distance from the Sun to the Galactic center. Required when `reference_frame` is `galactocentric`. $ref: ../unit/quantity-1.1.0 galcen_coord: description: | The ICRS coordinates of the Galactic center. Required when `reference_frame` is `galactocentric`. $ref: icrs_coord-1.1.0 galcen_v_sun: description: | The velocity of the sun in the galactocentric frame as Cartesian velocity components. type: array items: $ref: ../unit/quantity-1.1.0 minItems: 3 maxItems: 3 default: - { value: [0], unit: k/s } - { value: [0], unit: k/s } - { value: [0], unit: k/s } z_sun: description: | The distance from the sun to the galactic midplane. Required when `reference_frame` is `galactocentric`. `reference_frame` is `galactocentric`. $ref: ../unit/quantity-1.1.0 roll: description: | The angle to rotate about the final x-axis, relative to the orientation for `galactic`. Required when `reference_frame` is `galactocentric`. $ref: ../unit/quantity-1.1.0 obsgeoloc: description: | 3-vector giving the position of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. type: array items: $ref: ../unit/quantity-1.1.0 minItems: 3 maxItems: 3 default: - { value: [0], unit: m } - { value: [0], unit: m } - { value: [0], unit: m } obsgeovel: description: | 3-vector giving the velocity of the observer relative to the center-of-mass of the Earth, oriented the same as BCRS/ICRS. Defaults to `[0, 0, 0]`, meaning "true" GCRS. Used when `reference_frame` is `GCRS` or `precessed_geocentric`. 
type: array items: $ref: ../unit/quantity-1.1.0 minItems: 3 maxItems: 3 default: - { value: [0], unit: m/s } - { value: [0], unit: m/s } - { value: [0], unit: m/s } required: [type] unit: description: | Units for each axis. type: array items: $ref: ../unit/unit-1.0.0 required: [name] additionalProperties: true asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.1.0.yaml0000644000175000017500000000071513243564222026175 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/celestial_frame-1.1.0" title: > Represents a celestial frame. allOf: - type: object properties: axes_names: minItems: 2 maxItems: 3 axes_order: minItems: 2 maxItems: 3 unit: minItems: 2 maxItems: 3 - $ref: frame-1.1.0 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.0.0.yaml0000644000175000017500000000203113243564222023642 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.0.0" tag: "tag:stsci.edu:asdf/wcs/wcs-1.0.0" title: > A system for describing generalized world coordinate transformations. description: > ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module. type: object properties: name: description: | A descriptive name for this WCS. type: string steps: description: | A list of steps in the forward transformation from detector to world coordinates. The inverse transformation is determined automatically by reversing this list, and inverting each of the individual transforms according to the rules described in [inverse](ref:http://stsci.edu/schemas/asdf/transform/transform-1.0.0/properties/inverse). type: array items: $ref: step-1.0.0 required: [name, steps] additionalProperties: trueasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.0.0.yaml0000644000175000017500000000136513243564222024032 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/step-1.0.0" tag: "tag:stsci.edu:asdf/wcs/step-1.0.0" title: > Describes a single step of a WCS transform pipeline. description: > examples: [] type: object properties: frame: description: | The frame of the inputs to the transform. anyOf: - type: string - $ref: frame-1.0.0 transform: description: | The transform from this step to the next one. The last step in a WCS should not have a transform, but exists only to describe the frames and units of the final output axes. anyOf: - $ref: ../transform/transform-1.0.0 - type: 'null' default: null required: [frame]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/step-1.1.0.yaml0000644000175000017500000000136613243564222024034 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/step-1.1.0" tag: "tag:stsci.edu:asdf/wcs/step-1.1.0" title: > Describes a single step of a WCS transform pipeline. description: > examples: [] type: object properties: frame: description: | The frame of the inputs to the transform. anyOf: - type: string - $ref: frame-1.1.0 transform: description: | The transform from this step to the next one. The last step in a WCS should not have a transform, but exists only to describe the frames and units of the final output axes. 
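    # Illustrative sketch (this schema ships an empty examples list): a final
    # step that only names the output frame could be written as
    #   - !wcs/step-1.1.0
    #     frame: world
    #     transform: null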
anyOf: - $ref: ../transform/transform-1.1.0 - type: 'null' default: null required: [frame] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/wcs-1.1.0.yaml0000644000175000017500000000203113243564222023643 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/wcs-1.1.0" tag: "tag:stsci.edu:asdf/wcs/wcs-1.1.0" title: > A system for describing generalized world coordinate transformations. description: > ASDF WCS is a way of specifying transformations (usually from detector space to world coordinate space and back) by using the transformations in the `transform-schema` module. type: object properties: name: description: | A descriptive name for this WCS. type: string steps: description: | A list of steps in the forward transformation from detector to world coordinates. The inverse transformation is determined automatically by reversing this list, and inverting each of the individual transforms according to the rules described in [inverse](ref:http://stsci.edu/schemas/asdf/transform/transform-1.1.0/properties/inverse). type: array items: $ref: step-1.1.0 required: [name, steps] additionalProperties: trueasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/icrs_coord-1.1.0.yaml0000644000175000017500000000202213243564222025175 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/icrs_coord-1.1.0" tag: "tag:stsci.edu:asdf/wcs/icrs_coord-1.1.0" title: | Represents an ICRS coordinate object from astropy description: This object represents the right ascension (RA) and declination of an ICRS coordinate or frame. The astropy ICRS class contains additional fields that may be useful to add here in the future. type: object properties: ra: type: object description: | A longitude representing the right ascension of the ICRS coordinate properties: value: type: number unit: $ref: ../unit/unit-1.0.0 default: deg wrap_angle: $ref: ../unit/quantity-1.1.0 default: "360 deg" dec: type: object description: | A latitude representing the declination of the ICRS coordinate properties: value: type: number unit: $ref: ../unit/unit-1.0.0 default: deg required: [ra, dec] ... asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/celestial_frame-1.0.0.yaml0000644000175000017500000000071413243564222026173 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/celestial_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/celestial_frame-1.0.0" title: > Represents a celestial frame. allOf: - type: object properties: axes_names: minItems: 2 maxItems: 3 axes_order: minItems: 2 maxItems: 3 unit: minItems: 2 maxItems: 3 - $ref: frame-1.0.0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.0.0.yaml0000644000175000017500000000074113243564222026230 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/composite_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/composite_frame-1.0.0" title: > Represents a set of frames. allOf: - type: object properties: name: description: Name of composite frame. type: string frames: description: List of frames in the composite frame. 
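        # Illustrative sketch (this schema defines no examples; the names used
        # here are invented): a composite of a celestial and a spectral frame
        # could look like
        #   !wcs/composite_frame-1.0.0
        #     name: cube_frame
        #     frames:
        #       - !wcs/celestial_frame-1.0.0
        #         name: sky
        #         axes_names: [ra, dec]
        #       - !wcs/spectral_frame-1.0.0
        #         name: wavelength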
type: array - $ref: frame-1.0.0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/spectral_frame-1.0.0.yaml0000644000175000017500000000116713243564222026046 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/spectral_frame-1.0.0" tag: "tag:stsci.edu:asdf/wcs/spectral_frame-1.0.0" title: > Represents a spectral frame. allOf: - type: object properties: reference_position: description: | The position of the reference frame. enum: [geocenter, barycenter, heliocenter] default: geocenter axes_names: minItems: 1 maxItems: 1 axes_order: minItems: 1 maxItems: 1 unit: minItems: 1 maxItems: 1 - $ref: frame-1.0.0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/wcs/composite_frame-1.1.0.yaml0000644000175000017500000000071313243564222026230 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/wcs/composite_frame-1.1.0" tag: "tag:stsci.edu:asdf/wcs/composite_frame-1.1.0" title: > Represents a set of frames. allOf: - type: object properties: name: description: Name of composite frame. type: string frames: description: List of frames in the composite frame. type: array asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/0000755000175000017500000000000013246031665021673 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/asdf-1.0.0.yaml0000644000175000017500000000260613243564222024127 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/asdf-1.0.0" title: | Top-level schema for every ASDF file. description: | This schema contains the top-level attributes for every ASDF file. tag: "tag:stsci.edu:asdf/core/asdf-1.0.0" type: object properties: asdf_library: description: | Describes the ASDF library that produced the file. $ref: "software-1.0.0" history: description: | A log of transformations that have happened to the file. May include such things as data collection, data calibration pipelines, data analysis etc. type: array items: $ref: "history_entry-1.0.0" data: description: | The data array corresponds to the main science data array in the file. Oftentimes, the data model will be much more complex than a single array, but this array will be used by applications that just want to convert to a display an image or preview of the file. It is recommended, but not required, that it is a 2-dimensional image array. $ref: "ndarray-1.0.0" fits: description: | A way to specify exactly how this ASDF file should be converted to FITS. $ref: "../fits/fits-1.0.0" wcs: description: | The location of the main WCS for the main data. $ref: "../wcs/wcs-1.0.0" additionalProperties: trueasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/software-1.0.0.yaml0000644000175000017500000000153513243564222025044 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/software-1.0.0" title: | Describes a software package. tag: "tag:stsci.edu:asdf/core/software-1.0.0" type: object properties: name: description: | The name of the application or library. type: string author: description: | The author (or institution) that produced the software package. type: string homepage: description: | A URI to the homepage of the software. type: string format: uri version: description: | The version of the software used. 
It is recommended, but not required, that this follows the (Semantic Versioning Specification)[http://semver.org/spec/v2.0.0.html]. type: string required: [name, author, homepage, version] additionalProperties: trueasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/ndarray-1.0.0.yaml0000644000175000017500000002667113243564222024662 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0" tag: "tag:stsci.edu:asdf/core/ndarray-1.0.0" title: > An *n*-dimensional array. description: | There are two ways to store the data in an ndarray. - Inline in the tree: This is recommended only for small arrays. In this case, the entire ``ndarray`` tag may be a nested list, in which case the type of the array is inferred from the content. (See the rules for type inference in the ``inline-data`` definition below.) The inline data may also be given in the ``data`` property, in which case it is possible to explicitly specify the ``datatype`` and other properties. - External to the tree: The data comes from a [block](ref:block) within the same ASDF file or an external ASDF file referenced by a URI. examples: - - An inline array, with implicit data type - | !core/ndarray-1.0.0 [[1, 0, 0], [0, 1, 0], [0, 0, 1]] - - An inline array, with an explicit data type - | !core/ndarray-1.0.0 datatype: float64 data: [[1, 0, 0], [0, 1, 0], [0, 0, 1]] - - An inline structured array, where the types of each column are automatically detected - | !core/ndarray-1.0.0 [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]] - - An inline structured array, where the types of each column are explicitly specified - | !core/ndarray-1.0.0 datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]] data: [[M110, 110, 205, And], [ M31, 31, 224, And], [ M32, 32, 221, And], [M103, 103, 581, Cas]] - - A double-precision array, in contiguous memory in a block within the same file - | !core/ndarray-1.0.0 source: 0 shape: [1024, 1024] datatype: float64 byteorder: little - - A view of a tile in that image - | !core/ndarray-1.0.0 source: 0 shape: [256, 256] datatype: float64 byteorder: little strides: [8192, 8] offset: 2099200 - - A structured datatype, with nested columns for a coordinate in (*ra*, *dec*), and a 3x3 convolution kernel - | !core/ndarray-1.0.0 source: 0 shape: [64] datatype: - name: coordinate datatype: - name: ra datatype: float64 - name: dec datatype: float64 - name: kernel datatype: float32 shape: [3, 3] byteorder: little - - An array in Fortran order - | !core/ndarray-1.0.0 source: 0 shape: [1024, 1024] datatype: float64 byteorder: little strides: [8192, 8] - - An array where values of -999 are treated as missing - | !core/ndarray-1.0.0 source: 0 shape: [256, 256] datatype: float64 byteorder: little mask: -999 - - An array where another array is used as a mask - | !core/ndarray-1.0.0 source: 0 shape: [256, 256] datatype: float64 byteorder: little mask: !core/ndarray-1.0.0 source: 1 shape: [256, 256] datatype: bool8 byteorder: little - - An array where the data is stored in the first block in another ASDF file. - | !core/ndarray-1.0.0 source: external.asdf shape: [256, 256] datatype: float64 byteorder: little definitions: scalar-datatype: description: | Describes the type of a single element. There is a set of numeric types, each with a single identifier: - `int8`, `int16`, `int32`, `int64`: Signed integer types, with the given bit size. 
- `uint8`, `uint16`, `uint32`, `uint64`: Unsigned integer types, with the given bit size. - `float32`: Single-precision floating-point type or "binary32", as defined in IEEE 754. - `float64`: Double-precision floating-point type or "binary64", as defined in IEEE 754. - `complex64`: Complex number where the real and imaginary parts are each single-precision floating-point ("binary32") numbers, as defined in IEEE 754. - `complex128`: Complex number where the real and imaginary parts are each double-precision floating-point ("binary64") numbers, as defined in IEEE 754. There are two distinct fixed-length string types, which must be indicated with a 2-element array where the first element is an identifier for the string type, and the second is a length: - `ascii`: A string containing ASCII text (all codepoints < 128), where each character is 1 byte. - `ucs4`: A string containing unicode text in the UCS-4 encoding, where each character is always 4 bytes long. Here the number of bytes used is 4 times the given length. anyOf: - type: string enum: [int8, uint8, int16, uint16, int32, uint32, int64, uint64, float32, float64, complex64, complex128, bool8] - type: array items: - type: string enum: [ascii, ucs4] - type: integer minimum: 0 minLength: 2 maxLength: 2 datatype: description: | The data format of the array elements. May be a single scalar datatype, or may be a nested list of datatypes. When a list, each field may have a name. anyOf: - $ref: "#/definitions/scalar-datatype" - type: array items: anyOf: - $ref: "#/definitions/scalar-datatype" - type: object properties: name: type: string pattern: "[A-Za-z_][A-Za-z0-9_]*" description: The name of the field datatype: $ref: "#/definitions/datatype" byteorder: type: string enum: [big, little] description: | The byteorder for the field. If not provided, the byteorder of the datatype as a whole will be used. shape: type: array items: type: integer minimum: 0 required: [datatype] inline-data: description: | Inline data is stored in YAML format directly in the tree, rather than referencing a binary block. It is made out of nested lists. If the datatype of the array is not specified, it is inferred from the array contents. Type inference is supported only for homogeneous arrays, not tables. - If any of the elements in the array are YAML strings, the `datatype` of the entire array is `ucs4`, with the width of the largest string in the column, otherwise... - If any of the elements in the array are complex numbers, the `datatype` of the entire column is `complex128`, otherwise... - If any of the types in the column are numbers with a decimal point, the `datatype` of the entire column is `float64`, otherwise.. - If any of the types in the column are integers, the `datatype` of the entire column is `int64`, otherwise... - The `datatype` of the entire column is `bool8`. Masked values may be included in the array using `null`. If an explicit mask array is also provided, it takes precedence. type: array items: anyOf: - type: number - type: string - type: "null" - $ref: "complex-1.0.0" - $ref: "#/definitions/inline-data" - type: boolean anyOf: - $ref: "#/definitions/inline-data" - type: object properties: source: description: | The source of the data. - If an integer: If positive, the zero-based index of the block within the same file. If negative, the index from the last block within the same file. For example, a source of `-1` corresponds to the last block in the same file. - If a string, a URI to an external ASDF file containing the block data. 
Relative URIs and ``file:`` and ``http:`` protocols must be supported. Other protocols may be supported by specific library implementations. The ability to reference block data in an external ASDF file is intentionally limited to the first block in the external ASDF file, and is intended only to support the needs of [exploded](ref:exploded). For the more general case of referencing data in an external ASDF file, use tree [references](ref:references). anyOf: - type: integer - type: string format: uri data: description: | The data for the array inline. If `datatype` and/or `shape` are also provided, they must match the data here and can be used as a consistency check. `strides`, `offset` and `byteorder` are meaningless when `data` is provided. $ref: "#/definitions/inline-data" shape: description: | The shape of the array. The first entry may be the string `*`, indicating that the length of the first index of the array will be automatically determined from the size of the block. This is used for streaming support. type: array items: anyOf: - type: integer minimum: 0 - enum: ['*'] datatype: description: | The data format of the array elements. $ref: "#/definitions/datatype" byteorder: description: > The byte order (big- or little-endian) of the array data. type: string enum: [big, little] offset: description: > The offset, in bytes, within the data for this start of this view. type: integer minimum: 0 default: 0 strides: description: > The number of bytes to skip in each dimension. If not provided, the array is assumed by be contiguous and in C order. If provided, must be the same length as the shape property. type: array items: anyOf: - type: integer minimum: 1 - type: integer maximum: -1 mask: description: > Describes how missing values in the array are stored. If a scalar number, that number is used to represent missing values. If an ndarray, the given array provides a mask, where non-zero values represent missing values in this array. The mask array must be broadcastable to the dimensions of this array. anyOf: - type: number - $ref: "complex-1.0.0" - allOf: - $ref: "ndarray-1.0.0" - datatype: bool8 dependencies: source: [shape, datatype, byteorder] propertyOrder: [source, data, mask, datatype, byteorder, shape, offset, strides]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/constant-1.0.0.yaml0000644000175000017500000000045113243564222025037 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/constant-1.0.0" tag: "tag:stsci.edu:asdf/core/constant-1.0.0" title: Specify that a value is a constant. description: | Used as a utility to indicate that value is a literal constant. asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/complex-1.0.0.yaml0000644000175000017500000000163113243564222024656 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/complex-1.0.0" title: Complex number value. description: | Represents a complex number matching the following EBNF grammar ``` plus-or-minus = "+" | "-" suffix = "J" | "j" | "I" | "i" complex = [ieee754] [plus-or-minus ieee754 suffix] ``` Where `ieee754` is a floating point number in IEEE 754 decimal format. Though `J`, `j`, `I` and `i` must be supported on reading, it is recommended to use `i` on writing. 
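# Additional forms accepted by the pattern above (illustrative, not among the
# schema's own examples): exponent notation is allowed in either part, e.g.
# "3e2+1.5e-3i".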
examples: - - 1 real, -1 imaginary - "!core/complex-1.0.0 1-1j" - - 0 real, 1 imaginary - "!core/complex-1.0.0 1J" - - -1 real, 0 imaginary - "!core/complex-1.0.0 -1" tag: "tag:stsci.edu:asdf/core/complex-1.0.0" type: string pattern: "([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)?([-+][0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?[JjIi])?" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/history_entry-1.0.0.yaml0000644000175000017500000000136013246003557026132 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/history_entry-1.0.0" title: | An entry in the file history. tag: "tag:stsci.edu:asdf/core/history_entry-1.0.0" type: object properties: description: description: | A description of the transformation performed. type: string time: description: | A timestamp for the operation, in UTC. type: string format: date-time software: description: | One or more descriptions of the software that performed the operation. anyOf: - $ref: "software-1.0.0" - type: array items: $ref: "software-1.0.0" requiredProperties: [description] additionalProperties: trueasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/column-1.0.0.yaml0000644000175000017500000000203013246003557024500 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/column-1.0.0" tag: "tag:stsci.edu:asdf/core/column-1.0.0" title: > A column in a table. description: | Each column contains a name and an array of data, and an optional description and unit. type: object properties: name: description: | The name of the column. Each name in a [table](http://stsci.edu/schemas/asdf/core/table-1.0.0) must be unique. type: string pattern: "[A-Za-z_][A-Za-z0-9_]*" data: description: | The array data for the column. allOf: - $ref: ndarray-1.0.0 description: description: | An optional description of the column. type: string default: '' unit: description: An optional unit for the column. allOf: - $ref: ../unit/unit-1.0.0 meta: description: Additional free-form metadata about the column. type: object default: {} requiredProperties: [name, data] additionalProperties: falseasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/core/table-1.0.0.yaml0000644000175000017500000000555213246003557024306 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/core/table-1.0.0" tag: "tag:stsci.edu:asdf/core/table-1.0.0" title: > A table. description: | A table is represented as a list of columns, where each entry is a [column](ref:http://stsci.edu/schemas/asdf/core/column-1.0.0) object, containing the data and some additional information. The data itself may be stored inline as text, or in binary in either row- or column-major order by use of the `strides` property on the individual column arrays. Each column in the table must have the same first (slowest moving) dimension. 
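The row-major storage mentioned above works because each column can be a strided view into a single shared block. A minimal numpy sketch of that relationship, with illustrative column sizes that are unrelated to the schema examples that follow:

```
import numpy as np

# Three columns packed row by row into one record array: every column is a
# strided view whose stride equals the total size of a row.
rows = np.zeros(3, dtype=[('ra', '<f8'), ('dec', '<f8'), ('name', 'S4')])

row_size = rows.dtype.itemsize                 # 8 + 8 + 4 = 20 bytes per row
ra = rows['ra']                                # view into the shared buffer
print(row_size)                                # 20
print(ra.strides)                              # (20,) -- per-column stride
print(rows['dec'].__array_interface__['data'][0]
      - rows.__array_interface__['data'][0])   # 8 -- the 'dec' column offset
```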
examples: - - A table stored in column-major order, with each column in a separate block - | !core/table-1.0.0 columns: - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: float64 byteorder: little shape: [3] description: RA meta: {foo: bar} name: a unit: !unit/unit-1.0.0 deg - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 1 datatype: float64 byteorder: little shape: [3] description: DEC name: b - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 2 datatype: [ascii, 1] byteorder: big shape: [3] description: The target name name: c - - A table stored in row-major order, all stored in the same block - | !core/table-1.0.0 columns: - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: float64 byteorder: little shape: [3] strides: [13] description: RA meta: {foo: bar} name: a unit: !unit/unit-1.0.0 deg - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: float64 byteorder: little shape: [3] offset: 4 strides: [13] description: DEC name: b - !core/column-1.0.0 data: !core/ndarray-1.0.0 source: 0 datatype: [ascii, 1] byteorder: big shape: [3] offset: 12 strides: [13] description: The target name name: c type: object properties: columns: description: | A list of columns in the table. type: array items: $ref: column-1.0.0 meta: description: | Additional free-form metadata about the table. type: object default: {} additionalProperties: false requiredProperties: [data] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/0000755000175000017500000000000013246031665022756 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.0.0.yaml0000644000175000017500000000164713243564222026160 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.0.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.0.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.0.0.yaml0000644000175000017500000000113713243564222025427 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.0.0" tag: "tag:stsci.edu:asdf/transform/power-1.0.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. 
allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.1.0.yaml0000644000175000017500000000136213243564222030524 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.1.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.0.0.yaml0000644000175000017500000000122613243564222026720 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.0.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.0.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/plate_carree-1.1.0.yaml0000644000175000017500000000122613243564222026721 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/plate_carree-1.1.0" tag: "tag:stsci.edu:asdf/transform/plate_carree-1.1.0" title: | The plate carrée projection. description: | Corresponds to the `CAR` projection in the FITS WCS standard. The main virtue of this transformation is its simplicity. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.1.0.yaml0000644000175000017500000000223213243564222025736 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.1.0" tag: "tag:stsci.edu:asdf/transform/compose-1.1.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. 
examples: - - A series of transforms - | !transform/compose-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.1.0.yaml0000644000175000017500000000114513243564222030010 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.1.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.0.0.yaml0000644000175000017500000000245213243564222025367 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.0.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.1.0.yaml0000644000175000017500000000147513243564222025031 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.1.0" tag: "tag:stsci.edu:asdf/transform/add-1.1.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. 
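A correspondingly hedged sketch of the parallel-then-add behaviour, assuming astropy.modeling's compound-model arithmetic is what this schema captures:

```
from astropy.modeling.models import Shift

# `+` evaluates both models on the same input and adds the results.
summed = Shift(1.0) + Shift(2.0)
print(summed(3.0))                             # (3 + 1) + (3 + 2) = 9.0
```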
examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.0.0.yaml0000644000175000017500000000224413243564222031201 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.0.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.0.0" - type: object properties: mu: type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/pseudocylindrical-1.0.0.yaml0000644000175000017500000000114513243564222030007 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudocylindrical-1.0.0" title: | Base class of all pseudocylindrical projections. description: | Pseudocylindrical projections are like cylindrical projections except the parallels of latitude are projected at diminishing lengths toward the polar regions in order to reduce lateral distortion there. Consequently, the meridians are curved. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.0.0.yaml0000644000175000017500000000217213243564222030163 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.0.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
$ref: "conic-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.0.0.yaml0000644000175000017500000000104013243564222026010 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.0.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: angle: type: number description: Angle, in degrees. required: [angle]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.0.0.yaml0000644000175000017500000000377013243564222026564 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.0.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.0.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 2 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.0.0.yaml0000644000175000017500000000153413243564222027075 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.0.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.0.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
$ref: "pseudocylindrical-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.1.0.yaml0000644000175000017500000000147213243564222027135 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.1.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.1.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.0.0.yaml0000644000175000017500000000136513243564222026232 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.0.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.0.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.1.0.yaml0000644000175000017500000000261613243564222030526 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.1.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: type: number description: | Look angle, in degrees. 
default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/compose-1.0.0.yaml0000644000175000017500000000223213243564222025735 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/compose-1.0.0" tag: "tag:stsci.edu:asdf/transform/compose-1.0.0" title: > Perform a list of subtransforms in series. description: | The output of each subtransform is fed into the input of the next subtransform. The number of output dimensions of each subtransform must be equal to the number of input dimensions of the next subtransform in list. To reorder or add/drop axes, insert `remap_axes` transforms in the subtransform list. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, by reversing the list of transforms and applying the inverse of each. examples: - - A series of transforms - | !transform/compose-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.1.0.yaml0000644000175000017500000000124313243564222026063 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.1.0" title: | Base class of all quadcube projections. description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.0.0.yaml0000644000175000017500000000323213243564222025723 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.0.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.0.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. type: object properties: lookup_table: description: > Table values. anyOf: - $ref: ../core/ndarray-1.0.0 - type: array points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear" and "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used. type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". 
type: number required: [lookup_table]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.1.0.yaml0000644000175000017500000000162213243564222030270 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.1.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.0.0.yaml0000644000175000017500000000076013243564222026125 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.0.0" tag: "tag:stsci.edu:asdf/transform/identity-1.0.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.0.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.1.0.yaml0000644000175000017500000000137313243564222030003 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.1.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.1.0.yaml0000644000175000017500000000131113243564222026102 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.1.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.1.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
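A quick consistency check of the two Mercator formulas above at $\theta = 60^\circ$ (illustrative arithmetic only): $$y = \frac{180^{\circ}}{\pi}\ln \tan 75^{\circ} \approx 75.46^{\circ}, \qquad 2 \tan^{-1}\left(e^{75.46^\circ \pi / 180^{\circ}}\right) - 90^{\circ} \approx 2 \cdot 75^{\circ} - 90^{\circ} = 60^{\circ}$$ confirming that the sky-to-pixel and pixel-to-sky mappings invert one another.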
$ref: "cylindrical-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.0.0.yaml0000644000175000017500000000127113243564222026110 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0" title: | Base class of all zenithal (or azimuthal) projections. description: | Zenithal projections are completely specified by defining the radius as a function of native latitude, $R_\theta$. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y, x) \\ R_\theta &= \sqrt{x^2 + y^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin \phi \\ y &= R_\theta \cos \phi$$ allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.0.0.yaml0000644000175000017500000000055313243564222025363 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.0.0" tag: "tag:stsci.edu:asdf/transform/scale-1.0.0" title: > A Scale model. description: > Multiply the input by a factor. type: object properties: factor: type: number description: Multiplication factor. required: [factor]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.1.0.yaml0000644000175000017500000000141513243564222027777 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.1.0" title: | Colles' conic perspecitve projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.1.0.yaml0000644000175000017500000000232613243564222026460 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.1.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.1.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.1.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.1.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] type: object properties: coefficients: description: | An array with coefficients. 
anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [coefficients] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/identity-1.1.0.yaml0000644000175000017500000000076013243564222026126 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/identity-1.1.0" tag: "tag:stsci.edu:asdf/transform/identity-1.1.0" title: > The identity transform. description: > Invertibility: The inverse of this transform is also the identity transform. allOf: - $ref: "transform-1.1.0" - type: object properties: n_dims: type: integer default: 1 description: | The number of dimensions. asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equidistant-1.0.0.yaml0000644000175000017500000000137313243564222030002 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equidistant-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_equidistant-1.0.0" title: | Conic equidistant projection. description: | Corresponds to the `COD` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{180^\circ}{\pi} \frac{\sin\theta_a\sin\eta}{\eta} \\ R_\theta &= \theta_a - \theta + \eta\cot\eta\cot\theta_a \\ Y_0 = \eta\cot\eta\cot\theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.1.0.yaml0000644000175000017500000000063713243564222026277 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.1.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.1.0" title: | Polyconic projection. description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.1.0.yaml0000644000175000017500000000143713243564222030171 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.1.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.1.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.0.0.yaml0000644000175000017500000000064013243564222026604 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.0.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. 
allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/concatenate-1.1.0.yaml0000644000175000017500000000377013243564222026565 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/concatenate-1.1.0" tag: "tag:stsci.edu:asdf/transform/concatenate-1.1.0" title: > Send axes to different subtransforms. description: | Transforms a set of separable inputs by splitting the axes apart, sending them through the given subtransforms in parallel, and finally concatenating the subtransform output axes back together. The input axes are assigned to each subtransform in order. If the number of input axes is unequal to the sum of the number of input axes of all of the subtransforms, that is considered an error case. The output axes from each subtransform are appended together to make up the resulting output axes. For example, given 5 input axes, and 3 subtransforms with the following orders: 1. transform A: 2 in -> 2 out 1. transform B: 1 in -> 2 out 1. transform C: 2 in -> 1 out The transform is performed as follows: ``` : i0 i1 i2 i3 i4 : | | | | | : +---------+ +---------+ +----------+ : | A | | B | | C | : +---------+ +---------+ +----------+ : | | | | | : o0 o1 o2 o3 o4 ``` If reordering of the input or output axes is required, use in series with the `remap_axes` transform. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The example in the description - | !transform/concatenate-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 2 n_outputs: 1 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.0.0.yaml0000644000175000017500000000056313243564222025412 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.0.0" tag: "tag:stsci.edu:asdf/transform/shift-1.0.0" title: > A Shift opeartion. description: > Apply an offset in one direction. type: object properties: offset: type: number description: Offset in one direction. required: [offset]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.1.0.yaml0000644000175000017500000000607313243564222027646 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.1.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.1.0" title: > Represents a discontinuous transform. description: | Maps regions to transgorms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2". 
- | !transform/regions_selector-1.1.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.1.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0, 1, 1] - !transform/concatenate-1.1.0 forward: - !transform/concatenate-1.1.0 forward: - !transform/shift-1.1.0 {offset: 1.0} - !transform/shift-1.1.0 {offset: 2.0} - !transform/shift-1.1.0 {offset: 3.0} 2: !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0, 1, 1] - !transform/concatenate-1.1.0 forward: - !transform/concatenate-1.1.0 forward: - !transform/scale-1.1.0 {factor: 2.0} - !transform/scale-1.1.0 {factor: 3.0} - !transform/scale-1.1.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.1.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.1.0](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.1.0) $ref: "./label_mapper-1.1.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to trransforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.1.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.0.0.yaml0000644000175000017500000000232413243564222025522 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.0.0" tag: "tag:stsci.edu:asdf/transform/affine-1.0.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.1.0.yaml0000644000175000017500000000114213243564222025704 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/generic-1.1.0" tag: "tag:stsci.edu:asdf/transform/generic-1.1.0" title: > A generic transform. description: > This is used **entirely** for bootstrapping purposes so one can create composite models including transforms that haven't yet been written. **IT WILL NOT BE IN THE FINAL VERSION OF THE SPEC**. 
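Returning to the affine-1.0.0 schema above: in the Python stack the corresponding model is plausibly astropy.modeling's `AffineTransformation2D`, whose `matrix` and `translation` parameters line up with the schema properties; the sketch below assumes that mapping.

```
from astropy.modeling.models import AffineTransformation2D

# A 2-axis affine transform: multiply by `matrix`, then add `translation`.
aff = AffineTransformation2D(matrix=[[1.0, 0.0], [0.0, 1.0]],
                             translation=[2.0, 3.0])
print(aff(1.0, 1.0))                           # (3.0, 4.0)
```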
allOf: - $ref: "transform-1.1.0" - type: object properties: n_inputs: type: integer n_outputs: type: integer required: [n_inputs, n_outputs] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.0.0.yaml0000644000175000017500000000200413243564222026300 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.0.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string domain: description: | The domain (range of valid inputs) to the transform. Each entry in the list corresponds to an input dimension. type: array items: $ref: "domain-1.0.0" inverse: description: | Explicitly sets the inverse transform of this transform. If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.0.0" additionalProperties: true asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.1.0.yaml0000644000175000017500000000070713243564222030260 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.1.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/parabolic-1.1.0.yaml0000644000175000017500000000136513243564222026233 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/parabolic-1.1.0" tag: "tag:stsci.edu:asdf/transform/parabolic-1.1.0" title: | Parabolic projection. description: | Corresponds to the `PAR` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{180^\circ}{\pi} \frac{x}{1 - 4(y / 180^\circ)^2} \\ \theta &= 3 \sin^{-1}\left(\frac{y}{180^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \left(2\cos\frac{2\theta}{3} - 1\right) \\ y &= 180^\circ \sin \frac{\theta}{3}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.1.0.yaml0000644000175000017500000000246013243564222031724 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.1.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. 
The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.1.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/mercator-1.0.0.yaml0000644000175000017500000000131113243564222026101 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/mercator-1.0.0" tag: "tag:stsci.edu:asdf/transform/mercator-1.0.0" title: | The Mercator projection. description: | Corresponds to the `MER` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= 2 \tan^{-1}\left(e^{y \pi / 180^{\circ}}\right)-90^{\circ}$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\ln \tan \left(\frac{90^{\circ} + \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "cylindrical-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.0.0.yaml0000644000175000017500000001011213243564222026707 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.0.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0). The [label_mapper](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0) returns the label corresponding to given inputs. The [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices are to labels. - | !transform/label_mapper-1.0.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map numbers dictionary to transforms which return labels. - | !transform/label_mapper-1.0.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 6.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 2.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.0.0 mapping: [0] n_inputs: 2 - - Map a number wihtin a range of numbers to transforms which return labels. 
- | !transform/label_mapper-1.0.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 6.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 2.0} - !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [1] - !transform/shift-1.0.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.0.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.0.0" - type: object properties: mapper: description: | An array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value 0 or " ". anyOf: - $ref: "../core/ndarray-1.0.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.0.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.0.0" description: | [mapping](ref:http://stsci.edu/schemas/asdf/transform/remap-axes-1.0.0) atol: type: number description: | absolute tolerance to compare keys in mapper. required: [mapper] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.1.0.yaml0000644000175000017500000000164413243564222026126 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.1.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.1.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. - | !transform/subtract-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.0.0.yaml0000644000175000017500000000121113243564222027611 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.0.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.0.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/quadcube-1.0.0.yaml0000644000175000017500000000124313243564222026062 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quadcube-1.0.0" title: | Base class of all quadcube projections. 
description: | Quadrilateralized spherical cube (quad-cube) projections belong to the class of polyhedral projections in which the sphere is projected onto the surface of an enclosing polyhedron. The six faces of the quad-cube projections are numbered and laid out as: ``` 0 4 3 2 1 4 3 2 5 ``` allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equal_area-1.0.0.yaml0000644000175000017500000000162213243564222030267 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equal_area-1.0.0" title: | The zenithal equal area projection. description: | Corresponds to the `ZEA` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - 2 \sin^{-1} \left(\frac{\pi R_\theta}{360^\circ}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta &= \frac{180^\circ}{\pi} \sqrt{2(1 - \sin\theta)} \\ &= \frac{360^\circ}{\pi} \sin\left(\frac{90^\circ - \theta}{2}\right)$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.0.0.yaml0000644000175000017500000000071413243564222031451 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.0.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/regions_selector-1.0.0.yaml0000644000175000017500000000607313243564222027645 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/regions_selector-1.0.0" tag: "tag:stsci.edu:asdf/transform/regions_selector-1.0.0" title: > Represents a discontinuous transform. description: | Maps regions to transgorms and evaluates the transforms with the corresponding inputs. examples: - - Create a regions_selector schema for 2 regions, labeled "1" and "2". 
- | !transform/regions_selector-1.0.0 inputs: [x, y] label_mapper: !transform/label_mapper-1.0.0 mapper: !core/ndarray-1.0.0 datatype: int8 data: [[0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0], [0, 1, 1, 0, 2, 0]] outputs: [ra, dec, lam] selector: 1: !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0, 1, 1] - !transform/concatenate-1.0.0 forward: - !transform/concatenate-1.0.0 forward: - !transform/shift-1.0.0 {offset: 1.0} - !transform/shift-1.0.0 {offset: 2.0} - !transform/shift-1.0.0 {offset: 3.0} 2: !transform/compose-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0, 1, 1] - !transform/concatenate-1.0.0 forward: - !transform/concatenate-1.0.0 forward: - !transform/scale-1.0.0 {factor: 2.0} - !transform/scale-1.0.0 {factor: 3.0} - !transform/scale-1.0.0 {factor: 3.0} undefined_transform_value: .nan allOf: - $ref: "transform-1.0.0" - type: object properties: label_mapper: description: | An instance of [label_mapper-1.0.0](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.0.0) $ref: "./label_mapper-1.0.0" inputs: description: | Names of inputs. type: array items: type: string outputs: description: | Names of outputs. type: array items: type: string selector: description: | A mapping of regions to trransforms. type: object properties: labels: description: | An array of unique region labels. type: array items: type: - integer - string transforms: description: | A transform for each region. The order should match the order of labels. type: array items: $ref: "transform-1.0.0" undefined_transform_value: description: | Value to be returned if there's no transform defined for the inputs. type: number required: [label_mapper, inputs, outputs, selector] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.1.0.yaml0000644000175000017500000000162213243564222030747 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.1.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.0.0.yaml0000644000175000017500000000104313243564222026120 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.0.0" tag: "tag:stsci.edu:asdf/transform/constant-1.0.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. 
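Returning to the regions_selector/label_mapper pair above, here is a plain-Python sketch of the dispatch they describe (the real implementation lives in gwcs; all names and values below are illustrative only).

```
import numpy as np

# A label array (the label_mapper) and a per-label transform table (the
# selector); label 0 means "no region".
label_map = np.array([[0, 1, 1, 0, 2, 0],
                      [0, 1, 1, 0, 2, 0]])
selector = {1: lambda x, y: (x + 1.0, y + 2.0),
            2: lambda x, y: (x * 2.0, y * 3.0)}
undefined_transform_value = float('nan')

def evaluate(x, y):
    """Apply the transform whose region contains pixel (x, y)."""
    label = int(label_map[int(y), int(x)])
    if label not in selector:                  # unlabeled pixel
        return (undefined_transform_value, undefined_transform_value)
    return selector[label](x, y)

print(evaluate(1, 0))                          # region 1 -> (2.0, 2.0)
print(evaluate(4, 0))                          # region 2 -> (8.0, 0.0)
print(evaluate(0, 0))                          # no region -> (nan, nan)
```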
allOf: - $ref: "transform-1.0.0" - type: object properties: value: type: number required: [value]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.0.0.yaml0000644000175000017500000000121313243564222025232 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.0.0" tag: "tag:stsci.edu:asdf/transform/airy-1.0.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.0.0" - type: object properties: theta_b: type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/pseudoconic-1.1.0.yaml0000644000175000017500000000064013243564222026605 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/pseudoconic-1.1.0" title: | Base class of all pseudoconic projections. description: | Pseudoconics are a subclass of conics with concentric parallels. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/label_mapper-1.1.0.yaml0000644000175000017500000001074413243564222026723 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/label_mapper-1.1.0" tag: "tag:stsci.edu:asdf/transform/label_mapper-1.1.0" title: > Represents a mapping from a coordinate value to a label. description: | A label mapper instance maps inputs to a label. It is used together with [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.1.0). The [label_mapper](ref:http://stsci.edu/schemas/asdf/transform/label_mapper-1.1.0) returns the label corresponding to given inputs. The [regions_selector](ref:http://stsci.edu/schemas/asdf/transform/regions_selector-1.1.0) returns the transform corresponding to this label. This maps inputs (e.g. pixels on a detector) to transforms uniquely. examples: - - Map array indices to labels. - | !transform/label_mapper-1.1.0 mapper: !core/ndarray-1.0.0 [[1, 0, 2], [1, 0, 2], [1, 0, 2]] - - Map a dictionary of numbers to transforms which return labels. - | !transform/label_mapper-1.1.0 mapper: !!omap - !!omap labels: [-1.67833272, -1.9580548, -1.118888] - !!omap models: - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 6.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 2.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.1.0 mapping: [0] n_inputs: 2 - - Map a number within a range of numbers to transforms which return labels.
- | !transform/label_mapper-1.1.0 mapper: !!omap - !!omap labels: - [3.2, 4.1] - [2.67, 2.98] - [1.95, 2.3] - !!omap models: - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 6.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 2.0} - !transform/compose-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [1] - !transform/shift-1.1.0 {offset: 4.0} inputs: [x, y] inputs_mapping: !transform/remap_axes-1.1.0 mapping: [0] n_inputs: 2 allOf: - $ref: "transform-1.1.0" - type: object properties: mapper: description: | A mapping of inputs to labels. In the general case this is a `astropy.modeling.core.Model`. It could be a numpy array with the shape of the detector/observation. Pixel values are of type integer or string and represent region labels. Pixels which are not within any region have value ``no_label``. It could be a dictionary which maps tuples to labels or floating point numbers to labels. anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "transform-1.1.0" - type: object properties: labels: type: array items: anyOf: - type: number - type: array items: type: number minLength: 2 maxLength: 2 models: type: array items: $ref: "transform-1.1.0" inputs: type: array items: type: string description: | Names of inputs. inputs_mapping: $ref: "transform-1.1.0" description: | [mapping](ref:http://stsci.edu/schemas/asdf/transform/remap-axes-1.1.0) atol: type: number description: | absolute tolerance to compare keys in mapper. no_label: description: | Fill in value for missing output. anyOf: - type: number - type: string required: [mapper] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_equidistant-1.0.0.yaml0000644000175000017500000000136213243564222030523 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_equidistant-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_equidistant-1.0.0" title: | The zenithal equidistant projection. description: | Corresponds to the `ARC` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^\circ - R_\theta$$ And the sky-to-pixel transformation is defined as: $$R_\theta = 90^\circ - \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/transform-1.1.0.yaml0000644000175000017500000000200413243564222026301 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/transform-1.1.0" title: > A generic type used to mark where other transforms are accepted. description: > These objects are designed to be nested in arbitrary ways to build up transformation pipelines out of a number of low-level pieces. type: object properties: name: description: | A user-friendly name for the transform, to give it extra meaning. type: string domain: description: | The domain (range of valid inputs) to the transform. Each entry in the list corresponds to an input dimension. type: array items: $ref: "domain-1.0.0" inverse: description: | Explicitly sets the inverse transform of this transform. 
If the transform has a direct analytic inverse, this property is usually not necessary, as the ASDF-reading tool can provide it automatically. $ref: "transform-1.1.0" additionalProperties: true asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/polynomial-1.0.0.yaml0000644000175000017500000000232613243564222026457 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polynomial-1.0.0" tag: "tag:stsci.edu:asdf/transform/polynomial-1.0.0" title: > A Polynomial model. description: | A polynomial model represented by its coefficients stored in an ndarray of shape $(n+1)$ for univariate polynomials or $(n+1, n+1)$ for polynomials with 2 variables, where $n$ is the highest total degree of the polynomial. $$P = \sum_{i, j=0}^{i+j=n}c_{ij} * x^{i} * y^{j}$$ Invertibility: This transform is not automatically invertible. examples: - - $P = 1.2 + 0.3 * x + 56.1 * x^{2}$ - | !transform/polynomial-1.0.0 coefficients: !core/ndarray-1.0.0 [1.2, 0.3, 56.1] - - $P = 1.2 + 0.3 * x + 3 * x * y + 2.1 * y^{2}$ - | !transform/polynomial-1.0.0 coefficients: !core/ndarray-1.0.0 [[1.2, 0.0, 2.1], [0.3, 3.0, 0.0], [0.0, 0.0, 0.0]] type: object properties: coefficients: description: | An array with coefficients. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array required: [coefficients] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/tabular-1.1.0.yaml0000644000175000017500000000323213243564222025724 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tabular-1.1.0" tag: "tag:stsci.edu:asdf/transform/tabular-1.1.0" title: > A Tabular model. description: | Tabular represents a lookup table with values corresponding to some grid points. It computes the interpolated values corresponding to the given inputs. Three methods of interpolation are supported - "linear", "nearest" and "splinef2d". It supports extrapolation. type: object properties: lookup_table: description: > Table values. anyOf: - $ref: ../core/ndarray-1.0.0 - type: array points: type: array items: anyOf: - type: array - $ref: ../core/ndarray-1.0.0 description: | Grid values - each row in the array corresponds to a dimension in the lookup table. The grid does not have to be regular. method: description: | Method of interpolation. Supported are "linear" and "nearest", and "splinef2d". "splinef2d" is only supported for 2-dimensional data. type: string enum: ["linear", "nearest", "splinef2d"] default: "linear" bounds_error: description: | If True, when interpolated values are requested outside of the domain of the input data, a ValueError is raised. If False, then "fill_value" is used. type: boolean default: true fill_value: description: | If provided, the value to use for points outside of the interpolation domain. If None, values outside the domain are extrapolated. Extrapolation is not supported by method "splinef2d". type: number required: [lookup_table]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.1.0.yaml0000644000175000017500000000163613243564222027552 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.1.0" title: | Alber's conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. 
See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ where: $$\gamma = \sin \theta_1 + \sin \theta_2$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/scale-1.1.0.yaml0000644000175000017500000000055313243564222025364 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/scale-1.1.0" tag: "tag:stsci.edu:asdf/transform/scale-1.1.0" title: > A Scale model. description: > Multiply the input by a factor. type: object properties: factor: type: number description: Multiplication factor. required: [factor]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/subtract-1.0.0.yaml0000644000175000017500000000164413243564222026125 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/subtract-1.0.0" tag: "tag:stsci.edu:asdf/transform/subtract-1.0.0" title: > Perform a list of subtransforms in parallel and then subtract their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through subtraction. - | !transform/subtract-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.0.0.yaml0000644000175000017500000000140213243564222026077 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.0.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.0.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.0.0.yaml0000644000175000017500000000072613243564222031250 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.0.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. 
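As a concrete check of the coefficient layout in the polynomial-1.0.0 schema earlier in this section, the sketch below (an illustrative helper, not the astropy implementation) evaluates $P = \sum_{i,j} c_{ij} x^i y^j$ directly from the coefficient matrix; it reproduces the schema's own two-variable example.

```
import numpy as np

def eval_poly2d(coefficients, x, y):
    """Evaluate P = sum_{i,j} c[i, j] * x**i * y**j for a square coefficient matrix.

    Rows index the power of x, columns the power of y; entries with i + j > n are zero.
    """
    c = np.asarray(coefficients, dtype=float)
    result = 0.0
    for i in range(c.shape[0]):
        for j in range(c.shape[1]):
            result += c[i, j] * x**i * y**j
    return result

# Coefficients from the schema's second example: P = 1.2 + 0.3*x + 3*x*y + 2.1*y**2
coeffs = [[1.2, 0.0, 2.1],
          [0.3, 3.0, 0.0],
          [0.0, 0.0, 0.0]]
print(eval_poly2d(coeffs, 2.0, 1.0))   # -> 9.9 (= 1.2 + 0.6 + 6.0 + 2.1)
```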
$ref: "quadcube-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/hammer_aitoff-1.1.0.yaml0000644000175000017500000000153413243564222027076 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/hammer_aitoff-1.1.0" tag: "tag:stsci.edu:asdf/transform/hammer_aitoff-1.1.0" title: | Hammer-Aitoff projection. description: | Corresponds to the `AIT` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= 2 \arg \left(2Z^2 - 1, \frac{\pi}{180^\circ} \frac{Z}{2}x\right) \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^\circ}yZ\right)$$ And the sky-to-pixel transformation is defined as: $$x &= 2 \gamma \cos \theta \sin \frac{\phi}{2} \\ y &= \gamma \sin \theta$$ where: $$\gamma = \frac{180^\circ}{\pi} \sqrt{\frac{2}{1 + \cos \theta \cos(\phi / 2)}}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudocylindrical-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.1.0.yaml0000644000175000017500000000106213243564222027120 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.1.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.1.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.1.0.yaml0000644000175000017500000000143113243564222025723 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.1.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.1.0" title: | HEALPix projection. description: | Corresponds to the `HPX` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/gnomonic-1.1.0.yaml0000644000175000017500000000140213243564222026100 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/gnomonic-1.1.0" tag: "tag:stsci.edu:asdf/transform/gnomonic-1.1.0" title: | The gnomonic projection. description: | Corresponds to the `TAN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform.
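The gnomonic relations just above reduce to a few lines of code. The following is a minimal numpy sketch of the TAN pixel-to-sky step, assuming pixel offsets and angles in degrees (illustration only, not the reference implementation):

```
import numpy as np

def gnomonic_pix2sky(x, y):
    """TAN pixel-to-sky: phi = arg(-y, x), theta = atan(180 / (pi * R_theta)), in degrees."""
    r_theta = np.hypot(x, y)                                 # R_theta = sqrt(x**2 + y**2)
    phi = np.degrees(np.arctan2(x, -y))                      # native longitude
    theta = np.degrees(np.arctan2(180.0, np.pi * r_theta))   # native latitude
    return phi, theta

# A point roughly 10 degrees from the native pole.
print(gnomonic_pix2sky(0.0, -10.0))   # -> approximately (0.0, 80.1)
```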
$ref: "zenithal-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/airy-1.1.0.yaml0000644000175000017500000000121313243564222025233 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/airy-1.1.0" tag: "tag:stsci.edu:asdf/transform/airy-1.1.0" title: | The Airy projection. description: | Corresponds to the `AIR` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0) for the definition of the full transformation. allOf: - $ref: "zenithal-1.1.0" - type: object properties: theta_b: type: number description: | The latitude $\theta_b$ at which to minimize the error, in degrees. default: 90 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/generic-1.0.0.yaml0000644000175000017500000000114213243564222025703 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/generic-1.0.0" tag: "tag:stsci.edu:asdf/transform/generic-1.0.0" title: > A generic transform. description: > This is used **entirely** for bootstrapping purposes so one can create composite models including transforms that haven't yet been written. **IT WILL NOT BE IN THE FINAL VERSION OF THE SPEC**. allOf: - $ref: "transform-1.0.0" - type: object properties: n_inputs: type: integer n_outputs: type: integer required: [n_inputs, n_outputs] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.0.0.yaml0000644000175000017500000000062513243564222026571 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.0.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic-1.1.0.yaml0000644000175000017500000000245213243564222025370 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic-1.1.0" title: | Base class of all conic projections. description: | In conic projections, the sphere is thought to be projected onto the surface of a cone which is then opened out. In a general sense, the pixel-to-sky transformation is defined as: $$\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right) / C \\ R_\theta &= \mathrm{sign} \theta_a \sqrt{x^2 + (Y_0 - y)^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin (C \phi) \\ y &= R_\theta \cos (C \phi) + Y_0$$ where $C$ is the "constant of the cone": $$C = \frac{180^\circ \cos \theta}{\pi R_\theta}$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky sigma: type: number description: | $(\theta_1 + \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. default: 0 delta: type: number description: | $(\theta_1 - \theta_2) / 2$ where $\theta_1$ and $\theta_2$ are the latitudes of the standard parallels, in degrees. 
default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/polyconic-1.0.0.yaml0000644000175000017500000000063713243564222026276 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/polyconic-1.0.0" tag: "tag:stsci.edu:asdf/transform/polyconic-1.0.0" title: | Polyconic projection. description: | Corresponds to the `PCO` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "pseudoconic-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cobe_quad_spherical_cube-1.1.0.yaml0000644000175000017500000000072613243564222031251 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cobe_quad_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube-1.1.0" title: | COBE quadrilateralized spherical cube projection. description: | Corresponds to the `CSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/healpix_polar-1.0.0.yaml0000644000175000017500000000106213243564222027117 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix_polar-1.0.0" tag: "tag:stsci.edu:asdf/transform/healpix_polar-1.0.0" title: | HEALPix polar, aka "butterfly", projection. description: | Corresponds to the `XPH` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/tangential_spherical_cube-1.1.0.yaml0000644000175000017500000000071413243564222031452 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/tangential_spherical_cube-1.1.0" tag: "tag:stsci.edu:asdf/transform/tangential_spherical_cube-1.1.0" title: | Tangential spherical cube projection. description: | Corresponds to the `TSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.0.0.yaml0000644000175000017500000000161213243564222026417 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.0.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.0.0" title: | Molleweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
$ref: "pseudocylindrical-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/slant_zenithal_perspective-1.0.0.yaml0000644000175000017500000000246013243564222031723 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_zenithal_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/slant_zenithal_perspective-1.0.0" title: | The slant zenithal perspective projection. description: | Corresponds to the `SZP` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \tan^{-1}\left(\frac{180^{\circ}}{\pi R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cot \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.0.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 phi0: type: number description: | The longitude $\phi_0$ of the reference point, in degrees. default: 0 theta0: type: number description: | The latitude $\theta_0$ of the reference point, in degrees. default: 90asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_perspective-1.0.0.yaml0000644000175000017500000000141513243564222027776 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_perspective-1.0.0" title: | Colles' conic perspective projection. description: | Corresponds to the `COP` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \sin \theta_a \\ R_\theta &= \frac{180^\circ}{\pi} \cos \eta [ \cot \theta_a - \tan(\theta - \theta_a)] \\ Y_0 &= \frac{180^\circ}{\pi} \cos \eta \cot \theta_a$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_equal_area-1.0.0.yaml0000644000175000017500000000163613243564222027551 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/conic_equal_area-1.0.0" title: | Alber's conic equal area projection. description: | Corresponds to the `COE` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.0.0) for the definition of the full transformation. The transformation is defined as: $$C &= \gamma / 2 \\ R_\theta &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin \theta} \\ Y_0 &= \frac{180^\circ}{\pi} \frac{2}{\gamma} \sqrt{1 + \sin \theta_1 \sin \theta_2 - \gamma \sin((\theta_1 + \theta_2)/2)}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform.
$ref: "conic-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/slant_orthographic-1.0.0.yaml0000644000175000017500000000143713243564222030170 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/slant_orthographic-1.0.0" tag: "tag:stsci.edu:asdf/transform/slant_orthographic-1.0.0" title: | The slant orthographic projection. description: | Corresponds to the `SIN` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = \cos^{-1}\left(\frac{\pi}{180^{\circ}}R_\theta\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\cos \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/affine-1.1.0.yaml0000644000175000017500000000232413243564222025523 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/affine-1.1.0" tag: "tag:stsci.edu:asdf/transform/affine-1.1.0" title: > An affine transform. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: matrix: description: | An array of size (*n* x *n*), where *n* is the number of axes, representing the linear transformation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: array items: type: number minItems: 2 maxItems: 2 minItems: 2 maxItems: 2 translation: description: | An array of size (*n*,), where *n* is the number of axes, representing the translation in an affine transform. anyOf: - $ref: "../core/ndarray-1.0.0" - type: array items: type: number minItems: 2 maxItems: 2 required: [matrix] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical-1.1.0.yaml0000644000175000017500000000062513243564222026572 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical-1.1.0" title: | Base class of all cylindrical projections. description: | The surface of cylindrical projections is a cylinder. allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/rotate2d-1.1.0.yaml0000644000175000017500000000104013243564222026011 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate2d-1.1.0" tag: "tag:stsci.edu:asdf/transform/rotate2d-1.1.0" title: > A 2D rotation. description: > A 2D rotation around the origin, in degrees. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. allOf: - $ref: "transform-1.1.0" - type: object properties: angle: type: number description: Angle, in degrees. 
required: [angle]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/stereographic-1.0.0.yaml0000644000175000017500000000147213243564222027134 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/stereographic-1.0.0" tag: "tag:stsci.edu:asdf/transform/stereographic-1.0.0" title: | The stereographic projection. description: | Corresponds to the `STG` projection in the FITS WCS standard. See [zenithal](ref:http://stsci.edu/schemas/asdf/transform/zenithal-1.0.0) for the definition of the full transformation. The pixel-to-sky transformation is defined as: $$\theta = 90^{\circ} - 2 \tan^{-1}\left(\frac{\pi R_\theta}{360^{\circ}}\right)$$ And the sky-to-pixel transformation is defined as: $$R_\theta = \frac{180^{\circ}}{\pi}\frac{2 \cos \theta}{1 + \sin \theta}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "zenithal-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/quad_spherical_cube-1.0.0.yaml0000644000175000017500000000070713243564222030257 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/quad_spherical_cube-1.0.0" tag: "tag:stsci.edu:asdf/transform/quad_spherical_cube-1.0.0" title: | Quadrilateralized spherical cube projection. description: | Corresponds to the `QSC` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "quadcube-1.0.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.0.0.yaml0000644000175000017500000000230313243564222027547 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.0.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.0.0" - type: object properties: theta1: type: number description: | Bonne conformal latitude, in degrees. default: 0 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/healpix-1.0.0.yaml0000644000175000017500000000143113243564222025722 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/healpix-1.0.0" tag: "tag:stsci.edu:asdf/transform/healpix-1.0.0" title: | HEALPix projection. description: | Corresponds to the `HPX` projection in the FITS WCS standard. Invertibility: All ASDF tools are required to provide the inverse of this transform.
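For the affine-1.1.0 schema shown a little earlier, applying the transform is just a matrix multiplication plus the optional translation. A small sketch follows; the helper name and example values are assumptions made for illustration, not library code:

```
import numpy as np

def apply_affine(matrix, translation, points):
    """Apply y = M @ x + t to an array of points with shape (npoints, n)."""
    m = np.asarray(matrix, dtype=float)
    t = np.zeros(m.shape[0]) if translation is None else np.asarray(translation, dtype=float)
    return np.asarray(points, dtype=float) @ m.T + t

# 2-D example: rotate by 90 degrees and shift by (1, 0).
matrix = [[0.0, -1.0],
          [1.0,  0.0]]
translation = [1.0, 0.0]
print(apply_affine(matrix, translation, [[1.0, 0.0]]))   # -> [[1., 1.]]
```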
allOf: - $ref: "transform-1.0.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky H: type: number description: | The number of facets in the longitude direction. default: 4.0 X: type: number description: | The number of facets in the latitude direction. default: 3.0 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal_perspective-1.0.0.yaml0000644000175000017500000000261613243564222030525 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal_perspective-1.0.0" tag: "tag:stsci.edu:asdf/transform/zenithal_perspective-1.0.0" title: | The zenithal perspective projection. description: | Corresponds to the `AZP` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y \cos \gamma, x) \\ \theta &= \left\{\genfrac{}{}{0pt}{}{\psi - \omega}{\psi + \omega + 180^{\circ}}\right.$$ where: $$\psi &= \arg(\rho, 1) \\ \omega &= \sin^{-1}\left(\frac{\rho \mu}{\sqrt{\rho^2 + 1}}\right) \\ \rho &= \frac{R}{\frac{180^{\circ}}{\pi}(\mu + 1) + y \sin \gamma} \\ R &= \sqrt{x^2 + y^2 \cos^2 \gamma}$$ And the sky-to-pixel transformation is defined as: $$x &= R \sin \phi \\ y &= -R \sec \gamma \cos \theta$$ where: $$R = \frac{180^{\circ}}{\pi} \frac{(\mu + 1) \cos \theta}{(\mu + \sin \theta) + \cos \theta \cos \phi \tan \gamma}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "zenithal-1.0.0" - type: object properties: mu: type: number description: | Distance from point of projection to center of sphere in spherical radii. default: 0 gamma: type: number description: | Look angle, in degrees. default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/constant-1.1.0.yaml0000644000175000017500000000104313243564222026121 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/constant-1.1.0" tag: "tag:stsci.edu:asdf/transform/constant-1.1.0" title: > A transform that takes no inputs and always outputs a constant value. description: | Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform, which always outputs zero values. allOf: - $ref: "transform-1.1.0" - type: object properties: value: type: number required: [value]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.0.0.yaml0000644000175000017500000000266313243564222026025 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.0.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.0.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.0.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.0.0" - type: object properties: phi: type: number description: Angle, in degrees. theta: type: number description: Angle, in degrees. psi: type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. 
If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/shift-1.1.0.yaml0000644000175000017500000000056313243564222025413 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/shift-1.1.0" tag: "tag:stsci.edu:asdf/transform/shift-1.1.0" title: > A Shift operation. description: > Apply an offset in one direction. type: object properties: offset: type: number description: Offset in one direction. required: [offset]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.1.0.yaml0000644000175000017500000000163113243564222025537 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.1.0" tag: "tag:stsci.edu:asdf/transform/divide-1.1.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. - | !transform/divide-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/conic_orthomorphic-1.1.0.yaml0000644000175000017500000000217213243564222030164 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/conic_orthomorphic-1.1.0" tag: "tag:stsci.edu:asdf/transform/conic_orthomorphic-1.1.0" title: | Conic orthomorphic projection. description: | Corresponds to the `COO` projection in the FITS WCS standard. See [conic](ref:http://stsci.edu/schemas/asdf/transform/conic-1.1.0) for the definition of the full transformation. The transformation is defined as: $$C &= \frac{\ln \left( \frac{\cos\theta_2}{\cos\theta_1} \right)} {\ln \left[ \frac{\tan\left(\frac{90^\circ-\theta_2}{2}\right)} {\tan\left(\frac{90^\circ-\theta_1}{2}\right)} \right] } \\ R_\theta &= \psi \left[ \tan \left( \frac{90^\circ - \theta}{2} \right) \right]^C \\ Y_0 &= \psi \left[ \tan \left( \frac{90^\circ - \theta_a}{2} \right) \right]^C$$ where: $$\psi = \frac{180^\circ}{\pi} \frac{\cos \theta} {C\left[\tan\left(\frac{90^\circ-\theta}{2}\right)\right]^C}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. $ref: "conic-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/zenithal-1.1.0.yaml0000644000175000017500000000127113243564222026111 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/zenithal-1.1.0" title: | Base class of all zenithal (or azimuthal) projections. description: | Zenithal projections are completely specified by defining the radius as a function of native latitude, $R_\theta$.
The pixel-to-sky transformation is defined as: $$\phi &= \arg(-y, x) \\ R_\theta &= \sqrt{x^2 + y^2}$$ and the inverse (sky-to-pixel) is defined as: $$x &= R_\theta \sin \phi \\ y &= R_\theta \cos \phi$$ allOf: - $ref: "transform-1.1.0" - type: object properties: direction: enum: [pix2sky, sky2pix] default: pix2sky asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/multiply-1.1.0.yaml0000644000175000017500000000164713243564222026161 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/multiply-1.1.0" tag: "tag:stsci.edu:asdf/transform/multiply-1.1.0" title: > Perform a list of subtransforms in parallel and then multiply their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through multiplication. - | !transform/multiply-1.1.0 forward: - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.1.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/power-1.1.0.yaml0000644000175000017500000000113713243564222025430 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/power-1.1.0" tag: "tag:stsci.edu:asdf/transform/power-1.1.0" title: > Perform a list of subtransforms in parallel and then raise each result to the power of the next. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. allOf: - $ref: "transform-1.1.0" - properties: forward: type: array items: $ref: "transform-1.1.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/add-1.0.0.yaml0000644000175000017500000000147513243564222025030 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/add-1.0.0" tag: "tag:stsci.edu:asdf/transform/add-1.0.0" title: > Perform a list of subtransforms in parallel and then add their results together. description: | Each of the subtransforms must have the same number of inputs and outputs. examples: - - A list of transforms, performed in parallel and added together - | !transform/add-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/sanson_flamsteed-1.1.0.yaml0000644000175000017500000000121113243564222027612 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/sanson_flamsteed-1.1.0" tag: "tag:stsci.edu:asdf/transform/sanson_flamsteed-1.1.0" title: | The Sanson-Flamsteed projection. description: | Corresponds to the `SFL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\cos y} \\ \theta &= y$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \cos \theta \\ y &= \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. 
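The add, subtract, multiply, divide and power schemas in this section all share the same shape: run the listed subtransforms on the same inputs, then reduce their outputs with the arithmetic operator. A toy sketch of that pattern (illustrative only; in practice the subtransforms would be full transform objects):

```
import numpy as np

def combine_parallel(transforms, op, *inputs):
    """Evaluate every subtransform on the same inputs and reduce the results with `op`."""
    results = [np.asarray(t(*inputs)) for t in transforms]
    out = results[0]
    for r in results[1:]:
        out = op(out, r)
    return out

# Two toy single-input transforms combined as in add-1.0.0 / multiply-1.1.0.
shift_by_2 = lambda x: x + 2.0
scale_by_3 = lambda x: 3.0 * x
print(combine_parallel([shift_by_2, scale_by_3], np.add, 1.0))       # (1+2) + (3*1) = 6.0
print(combine_parallel([shift_by_2, scale_by_3], np.multiply, 1.0))  # (1+2) * (3*1) = 9.0
```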
$ref: "pseudocylindrical-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.1.0.yaml0000644000175000017500000000501713243564222026421 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.1.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.1.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers or "constant markers". Each item in the list corresponds to an output axis. For each item: - If an integer, it is the index of the input axis to send to the output axis. - If a constant, it must be a single item which is a constant value to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.1.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.1.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.1.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/remap_axes-1.1.0 mapping: [0, 1, !core/constant-1.0.0 42] - - | The above example is equivalent to the following, and ASDF implementations are free to normalize it thusly: - | !transform/concatenate-1.1.0 forward: - !transform/remap_axes-1.1.0 mapping: [0] - !transform/remap_axes-1.1.0 mapping: [1] - !transform/constant-1.0.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.1.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.1.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/domain-1.0.0.yaml0000644000175000017500000000203113243564222025534 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/domain-1.0.0" tag: "tag:stsci.edu:asdf/transform/domain-1.0.0" title: > Defines the domain of an input axis. (deprecated since 1.1.0) description: > Describes the range of acceptable input values to a particular axis of a transform. examples: - - The domain `[0, 1)`. - | !transform/domain-1.0.0 lower: 0 upper: 1 includes_lower: true properties: lower: description: > The lower value of the domain. If not provided, the domain has no lower limit. type: number default: -.inf upper: description: > The upper value of the domain. If not provided, the domain has no upper limit. type: number default: .inf includes_lower: description: If `true`, the domain includes `lower`. type: boolean default: false includes_upper: description: If `true`, the domain includes `upper`. 
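The remap_axes mapping semantics described above (integer entries pick an input axis, constant entries pass a fixed value through) can be illustrated with a tiny helper. The Constant class below is a stand-in for !core/constant-1.0.0 and is an assumption made for this sketch, not part of the asdf library:

```
class Constant:
    """Stand-in for !core/constant-1.0.0: a fixed value sent to an output axis."""
    def __init__(self, value):
        self.value = value

def remap_axes(mapping, inputs):
    """Integer entries index into `inputs`; Constant entries pass their value through."""
    return [m.value if isinstance(m, Constant) else inputs[m] for m in mapping]

x, y = 10.0, 20.0
print(remap_axes([1, 0], (x, y)))               # swap axes        -> [20.0, 10.0]
print(remap_axes([0, 0, 1], (x, y)))            # duplicate axis 0 -> [10.0, 10.0, 20.0]
print(remap_axes([0, 1, Constant(42)], (x, y))) # append constant  -> [10.0, 20.0, 42]
```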
type: boolean default: falseasdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/remap_axes-1.0.0.yaml0000644000175000017500000000501713243564222026420 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/remap_axes-1.0.0" tag: "tag:stsci.edu:asdf/transform/remap_axes-1.0.0" title: > Reorder, add and drop axes. description: | This transform allows the order of the input axes to be shuffled and returned as the output axes. It is a list made up of integers or "constant markers". Each item in the list corresponds to an output axis. For each item: - If an integer, it is the index of the input axis to send to the output axis. - If a constant, it must be a single item which is a constant value to send to the output axis. If only a list is provided, the number of input axes is automatically determined from the maximum index in the list. If an object with `mapping` and `n_inputs` properties is provided, the number of input axes is explicitly set by the `n_inputs` value. Invertibility: TBD examples: - - For 2 input axes, swap the axes - | !transform/remap_axes-1.0.0 mapping: [1, 0] - - For 2 input axes, return the second axis and drop the first - | !transform/remap_axes-1.0.0 mapping: [1] - - For 2 input axes, return the first axis twice, followed by the second - | !transform/remap_axes-1.0.0 mapping: [0, 0, 1] - - For 2 input axes, add a third axis which is a constant - | !transform/remap_axes-1.0.0 mapping: [0, 1, !core/constant-1.0.0 42] - - | The above example is equivalent to the following, and ASDF implementations are free to normalize it thusly: - | !transform/concatenate-1.0.0 forward: - !transform/remap_axes-1.0.0 mapping: [0] - !transform/remap_axes-1.0.0 mapping: [1] - !transform/constant-1.0.0 value: 42 - - Here we have 3 input axes, but we are explicitly dropping the last one - | !transform/remap_axes-1.0.0 mapping: [0, 1] n_inputs: 3 definitions: mapping: type: array items: anyOf: - type: integer - $ref: "../core/constant-1.0.0" allOf: - $ref: "transform-1.0.0" - properties: n_inputs: description: | Explicitly set the number of input axes. If not provided, it is determined from the maximum index value in the mapping list. type: integer mapping: $ref: "#/definitions/mapping" required: [mapping] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/divide-1.0.0.yaml0000644000175000017500000000163113243564222025536 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/divide-1.0.0" tag: "tag:stsci.edu:asdf/transform/divide-1.0.0" title: > Perform a list of subtransforms in parallel and then divide their results. description: | Each of the subtransforms must have the same number of inputs and outputs. Invertibility: This transform is not automatically invertible. examples: - - A list of transforms, performed in parallel, and then combined through division. 
- | !transform/divide-1.0.0 forward: - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 - !transform/generic-1.0.0 n_inputs: 1 n_outputs: 2 allOf: - $ref: "transform-1.0.0" - properties: forward: type: array items: $ref: "transform-1.0.0" required: [forward]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/bonne_equal_area-1.1.0.yaml0000644000175000017500000000230313243564222027550 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/bonne_equal_area-1.1.0" tag: "tag:stsci.edu:asdf/transform/bonne_equal_area-1.1.0" title: | Bonne's equal area pseudoconic projection. description: | Corresponds to the `BON` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi}{180^\circ} A_\phi R_\theta / \cos \theta \\ \theta &= Y_0 - R_\theta$$ where: $$R_\theta &= \mathrm{sign} \theta_1 \sqrt{x^2 + (Y_0 - y)^2} \\ A_\phi &= \arg\left(\frac{Y_0 - y}{R_\theta}, \frac{x}{R_\theta}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= R_\theta \sin A_\phi \\ y &= -R_\theta \cos A_\phi + Y_0$$ where: $$A_\phi &= \frac{180^\circ}{\pi R_\theta} \phi \cos \theta \\ R_\theta &= Y_0 - \theta \\ Y_0 &= \frac{180^\circ}{\pi} \cot \theta_1 + \theta_1$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "pseudoconic-1.1.0" - type: object properties: theta1: type: number description: | Bonne conformal latitude, in degrees. default: 0 asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/rotate3d-1.1.0.yaml0000644000175000017500000000266313243564222026026 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/rotate3d-1.1.0" tag: "tag:stsci.edu:asdf/transform/rotate3d-1.1.0" title: > Rotation in 3D space. description: | Euler angle rotation around 3 axes. Invertibility: All ASDF tools are required to be able to compute the analytic inverse of this transform. examples: - - The three Euler angles are 12.3, 34 and -1.2 in degrees. - | !transform/rotate3d-1.1.0 phi: 12.3 theta: 34 psi: -1.2 direction: zxz allOf: - $ref: "transform-1.1.0" - type: object properties: phi: type: number description: Angle, in degrees. theta: type: number description: Angle, in degrees. psi: type: number description: Angle, in degrees. direction: description: | Sequence of rotation axes: one of `zxz`, `zyz`, `yzy`, `yxy`, `xyx`, `xzx` or `native2celestial`, `celestial2native`. If `direction` is `native2celestial` or `celestial2native`, `phi`, `theta` are the longitude and latitude of the native pole in the celestial system and `psi` is the longitude of the celestial pole in the native system. enum: [zxz, zyz, yzy, yxy, xyx, xzx, native2celestial, celestial2native] default: native2celestial required: [phi, theta, psi, direction]asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_perspective-1.1.0.yaml0000644000175000017500000000224413243564222031202 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_perspective-1.1.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_perspective-1.1.0" title: | The cylindrical perspective projection. description: | Corresponds to the `CYP` projection in the FITS WCS standard. 
The pixel-to-sky transformation is defined as: $$\phi &= \frac{x}{\lambda} \\ \theta &= \arg(1, \eta) + \sin^{-1}\left(\frac{\eta \mu}{\sqrt{\eta^2 + 1}}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \lambda \phi \\ y &= \frac{180^{\circ}}{\pi}\left(\frac{\mu + \lambda}{\mu + \cos \theta}\right)\sin \theta$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.1.0" - type: object properties: mu: type: number description: | Distance from center of sphere in the direction opposite the projected surface, in spherical radii. default: 0 lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/cylindrical_equal_area-1.0.0.yaml0000644000175000017500000000162213243564222030746 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/cylindrical_equal_area-1.0.0" tag: "tag:stsci.edu:asdf/transform/cylindrical_equal_area-1.0.0" title: | The cylindrical equal area projection. description: | Corresponds to the `CEA` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= x \\ \theta &= \sin^{-1}\left(\frac{\pi}{180^{\circ}}\lambda y\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \phi \\ y &= \frac{180^{\circ}}{\pi}\frac{\sin \theta}{\lambda}$$ Invertibility: All ASDF tools are required to provide the inverse of this transform. allOf: - $ref: "cylindrical-1.0.0" - type: object properties: lambda: type: number description: | Radius of the cylinder in spherical radii, default is 0. default: 0asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/transform/molleweide-1.1.0.yaml0000644000175000017500000000161213243564222026420 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/transform/molleweide-1.1.0" tag: "tag:stsci.edu:asdf/transform/molleweide-1.1.0" title: | Molleweide's projection. description: | Corresponds to the `MOL` projection in the FITS WCS standard. The pixel-to-sky transformation is defined as: $$\phi &= \frac{\pi x}{2 \sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}} \\ \theta &= \sin^{-1}\left(\frac{1}{90^\circ}\sin^{-1}\left(\frac{\pi}{180^\circ}\frac{y}{\sqrt{2}}\right) + \frac{y}{180^\circ}\sqrt{2 - \left(\frac{\pi}{180^\circ}y\right)^2}\right)$$ And the sky-to-pixel transformation is defined as: $$x &= \frac{2 \sqrt{2}}{\pi} \phi \cos \gamma \\ y &= \sqrt{2} \frac{180^\circ}{\pi} \sin \gamma$$ Invertibility: All ASDF tools are required to provide the inverse of this transform.
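The cylindrical equal area relations shown above (cylindrical_equal_area-1.0.0/1.1.0) round-trip cleanly. A minimal numpy sketch, assuming angles in degrees and a caller-supplied lambda (illustration only, not the reference implementation):

```
import numpy as np

def cea_pix2sky(x, y, lam=1.0):
    """CEA pixel-to-sky: phi = x, theta = asin(pi/180 * lambda * y), in degrees."""
    phi = x
    theta = np.degrees(np.arcsin(np.radians(lam * y)))
    return phi, theta

def cea_sky2pix(phi, theta, lam=1.0):
    """CEA sky-to-pixel: x = phi, y = (180/pi) * sin(theta) / lambda."""
    x = phi
    y = np.degrees(np.sin(np.radians(theta))) / lam
    return x, y

phi, theta = cea_pix2sky(30.0, 20.0)
print(cea_sky2pix(phi, theta))   # round-trips back to (30.0, 20.0)
```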
$ref: "pseudocylindrical-1.1.0" asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/version_map-1.1.0.yaml0000644000175000017500000000714213243564222024605 0ustar dandan00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.0.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.1.0 tag:stsci.edu:asdf/transform/add: 1.1.0 tag:stsci.edu:asdf/transform/affine: 1.1.0 tag:stsci.edu:asdf/transform/airy: 1.1.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/compose: 1.1.0 tag:stsci.edu:asdf/transform/concatenate: 1.1.0 tag:stsci.edu:asdf/transform/conic: 1.1.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.1.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.1.0 tag:stsci.edu:asdf/transform/constant: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.1.0 tag:stsci.edu:asdf/transform/divide: 1.1.0 tag:stsci.edu:asdf/transform/generic: 1.1.0 tag:stsci.edu:asdf/transform/gnomonic: 1.1.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.1.0 tag:stsci.edu:asdf/transform/healpix: 1.1.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.1.0 tag:stsci.edu:asdf/transform/identity: 1.1.0 tag:stsci.edu:asdf/transform/label_mapper: 1.1.0 tag:stsci.edu:asdf/transform/mercator: 1.1.0 tag:stsci.edu:asdf/transform/molleweide: 1.1.0 tag:stsci.edu:asdf/transform/multiply: 1.1.0 tag:stsci.edu:asdf/transform/parabolic: 1.1.0 tag:stsci.edu:asdf/transform/plate_carree: 1.1.0 tag:stsci.edu:asdf/transform/polyconic: 1.1.0 tag:stsci.edu:asdf/transform/polynomial: 1.1.0 tag:stsci.edu:asdf/transform/power: 1.1.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.1.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.1.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/quadcube: 1.1.0 tag:stsci.edu:asdf/transform/regions_selector: 1.1.0 tag:stsci.edu:asdf/transform/remap_axes: 1.1.0 tag:stsci.edu:asdf/transform/rotate2d: 1.1.0 tag:stsci.edu:asdf/transform/rotate3d: 1.1.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.1.0 tag:stsci.edu:asdf/transform/scale: 1.1.0 tag:stsci.edu:asdf/transform/shift: 1.1.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.1.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.1.0 tag:stsci.edu:asdf/transform/stereographic: 1.1.0 tag:stsci.edu:asdf/transform/subtract: 1.1.0 tag:stsci.edu:asdf/transform/tabular: 1.1.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.1.0 tag:stsci.edu:asdf/transform/transform: 1.1.0 tag:stsci.edu:asdf/transform/zenithal: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.1.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.1.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/unit/quantity: 1.1.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.1.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.1.0 tag:stsci.edu:asdf/wcs/frame: 1.1.0 
tag:stsci.edu:asdf/wcs/icrs_coord: 1.1.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.1.0 tag:stsci.edu:asdf/wcs/step: 1.1.0 tag:stsci.edu:asdf/wcs/wcs: 1.0.0 ... asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/fits/0000755000175000017500000000000013246031665021710 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/fits/fits-1.0.0.yaml0000644000175000017500000000753713243564222024204 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/fits/fits-1.0.0" title: > A FITS file inside of an ASDF file. description: | This schema is useful for distributing ASDF files that can automatically be converted to FITS files by specifying the exact content of the resulting FITS file. Not all kinds of data in FITS are directly representable in ASDF. For example, applying an offset and scale to the data using the `BZERO` and `BSCALE` keywords. In these cases, it will not be possible to store the data in the native format from FITS and also be accessible in its proper form in the ASDF file. Only image and binary table extensions are supported. examples: - - A simple FITS file with a primary header and two extensions - | !fits/fits-1.0.0 - header: - [SIMPLE, true, conforms to FITS standard] - [BITPIX, 8, array data type] - [NAXIS, 0, number of array dimensions] - [EXTEND, true] - [] - ['', Top Level MIRI Metadata] - [] - [DATE, '2013-08-30T10:49:55.070373', The date this file was created (UTC)] - [FILENAME, MiriDarkReferenceModel_test.fits, The name of the file] - [TELESCOP, JWST, The telescope used to acquire the data] - [] - ['', Information about the observation] - [] - [DATE-OBS, '2013-08-30T10:49:55.000000', The date the observation was made (UTC)] - data: !core/ndarray-1.0.0 datatype: float32 shape: [2, 3, 3, 4] source: 0 byteorder: big header: - [XTENSION, IMAGE, Image extension] - [BITPIX, -32, array data type] - [NAXIS, 4, number of array dimensions] - [NAXIS1, 4] - [NAXIS2, 3] - [NAXIS3, 3] - [NAXIS4, 2] - [PCOUNT, 0, number of parameters] - [GCOUNT, 1, number of groups] - [EXTNAME, SCI, extension name] - [BUNIT, DN, Units of the data array] - data: !core/ndarray-1.0.0 datatype: float32 shape: [2, 3, 3, 4] source: 1 byteorder: big header: - [XTENSION, IMAGE, Image extension] - [BITPIX, -32, array data type] - [NAXIS, 4, number of array dimensions] - [NAXIS1, 4] - [NAXIS2, 3] - [NAXIS3, 3] - [NAXIS4, 2] - [PCOUNT, 0, number of parameters] - [GCOUNT, 1, number of groups] - [EXTNAME, ERR, extension name] - [BUNIT, DN, Units of the error array] tag: "tag:stsci.edu:asdf/fits/fits-1.0.0" type: array items: description: > Each item represents a single header/data unit (HDU). type: object properties: header: description: > A list of the keyword/value/comment triples from the header, in the order they appear in the FITS file. type: array items: type: array minItems: 0 maxItems: 3 items: - description: "The keyword." type: string maxLength: 8 pattern: "[A-Z0-9]*" - description: "The value." anyOf: - type: string maxLength: 60 - type: number - type: boolean - description: "The comment." type: string maxLength: 60 data: description: "The data part of the HDU." 
anyOf: - $ref: "../core/ndarray-1.0.0" - $ref: "../core/table-1.0.0" - type: "null" default: null required: [header] additionalProperties: false asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/time/0000755000175000017500000000000013246031665021701 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/time/time-1.0.0.yaml0000644000175000017500000002066713243564222024165 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/time/time-1.0.0" title: Represents an instance in time. description: | A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used. examples: - - Example ISO time - | !time/time-1.0.0 "2000-12-31T13:05:27.737" - - Example year, day-of-year and time format time - | !time/time-1.0.0 "2001:003:04:05:06.789" - - Example Besselian Epoch time - | !time/time-1.0.0 B2000.0 - - Example Besselian Epoch time, equivalent to above - | !time/time-1.0.0 value: 2000.0 format: byear - - Example list of times - | !time/time-1.0.0 ["2000-12-31T13:05:27.737", "2000-12-31T13:06:38.444"] - - Example of an array of times - | !time/time-1.0.0 value: !core/ndarray-1.0.0 data: [2000, 2001] datatype: float64 format: jyear - - Example with a location - | !time/time-1.0.0 value: 2000.0 format: jyear scale: tdb location: x: 6378100 y: 0 z: 0 definitions: iso_time: type: string pattern: "[0-9]{4}-(0[1-9])|(1[0-2])-(0[1-9])|([1-2][0-9])|(3[0-1])[T ]([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" byear: type: string pattern: "B[0-9]+(.[0-9]+)?" jyear: type: string pattern: "J[0-9]+(.[0-9]+)?" yday: type: string pattern: "[0-9]{4}:(00[1-9])|(0[1-9][0-9])|([1-2][0-9][0-9])|(3[0-5][0-9])|(36[0-5]):([0-1][0-9])|([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" string_formats: anyOf: - $ref: "#/definitions/iso_time" - $ref: "#/definitions/byear" - $ref: "#/definitions/jyear" - $ref: "#/definitions/yday" array_of_strings: type: array items: anyOf: - $ref: "#/definitions/array_of_strings" - $ref: "#/definitions/string_formats" allOf: - tag: "tag:stsci.edu:asdf/time/time-1.0.0" - anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - type: object properties: $ref: "../core/ndarray-1.0.0#anyOf/1/properties" - type: object properties: value: description: | The value(s) of the time. 
anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0" - type: number format: description: | The format of the time. If not provided, the the format should be guessed from the string from among the following unambiguous options: `iso`, `byear`, `jyear` and `yday`. The supported formats are: - `iso`: ISO 8601 compliant date-time format `YYYY-MM-DDTHH:MM:SS.sss...`. For example, `2000-01-01 00:00:00.000` is midnight on January 1, 2000. The `T` separating the date from the time section is optional. - `yday`: Year, day-of-year and time as `YYYY:DOY:HH:MM:SS.sss...`. The day-of-year (DOY) goes from 001 to 365 (366 in leap years). For example, `2000:001:00:00:00.000` is midnight on January 1, 2000. - `byear`: Besselian Epoch year, eg. `B1950.0`. The `B` is optional if the `byear` format is explicitly specified. - `jyear`: Julian Epoch year, eg. `J2000.0`. The `J` is optional if the `jyear` format is explicitly specified. - `decimalyear`: Time as a decimal year, with integer values corresponding to midnight of the first day of each year. For example 2000.5 corresponds to the ISO time `2000-07-02 00:00:00`. - `jd`: Julian Date time format. This represents the number of days since the beginning of the Julian Period. For example, 2451544.5 in `jd` is midnight on January 1, 2000. - `mjd`: Modified Julian Date time format. This represents the number of days since midnight on November 17, 1858. For example, 51544.0 in MJD is midnight on January 1, 2000. - `gps`: GPS time: seconds from 1980-01-06 00:00:00 UTC For example, 630720013.0 is midnight on January 1, 2000. - `unix`: Unix time: seconds from 1970-01-01 00:00:00 UTC. For example, 946684800.0 in Unix time is midnight on January 1, 2000. [TODO: Astropy's definition of UNIX time doesn't match POSIX's here. What should we do for the purposes of ASDF?] enum: - iso - yday - byear - jyear - decimalyear - jd - mjd - gps - unix - cxcsec scale: description: | The time scale (or time standard) is a specification for measuring time: either the rate at which time passes; or points in time; or both. See also [3] and [4]. These scales are defined in detail in [SOFA Time Scale and Calendar Tools](http://www.iausofa.org/sofa_ts_c.pdf). The supported time scales are: - `utc`: Coordinated Universal Time (UTC). This is the default time scale, except for `gps`, `unix`. - `tai`: International Atomic Time (TAI). - `tcb`: Barycentric Coordinate Time (TCB). - `tcg`: Geocentric Coordinate Time (TCG). - `tdb`: Barycentric Dynamical Time (TDB). - `tt`: Terrestrial Time (TT). - `ut1`: Universal Time (UT1). enum: - utc - tai - tcb - tcg - tdb - tt - ut1 location: description: | Specifies the observer location for scales that are sensitive to observer location, currently only `tdb`. 
May be specified either with geocentric coordinates (X, Y, Z) with an optional unit or geodetic coordinates: - `long`: longitude in degrees - `lat`: in degrees - `h`: optional height anyOf: - type: object properties: x: type: number y: type: number z: type: number unit: allOf: - $ref: "../unit/unit-1.0.0" - default: m required: [x, y, z] - type: object properties: long: type: number minimum: -180 maximum: 180 lat: type: number minimum: -90 maximum: 90 h: type: number default: 0 unit: allOf: - $ref: "../unit/unit-1.0.0" - default: m required: [long, lat] required: [value] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/time/time-1.1.0.yaml0000644000175000017500000001745113243564222024163 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" id: "http://stsci.edu/schemas/asdf/time/time-1.1.0" title: Represents an instance in time. description: | A "time" is a single instant in time. It may explicitly specify the way time is represented (the "format") and the "scale" which specifies the offset and scaling relation of the unit of time. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in astronomy and required to calculate, e.g., sidereal times and barycentric corrections. Times may be represented as one of the following: - an object, with explicit `value`, and optional `format`, `scale` and `location`. - a string, in which case the format is guessed from across the unambiguous options (`iso`, `byear`, `jyear`, `yday`), and the scale is hardcoded to `UTC`. In either case, a single time tag may be used to represent an n-dimensional array of times, using either an `ndarray` tag or inline as (possibly nested) YAML lists. If YAML lists, the same format must be used for all time values. The precision of the numeric formats should only be assumed to be as good as an IEEE-754 double precision (float64) value. If higher-precision is required, the `iso` or `yday` format should be used. examples: - - Example ISO time - | !time/time-1.1.0 "2000-12-31T13:05:27.737" - - Example year, day-of-year and time format time - | !time/time-1.1.0 "2001:003:04:05:06.789" - - Example Besselian Epoch time - | !time/time-1.1.0 B2000.0 - - Example Besselian Epoch time, equivalent to above - | !time/time-1.1.0 value: 2000.0 format: byear - - Example list of times - | !time/time-1.1.0 ["2000-12-31T13:05:27.737", "2000-12-31T13:06:38.444"] - - Example of an array of times - | !time/time-1.1.0 value: !core/ndarray-1.0.0 data: [2000, 2001] datatype: float64 format: jyear - - Example with a location - | !time/time-1.1.0 value: 2000.0 format: jyear scale: tdb location: x: 6378100 y: 0 z: 0 definitions: iso_time: type: string pattern: "[0-9]{4}-(0[1-9])|(1[0-2])-(0[1-9])|([1-2][0-9])|(3[0-1])[T ]([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" byear: type: string pattern: "B[0-9]+(.[0-9]+)?" jyear: type: string pattern: "J[0-9]+(.[0-9]+)?" yday: type: string pattern: "[0-9]{4}:(00[1-9])|(0[1-9][0-9])|([1-2][0-9][0-9])|(3[0-5][0-9])|(36[0-5]):([0-1][0-9])|([0-1][0-9])|(2[0-4]):[0-5][0-9]:[0-5][0-9](.[0-9]+)?" 
string_formats: anyOf: - $ref: "#/definitions/iso_time" - $ref: "#/definitions/byear" - $ref: "#/definitions/jyear" - $ref: "#/definitions/yday" array_of_strings: type: array items: anyOf: - $ref: "#/definitions/array_of_strings" - $ref: "#/definitions/string_formats" allOf: - tag: "tag:stsci.edu:asdf/time/time-1.1.0" - anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - type: object properties: $ref: "../core/ndarray-1.0.0#anyOf/1/properties" - type: object properties: value: description: | The value(s) of the time. anyOf: - $ref: "#/definitions/string_formats" - $ref: "#/definitions/array_of_strings" - $ref: "../core/ndarray-1.0.0" - type: number format: description: | The format of the time. If not provided, the the format should be guessed from the string from among the following unambiguous options: `iso`, `byear`, `jyear` and `yday`. The supported formats are: - `iso`: ISO 8601 compliant date-time format `YYYY-MM-DDTHH:MM:SS.sss...`. For example, `2000-01-01 00:00:00.000` is midnight on January 1, 2000. The `T` separating the date from the time section is optional. - `yday`: Year, day-of-year and time as `YYYY:DOY:HH:MM:SS.sss...`. The day-of-year (DOY) goes from 001 to 365 (366 in leap years). For example, `2000:001:00:00:00.000` is midnight on January 1, 2000. - `byear`: Besselian Epoch year, eg. `B1950.0`. The `B` is optional if the `byear` format is explicitly specified. - `jyear`: Julian Epoch year, eg. `J2000.0`. The `J` is optional if the `jyear` format is explicitly specified. - `decimalyear`: Time as a decimal year, with integer values corresponding to midnight of the first day of each year. For example 2000.5 corresponds to the ISO time `2000-07-02 00:00:00`. - `jd`: Julian Date time format. This represents the number of days since the beginning of the Julian Period. For example, 2451544.5 in `jd` is midnight on January 1, 2000. - `mjd`: Modified Julian Date time format. This represents the number of days since midnight on November 17, 1858. For example, 51544.0 in MJD is midnight on January 1, 2000. - `gps`: GPS time: seconds from 1980-01-06 00:00:00 UTC For example, 630720013.0 is midnight on January 1, 2000. - `unix`: Unix time: seconds from 1970-01-01 00:00:00 UTC. For example, 946684800.0 in Unix time is midnight on January 1, 2000. [TODO: Astropy's definition of UNIX time doesn't match POSIX's here. What should we do for the purposes of ASDF?] enum: - iso - yday - byear - jyear - decimalyear - jd - mjd - gps - unix - cxcsec scale: description: | The time scale (or time standard) is a specification for measuring time: either the rate at which time passes; or points in time; or both. See also [3] and [4]. These scales are defined in detail in [SOFA Time Scale and Calendar Tools](http://www.iausofa.org/sofa_ts_c.pdf). The supported time scales are: - `utc`: Coordinated Universal Time (UTC). This is the default time scale, except for `gps`, `unix`. - `tai`: International Atomic Time (TAI). - `tcb`: Barycentric Coordinate Time (TCB). - `tcg`: Geocentric Coordinate Time (TCG). - `tdb`: Barycentric Dynamical Time (TDB). - `tt`: Terrestrial Time (TT). - `ut1`: Universal Time (UT1). enum: - utc - tai - tcb - tcg - tdb - tt - ut1 location: description: | Specifies the observer location for scales that are sensitive to observer location, currently only `tdb`. 
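As a minimal sketch of producing a time-tagged entry of the kind this schema describes (it assumes the asdf and astropy packages are installed; the file name is only illustrative), placing an astropy Time object in the tree lets the library serialize it with the appropriate format and scale:

import asdf
from astropy.time import Time

tree = {'observation_time': Time('2000-12-31T13:05:27.737', format='isot', scale='utc')}
asdf.AsdfFile(tree).write_to('time_example.asdf')  # written as a !time/time-* tagged node

The exact tag version written (1.0.0 or 1.1.0) depends on which ASDF Standard version the library is targeting.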
May be specified either with geocentric coordinates (X, Y, Z) with an optional unit or geodetic coordinates: - `long`: longitude in degrees - `lat`: in degrees - `h`: optional height type: object properties: x: $ref: "../unit/quantity-1.1.0" y: $ref: "../unit/quantity-1.1.0" z: $ref: "../unit/quantity-1.1.0" required: [x, y, z] required: [value] asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/asdf-schema-1.0.0.yaml0000644000175000017500000000271513243564222024436 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://json-schema.org/draft-04/schema" id: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" title: ASDF schema description: | A metaschema extending YAML Schema and JSON Schema to add support for some ASDF-specific checks, related to nd-arrays. allOf: - $ref: "http://stsci.edu/schemas/yaml-schema/draft-01" - type: object properties: max_ndim: description: | Specifies that the corresponding ndarray is at most the given number of dimensions. If the array has fewer dimensions, it should be logically treated as if it were "broadcast" to the expected dimensions by adding 1's to the front of the shape list. type: integer minimum: 0 ndim: description: | Specifies that the matching ndarray is exactly the given number of dimensions. type: integer minimum: 0 datatype: description: | Specifies the datatype of the ndarray. By default, an array is considered "matching" if the array can be cast to the given datatype without data loss. For exact datatype matching, set `exact_datatype` to `true`. allOf: - $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0#definitions/datatype-1.0.0" exact_datatype: description: | If `true`, the datatype must match exactly. type: boolean default: false ... asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/version_map-1.0.0.yaml0000644000175000017500000000707113243564222024605 0ustar dandan00000000000000%YAML 1.1 --- FILE_FORMAT: 1.0.0 YAML_VERSION: "1.1" tags: tag:stsci.edu:asdf/asdf-schema: 1.0.0 tag:stsci.edu:asdf/core/asdf: 1.0.0 tag:stsci.edu:asdf/core/column: 1.0.0 tag:stsci.edu:asdf/core/complex: 1.0.0 tag:stsci.edu:asdf/core/constant: 1.0.0 tag:stsci.edu:asdf/core/history_entry: 1.0.0 tag:stsci.edu:asdf/core/ndarray: 1.0.0 tag:stsci.edu:asdf/core/software: 1.0.0 tag:stsci.edu:asdf/core/table: 1.0.0 tag:stsci.edu:asdf/fits/fits: 1.0.0 tag:stsci.edu:asdf/time/time: 1.0.0 tag:stsci.edu:asdf/transform/add: 1.0.0 tag:stsci.edu:asdf/transform/affine: 1.0.0 tag:stsci.edu:asdf/transform/airy: 1.0.0 tag:stsci.edu:asdf/transform/bonne_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/cobe_quad_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/compose: 1.0.0 tag:stsci.edu:asdf/transform/concatenate: 1.0.0 tag:stsci.edu:asdf/transform/conic: 1.0.0 tag:stsci.edu:asdf/transform/conic_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/conic_equidistant: 1.0.0 tag:stsci.edu:asdf/transform/conic_orthomorphic: 1.0.0 tag:stsci.edu:asdf/transform/conic_perspective: 1.0.0 tag:stsci.edu:asdf/transform/constant: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/cylindrical_perspective: 1.0.0 tag:stsci.edu:asdf/transform/divide: 1.0.0 tag:stsci.edu:asdf/transform/domain: 1.0.0 tag:stsci.edu:asdf/transform/generic: 1.0.0 tag:stsci.edu:asdf/transform/gnomonic: 1.0.0 tag:stsci.edu:asdf/transform/hammer_aitoff: 1.0.0 tag:stsci.edu:asdf/transform/healpix: 1.0.0 tag:stsci.edu:asdf/transform/healpix_polar: 1.0.0 tag:stsci.edu:asdf/transform/identity: 1.0.0 tag:stsci.edu:asdf/transform/label_mapper: 
1.0.0 tag:stsci.edu:asdf/transform/mercator: 1.0.0 tag:stsci.edu:asdf/transform/molleweide: 1.0.0 tag:stsci.edu:asdf/transform/multiply: 1.0.0 tag:stsci.edu:asdf/transform/parabolic: 1.0.0 tag:stsci.edu:asdf/transform/plate_carree: 1.0.0 tag:stsci.edu:asdf/transform/polyconic: 1.0.0 tag:stsci.edu:asdf/transform/polynomial: 1.0.0 tag:stsci.edu:asdf/transform/power: 1.0.0 tag:stsci.edu:asdf/transform/pseudoconic: 1.0.0 tag:stsci.edu:asdf/transform/pseudocylindrical: 1.0.0 tag:stsci.edu:asdf/transform/quad_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/quadcube: 1.0.0 tag:stsci.edu:asdf/transform/regions_selector: 1.0.0 tag:stsci.edu:asdf/transform/remap_axes: 1.0.0 tag:stsci.edu:asdf/transform/rotate2d: 1.0.0 tag:stsci.edu:asdf/transform/rotate3d: 1.0.0 tag:stsci.edu:asdf/transform/sanson_flamsteed: 1.0.0 tag:stsci.edu:asdf/transform/scale: 1.0.0 tag:stsci.edu:asdf/transform/shift: 1.0.0 tag:stsci.edu:asdf/transform/slant_orthographic: 1.0.0 tag:stsci.edu:asdf/transform/slant_zenithal_perspective: 1.0.0 tag:stsci.edu:asdf/transform/stereographic: 1.0.0 tag:stsci.edu:asdf/transform/subtract: 1.0.0 tag:stsci.edu:asdf/transform/tabular: 1.0.0 tag:stsci.edu:asdf/transform/tangential_spherical_cube: 1.0.0 tag:stsci.edu:asdf/transform/transform: 1.0.0 tag:stsci.edu:asdf/transform/zenithal: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_equal_area: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_equidistant: 1.0.0 tag:stsci.edu:asdf/transform/zenithal_perspective: 1.0.0 tag:stsci.edu:asdf/unit/defunit: 1.0.0 tag:stsci.edu:asdf/unit/unit: 1.0.0 tag:stsci.edu:asdf/wcs/celestial_frame: 1.0.0 tag:stsci.edu:asdf/wcs/composite_frame: 1.0.0 tag:stsci.edu:asdf/wcs/frame: 1.0.0 tag:stsci.edu:asdf/wcs/spectral_frame: 1.0.0 tag:stsci.edu:asdf/wcs/step: 1.0.0 tag:stsci.edu:asdf/wcs/wcs: 1.0.0 ...asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/unit/0000755000175000017500000000000013246031665021722 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/unit/unit-1.0.0.yaml0000644000175000017500000000105213243564222024212 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/unit-1.0.0" title: Physical unit. description: > This represents a physical unit, in [VOUnit syntax, Version 1.0](http://www.ivoa.net/documents/VOUnits/index.html). Where units are not explicitly tagged, they are assumed to be in VOUnit syntax. 
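As a minimal sketch of writing unit-tagged values (assuming the asdf and astropy packages are installed; the file name is illustrative), astropy units and quantities placed in the tree are serialized using the VOUnit string form described here, with quantities handled by the quantity schema shown further below:

import asdf
import astropy.units as u

tree = {
    'band': u.Unit('angstrom'),   # stored as a !unit/unit-* VOUnit string
    'distance': 3.14159 * u.km,   # stored as a !unit/quantity-* value/unit pair
}
asdf.AsdfFile(tree).write_to('units_example.asdf')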
examples: - - Example unit - | !unit/unit-1.0.0 "2.1798721 10-18kg m2 s-2" anyOf: - tag: "tag:stsci.edu:asdf/unit/unit-1.0.0" - {} type: string pattern: "[\x00-\x7f]*"asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/unit/quantity-1.1.0.yaml0000644000175000017500000000230213243564222025111 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/quantity-1.1.0" tag: "tag:stsci.edu:asdf/unit/quantity-1.1.0" title: > Represents a Quantity object from astropy examples: - - A quantity consisting of a scalar value and unit - | !unit/quantity-1.1.0 value: 3.14159 unit: km - - A quantity consisting of a single value in an array - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 [2.71828] unit: A - - A quantity with an array of values - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 [1, 2, 3, 4] unit: s - - A quantity with an n-dimensional array of values - | !unit/quantity-1.1.0 value: !core/ndarray-1.0.0 datatype: float64 data: [[1, 2, 3], [4, 5, 6]] unit: pc type: object properties: value: description: | A vector of one or more values anyOf: - type: number - $ref: "../core/ndarray-1.0.0" unit: description: | The unit corresponding to the values $ref: unit-1.0.0 required: [value, unit] ... asdf-1.3.3/asdf-standard/schemas/stsci.edu/asdf/unit/defunit-1.0.0.yaml0000644000175000017500000000147013243564222024675 0ustar dandan00000000000000%YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://stsci.edu/schemas/asdf/unit/defunit-1.0.0" title: Define a new physical unit. description: | Defines a new unit. It can be used to either: - Define a new base unit. - Create a new unit name that is a equivalent to a given unit. The new unit must be defined before any unit tags that use it. tag: "tag:stsci.edu:asdf/unit/defunit-1.0.0" type: object properties: name: description: The name of the new unit. type: string pattern: "[A-Za-z_][A-Za-z0-9_]+" unit: description: | The unit that the new name is equivalent to. It is optional, and if not provided, or ``null``, this ``defunit`` defines a new base unit. anyOf: - $ref: "unit-1.0.0" - type: "null" required: [name]asdf-1.3.3/asdf-standard/reference_files/0000755000175000017500000000000013246031665017622 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/reference_files/1.0.0/0000755000175000017500000000000013246031665020256 5ustar dandan00000000000000asdf-1.3.3/asdf-standard/reference_files/1.0.0/ascii.asdf0000644000175000017500000000070413243564222022203 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: [ascii, 5] byteorder: big shape: [2] ... BLK0 ` o본Rڥ+*ascii#ASDF BLOCK INDEX %YAML 1.1 --- [350] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/shared.yaml0000644000175000017500000000065113243564222022407 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] subset: !core/ndarray-1.0.0 data: [1, 3, 5, 7] datatype: int64 shape: [4] ... 
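The shared.yaml / shared.asdf reference files above show two tree entries backed by a single binary block, the second recorded as a strided view (offset and strides) into the first. A minimal sketch of producing such a file (assuming asdf and numpy are installed; the file name is illustrative) is simply to put a numpy view, rather than a copy, into the tree:

import asdf
import numpy as np

data = np.arange(8, dtype='int64')
tree = {
    'data': data,
    'subset': data[1::2],   # a view into `data`, not a copy; values [1, 3, 5, 7]
}
# Both entries reference the same block; `subset` is stored with an offset
# and strides, as in the reference file above.
asdf.AsdfFile(tree).write_to('shared_example.asdf')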
asdf-1.3.3/asdf-standard/reference_files/1.0.0/unicode_bmp.asdf0000644000175000017500000000117513243564222023402 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 source: 1 datatype: [ucs4, 2] byteorder: big shape: [2] ... BLK0IS6.cܪMKqBLK0Mt_ZQ#ASDF BLOCK INDEX %YAML 1.1 --- [454, 524] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/stream.asdf0000644000175000017500000000163313243564222022410 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} my_stream: !core/ndarray-1.0.0 source: -1 datatype: float64 byteorder: little shape: ['*', 8] ... BLK0????????@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@asdf-1.3.3/asdf-standard/reference_files/1.0.0/complex.asdf0000644000175000017500000001315113243564222022562 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypec16: !core/ndarray-1.0.0 source: 1 datatype: complex128 byteorder: big shape: [100] datatype>c8: !core/ndarray-1.0.0 source: 0 datatype: complex64 byteorder: big shape: [100] ... BLK0   &^len冄4343434343434344444443433333433343BLK0@@@nA[eWv<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<BLK0   Mzj4ߝoA{`4343434343434344444443433333433343BLK0@@@e1$ǜ90m3<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<#ASDF BLOCK INDEX %YAML 1.1 --- [665, 1519, 3173, 4027] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/unicode_bmp.yaml0000644000175000017500000000065313243564222023427 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 data: ['', Æʩ] datatype: [ucs4, 2] shape: [2] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/ascii.yaml0000644000175000017500000000052113243564222022225 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: ['', ascii] datatype: [ascii, 5] shape: [2] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/exploded.asdf0000644000175000017500000000055013243564222022716 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: exploded0000.asdf datatype: int64 byteorder: little shape: [8] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/compressed.asdf0000644000175000017500000000200513243564222023253 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} bzp2: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [128] zlib: !core/ndarray-1.0.0 source: 1 datatype: int64 byteorder: little shape: [128] ... BLK0bzp2m@]ZBZh91AY&SY\(W?&b4Ʉ0 C4`4` <  a Hc?@"e .`ɣ> $iNJk^&mn˧o (q"Ə"Ls&ΟB*u*֯bͫw.޿Ջ6ܻz8?˛?EӨ@%A ":ܑN$8BLK0zlibm@]Zx-Blܲms-۶{_rC0;‘rc8;NrS4;Ùrs<.rK\2•rk\:׻nr[6Ýr{>{G<1'ߓg<9{^W5{Ûw=>O|3—o|;~_7ßv#ASDF BLOCK INDEX %YAML 1.1 --- [441, 721] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/stream.yaml0000644000175000017500000000126213243564222022433 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} my_stream: !core/ndarray-1.0.0 data: - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] - [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0] - [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0] - [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0] - [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0] - [5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0] - [6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0] - [7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0] datatype: float64 shape: [8, 8] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/shared.asdf0000644000175000017500000000115713243564222022364 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [8] subset: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [4] offset: 8 strides: [16] ... BLK0@@@5YL_A&L#ASDF BLOCK INDEX %YAML 1.1 --- [467] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/exploded0000.asdf0000644000175000017500000000063713243564222023224 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} ... BLK0@@@5YL_A&L#ASDF BLOCK INDEX %YAML 1.1 --- [259] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/unicode_spp.yaml0000644000175000017500000000067313243564222023455 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 data: ['', "\U00010020"] datatype: [ucs4, 2] shape: [2] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/exploded.yaml0000644000175000017500000000053113243564222022742 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] ... 
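The exploded.asdf / exploded0000.asdf pair above keeps the YAML tree and the binary block in separate files. A minimal sketch of producing such an "exploded" file (assuming the asdf library's write_to() supports the all_array_storage option; file names are illustrative):

import asdf
import numpy as np

ff = asdf.AsdfFile({'data': np.arange(8, dtype='int64')})
# 'external' writes each binary block to its own sidecar .asdf file,
# referenced from the main file by name rather than by block index.
ff.write_to('exploded.asdf', all_array_storage='external')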
asdf-1.3.3/asdf-standard/reference_files/1.0.0/int.yaml0000644000175000017500000000244713243564222021740 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypei1: !core/ndarray-1.0.0 data: [127, -128, 0] datatype: int8 shape: [3] datatype>i2: !core/ndarray-1.0.0 data: [32767, -32768, 0] datatype: int16 shape: [3] datatype>i4: !core/ndarray-1.0.0 data: [2147483647, -2147483648, 0] datatype: int32 shape: [3] datatype>u1: !core/ndarray-1.0.0 data: [255, 0] datatype: uint8 shape: [2] datatype>u2: !core/ndarray-1.0.0 data: [65535, 0] datatype: uint16 shape: [2] datatype>u4: !core/ndarray-1.0.0 data: [4294967295, 0] datatype: uint32 shape: [2] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/float.yaml0000644000175000017500000000223513243564222022246 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypef4: !core/ndarray-1.0.0 data: [0.0, -0.0, .nan, .inf, -.inf, -3.4028234663852886e+38, 3.4028234663852886e+38, 1.1920928955078125e-07, 5.960464477539063e-08, 1.1754943508222875e-38] datatype: float32 shape: [10] datatype>f8: !core/ndarray-1.0.0 data: [0.0, -0.0, .nan, .inf, -.inf, -1.7976931348623157e+308, 1.7976931348623157e+308, 2.220446049250313e-16, 1.1102230246251565e-16, 2.2250738585072014e-308] datatype: float64 shape: [10] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/unicode_spp.asdf0000644000175000017500000000117513243564222023426 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypeU: !core/ndarray-1.0.0 source: 1 datatype: [ucs4, 2] byteorder: big shape: [2] ... BLK0'0B9e< BLK0dluэ#` #ASDF BLOCK INDEX %YAML 1.1 --- [454, 524] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/float.asdf0000644000175000017500000000220613243564222022217 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypef4: !core/ndarray-1.0.0 source: 1 datatype: float32 byteorder: big shape: [10] datatype>f8: !core/ndarray-1.0.0 source: 3 datatype: float64 byteorder: big shape: [10] ... BLK0(((1[\Z143BLK0(((*!SeGľ{$nT43BLK0PPPeջ };b<<BLK0PPPl~ArQq<<#ASDF BLOCK INDEX %YAML 1.1 --- [649, 743, 837, 971] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/int.asdf0000644000175000017500000000424613243564222021712 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! 
tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypei1: !core/ndarray-1.0.0 source: 1 datatype: int8 byteorder: big shape: [3] datatype>i2: !core/ndarray-1.0.0 source: 0 datatype: int16 byteorder: big shape: [3] datatype>i4: !core/ndarray-1.0.0 source: 2 datatype: int32 byteorder: big shape: [3] datatype>u1: !core/ndarray-1.0.0 source: 8 datatype: uint8 byteorder: big shape: [2] datatype>u2: !core/ndarray-1.0.0 source: 7 datatype: uint16 byteorder: big shape: [2] datatype>u4: !core/ndarray-1.0.0 source: 6 datatype: uint32 byteorder: big shape: [2] ... BLK0ebb-R_fBLK0ztuOI+NUBLK0 I R4jBLK0 ҒkS(iZJ<BLK0ztuOI+NUBLK0qɭӜr][s2BLK0ĭ+돩:vBLK0]^w-t<BLK0迯V;/xBLK0迯V;/xBLK0]^w-t<BLK0ĭ+돩:v#ASDF BLOCK INDEX %YAML 1.1 --- [1391, 1451, 1508, 1574, 1640, 1697, 1757, 1819, 1877, 1933, 1989, 2047] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/basic.asdf0000644000175000017500000000077013243564222022177 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 source: 0 datatype: int64 byteorder: little shape: [8] ... BLK0@@@5YL_A&L#ASDF BLOCK INDEX %YAML 1.1 --- [348] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/compressed.yaml0000644000175000017500000000273313243564222023310 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} bzp2: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127] datatype: int64 shape: [128] zlib: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127] datatype: int64 shape: [128] ... asdf-1.3.3/asdf-standard/reference_files/1.0.0/basic.yaml0000644000175000017500000000053113243564222022217 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} data: !core/ndarray-1.0.0 data: [0, 1, 2, 3, 4, 5, 6, 7] datatype: int64 shape: [8] ... 
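The compressed.asdf / compressed.yaml reference files above store the same 128-element integer array twice, once zlib-compressed and once bzip2-compressed. A minimal sketch of writing compressed blocks (assuming write_to() accepts the all_array_compression option; for simplicity every block here uses zlib, whereas the reference file mixes the two codecs):

import asdf
import numpy as np

data = np.arange(128, dtype='int64')
ff = asdf.AsdfFile({'zlib': data.copy(), 'bzp2': data.copy()})
# Compress every binary block with zlib when writing the file.
ff.write_to('compressed_example.asdf', all_array_compression='zlib')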
asdf-1.3.3/asdf-standard/reference_files/1.0.0/complex.yaml0000644000175000017500000004412113243564222022610 0ustar dandan00000000000000#ASDF 1.0.0 #ASDF_STANDARD 1.0.0 %YAML 1.1 %TAG ! tag:stsci.edu:asdf/ --- !core/asdf-1.0.0 asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', name: asdf, version: 1.0.0} datatypec16: !core/ndarray-1.0.0 data: [!core/complex-1.0.0 0j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 -1.79769313486e+308j, !core/complex-1.0.0 1.79769313486e+308j, !core/complex-1.0.0 2.22044604925e-16j, !core/complex-1.0.0 1.11022302463e-16j, !core/complex-1.0.0 2.22507385851e-308j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (-0+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-0-1.79769313486e+308j), !core/complex-1.0.0 1.79769313486e+308j, !core/complex-1.0.0 2.22044604925e-16j, !core/complex-1.0.0 1.11022302463e-16j, !core/complex-1.0.0 2.22507385851e-308j, !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (nan-1.79769313486e+308j), !core/complex-1.0.0 (nan+1.79769313486e+308j), !core/complex-1.0.0 (nan+2.22044604925e-16j), !core/complex-1.0.0 (nan+1.11022302463e-16j), !core/complex-1.0.0 (nan+2.22507385851e-308j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (inf-1.79769313486e+308j), !core/complex-1.0.0 (inf+1.79769313486e+308j), !core/complex-1.0.0 (inf+2.22044604925e-16j), !core/complex-1.0.0 (inf+1.11022302463e-16j), !core/complex-1.0.0 (inf+2.22507385851e-308j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-inf-1.79769313486e+308j), !core/complex-1.0.0 (-inf+1.79769313486e+308j), !core/complex-1.0.0 (-inf+2.22044604925e-16j), !core/complex-1.0.0 (-inf+1.11022302463e-16j), !core/complex-1.0.0 (-inf+2.22507385851e-308j), !core/complex-1.0.0 (-1.79769313486e+308+0j), !core/complex-1.0.0 (-1.79769313486e+308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-1.79769313486e+308-1.79769313486e+308j), !core/complex-1.0.0 (-1.79769313486e+308+1.79769313486e+308j), !core/complex-1.0.0 (-1.79769313486e+308+2.22044604925e-16j), !core/complex-1.0.0 (-1.79769313486e+308+1.11022302463e-16j), !core/complex-1.0.0 (-1.79769313486e+308+2.22507385851e-308j), !core/complex-1.0.0 (1.79769313486e+308+0j), !core/complex-1.0.0 (1.79769313486e+308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.79769313486e+308-1.79769313486e+308j), !core/complex-1.0.0 (1.79769313486e+308+1.79769313486e+308j), !core/complex-1.0.0 (1.79769313486e+308+2.22044604925e-16j), !core/complex-1.0.0 (1.79769313486e+308+1.11022302463e-16j), !core/complex-1.0.0 (1.79769313486e+308+2.22507385851e-308j), !core/complex-1.0.0 (2.22044604925e-16+0j), !core/complex-1.0.0 (2.22044604925e-16+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (2.22044604925e-16-1.79769313486e+308j), 
!core/complex-1.0.0 (2.22044604925e-16+1.79769313486e+308j), !core/complex-1.0.0 (2.22044604925e-16+2.22044604925e-16j), !core/complex-1.0.0 (2.22044604925e-16+1.11022302463e-16j), !core/complex-1.0.0 (2.22044604925e-16+2.22507385851e-308j), !core/complex-1.0.0 (1.11022302463e-16+0j), !core/complex-1.0.0 (1.11022302463e-16+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.11022302463e-16-1.79769313486e+308j), !core/complex-1.0.0 (1.11022302463e-16+1.79769313486e+308j), !core/complex-1.0.0 (1.11022302463e-16+2.22044604925e-16j), !core/complex-1.0.0 (1.11022302463e-16+1.11022302463e-16j), !core/complex-1.0.0 (1.11022302463e-16+2.22507385851e-308j), !core/complex-1.0.0 (2.22507385851e-308+0j), !core/complex-1.0.0 (2.22507385851e-308+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (2.22507385851e-308-1.79769313486e+308j), !core/complex-1.0.0 (2.22507385851e-308+1.79769313486e+308j), !core/complex-1.0.0 (2.22507385851e-308+2.22044604925e-16j), !core/complex-1.0.0 (2.22507385851e-308+1.11022302463e-16j), !core/complex-1.0.0 (2.22507385851e-308+2.22507385851e-308j)] datatype: complex128 shape: [100] datatype>c8: !core/ndarray-1.0.0 data: [!core/complex-1.0.0 0j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 -3.40282346639e+38j, !core/complex-1.0.0 3.40282346639e+38j, !core/complex-1.0.0 1.19209289551e-07j, !core/complex-1.0.0 5.96046447754e-08j, !core/complex-1.0.0 1.17549435082e-38j, !core/complex-1.0.0 0j, !core/complex-1.0.0 (-0+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-0-3.40282346639e+38j), !core/complex-1.0.0 3.40282346639e+38j, !core/complex-1.0.0 1.19209289551e-07j, !core/complex-1.0.0 5.96046447754e-08j, !core/complex-1.0.0 1.17549435082e-38j, !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (nan-3.40282346639e+38j), !core/complex-1.0.0 (nan+3.40282346639e+38j), !core/complex-1.0.0 (nan+1.19209289551e-07j), !core/complex-1.0.0 (nan+5.96046447754e-08j), !core/complex-1.0.0 (nan+1.17549435082e-38j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (inf-3.40282346639e+38j), !core/complex-1.0.0 (inf+3.40282346639e+38j), !core/complex-1.0.0 (inf+1.19209289551e-07j), !core/complex-1.0.0 (inf+5.96046447754e-08j), !core/complex-1.0.0 (inf+1.17549435082e-38j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (-inf+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-inf-3.40282346639e+38j), !core/complex-1.0.0 (-inf+3.40282346639e+38j), !core/complex-1.0.0 (-inf+1.19209289551e-07j), !core/complex-1.0.0 (-inf+5.96046447754e-08j), !core/complex-1.0.0 (-inf+1.17549435082e-38j), !core/complex-1.0.0 (-3.40282346639e+38+0j), !core/complex-1.0.0 (-3.40282346639e+38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (-3.40282346639e+38-3.40282346639e+38j), !core/complex-1.0.0 (-3.40282346639e+38+3.40282346639e+38j), !core/complex-1.0.0 (-3.40282346639e+38+1.19209289551e-07j), !core/complex-1.0.0 
(-3.40282346639e+38+5.96046447754e-08j), !core/complex-1.0.0 (-3.40282346639e+38+1.17549435082e-38j), !core/complex-1.0.0 (3.40282346639e+38+0j), !core/complex-1.0.0 (3.40282346639e+38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (3.40282346639e+38-3.40282346639e+38j), !core/complex-1.0.0 (3.40282346639e+38+3.40282346639e+38j), !core/complex-1.0.0 (3.40282346639e+38+1.19209289551e-07j), !core/complex-1.0.0 (3.40282346639e+38+5.96046447754e-08j), !core/complex-1.0.0 (3.40282346639e+38+1.17549435082e-38j), !core/complex-1.0.0 (1.19209289551e-07+0j), !core/complex-1.0.0 (1.19209289551e-07+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.19209289551e-07-3.40282346639e+38j), !core/complex-1.0.0 (1.19209289551e-07+3.40282346639e+38j), !core/complex-1.0.0 (1.19209289551e-07+1.19209289551e-07j), !core/complex-1.0.0 (1.19209289551e-07+5.96046447754e-08j), !core/complex-1.0.0 (1.19209289551e-07+1.17549435082e-38j), !core/complex-1.0.0 (5.96046447754e-08+0j), !core/complex-1.0.0 (5.96046447754e-08+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (5.96046447754e-08-3.40282346639e+38j), !core/complex-1.0.0 (5.96046447754e-08+3.40282346639e+38j), !core/complex-1.0.0 (5.96046447754e-08+1.19209289551e-07j), !core/complex-1.0.0 (5.96046447754e-08+5.96046447754e-08j), !core/complex-1.0.0 (5.96046447754e-08+1.17549435082e-38j), !core/complex-1.0.0 (1.17549435082e-38+0j), !core/complex-1.0.0 (1.17549435082e-38+0j), !core/complex-1.0.0 (nan+nanj), !core/complex-1.0.0 (nan+infj), !core/complex-1.0.0 (nan-infj), !core/complex-1.0.0 (1.17549435082e-38-3.40282346639e+38j), !core/complex-1.0.0 (1.17549435082e-38+3.40282346639e+38j), !core/complex-1.0.0 (1.17549435082e-38+1.19209289551e-07j), !core/complex-1.0.0 (1.17549435082e-38+5.96046447754e-08j), !core/complex-1.0.0 (1.17549435082e-38+1.17549435082e-38j)] datatype: complex64 shape: [100] ... asdf-1.3.3/setup.cfg0000644000175000017500000000120213246003441013572 0ustar dandan00000000000000[build_sphinx] source-dir = docs build-dir = docs/_build all_files = 1 [upload_docs] upload-dir = docs/_build/html show-response = 1 [tool:pytest] minversion = 2.2 norecursedirs = build docs/_build doctest_plus = enabled [ah_bootstrap] auto_use = True [metadata] package_name = asdf description = Python tools to handle ASDF files long_description = Advanced Scienctific Data Format (ASDF) is a next generation interchange format for scientific data author = Erik Bray, Michael Droettboom author_email = mdroe@stsci.edu license = BSD url = http://github.com/spacetelescope/asdf edit_on_github = False github_project = spacetelescope/asdf asdf-1.3.3/ah_bootstrap.py0000644000175000017500000010434413243547254015037 0ustar dandan00000000000000""" This bootstrap module contains code for ensuring that the astropy_helpers package will be importable by the time the setup.py script runs. It also includes some workarounds to ensure that a recent-enough version of setuptools is being used for the installation. This module should be the first thing imported in the setup.py of distributions that make use of the utilities in astropy_helpers. If the distribution ships with its own copy of astropy_helpers, this module will first attempt to import from the shipped copy. 
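A minimal setup.py sketch of this arrangement (illustrative only, not this package's actual setup.py): importing the module first is what triggers the bootstrapping configured in setup.cfg.

    # setup.py
    import ah_bootstrap   # must come first; reads the [ah_bootstrap] section of setup.cfg
    from setuptools import setup

    setup()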
However, it will also check PyPI to see if there are any bug-fix releases on top of the current version that may be useful to get past platform-specific bugs that have been fixed. When running setup.py, use the ``--offline`` command-line option to disable the auto-upgrade checks. When this module is imported or otherwise executed it automatically calls a main function that attempts to read the project's setup.cfg file, which it checks for a configuration section called ``[ah_bootstrap]`` the presences of that section, and options therein, determine the next step taken: If it contains an option called ``auto_use`` with a value of ``True``, it will automatically call the main function of this module called `use_astropy_helpers` (see that function's docstring for full details). Otherwise no further action is taken (however, ``ah_bootstrap.use_astropy_helpers`` may be called manually from within the setup.py script). Additional options in the ``[ah_boostrap]`` section of setup.cfg have the same names as the arguments to `use_astropy_helpers`, and can be used to configure the bootstrap script when ``auto_use = True``. See https://github.com/astropy/astropy-helpers for more details, and for the latest version of this module. """ import contextlib import errno import imp import io import locale import os import re import subprocess as sp import sys try: from ConfigParser import ConfigParser, RawConfigParser except ImportError: from configparser import ConfigParser, RawConfigParser if sys.version_info[0] < 3: _str_types = (str, unicode) _text_type = unicode PY3 = False else: _str_types = (str, bytes) _text_type = str PY3 = True # What follows are several import statements meant to deal with install-time # issues with either missing or misbehaving pacakges (including making sure # setuptools itself is installed): # Some pre-setuptools checks to ensure that either distribute or setuptools >= # 0.7 is used (over pre-distribute setuptools) if it is available on the path; # otherwise the latest setuptools will be downloaded and bootstrapped with # ``ez_setup.py``. 
This used to be included in a separate file called # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py try: import pkg_resources _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') # This may raise a DistributionNotFound in which case no version of # setuptools or distribute is properly installed _setuptools = pkg_resources.get_distribution('setuptools') if _setuptools not in _setuptools_req: # Older version of setuptools; check if we have distribute; again if # this results in DistributionNotFound we want to give up _distribute = pkg_resources.get_distribution('distribute') if _setuptools != _distribute: # It's possible on some pathological systems to have an old version # of setuptools and distribute on sys.path simultaneously; make # sure distribute is the one that's used sys.path.insert(1, _distribute.location) _distribute.activate() imp.reload(pkg_resources) except: # There are several types of exceptions that can occur here; if all else # fails bootstrap and use the bootstrapped version from ez_setup import use_setuptools use_setuptools() # typing as a dependency for 1.6.1+ Sphinx causes issues when imported after # initializing submodule with ah_boostrap.py # See discussion and references in # https://github.com/astropy/astropy-helpers/issues/302 try: import typing # noqa except ImportError: pass # Note: The following import is required as a workaround to # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this # module now, it will get cleaned up after `run_setup` is called, but that will # later cause the TemporaryDirectory class defined in it to stop working when # used later on by setuptools try: import setuptools.py31compat # noqa except ImportError: pass # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass # End compatibility imports... # In case it didn't successfully import before the ez_setup checks import pkg_resources from setuptools import Distribution from setuptools.package_index import PackageIndex from setuptools.sandbox import run_setup from distutils import log from distutils.debug import DEBUG # TODO: Maybe enable checking for a specific version of astropy_helpers? DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' # Defaults for other options DOWNLOAD_IF_NEEDED = True INDEX_URL = 'https://pypi.python.org/simple' USE_GIT = True OFFLINE = False AUTO_UPGRADE = True # A list of all the configuration options and their required types CFG_OPTIONS = [ ('auto_use', bool), ('path', str), ('download_if_needed', bool), ('index_url', str), ('use_git', bool), ('offline', bool), ('auto_upgrade', bool) ] class _Bootstrapper(object): """ Bootstrapper implementation. See ``use_astropy_helpers`` for parameter documentation. 
""" def __init__(self, path=None, index_url=None, use_git=None, offline=None, download_if_needed=None, auto_upgrade=None): if path is None: path = PACKAGE_NAME if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') if PY3 and not isinstance(path, _text_type): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode self.path = path # Set other option attributes, using defaults where necessary self.index_url = index_url if index_url is not None else INDEX_URL self.offline = offline if offline is not None else OFFLINE # If offline=True, override download and auto-upgrade if self.offline: download_if_needed = False auto_upgrade = False self.download = (download_if_needed if download_if_needed is not None else DOWNLOAD_IF_NEEDED) self.auto_upgrade = (auto_upgrade if auto_upgrade is not None else AUTO_UPGRADE) # If this is a release then the .git directory will not exist so we # should not use git. git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) if use_git is None and not git_dir_exists: use_git = False self.use_git = use_git if use_git is not None else USE_GIT # Declared as False by default--later we check if astropy-helpers can be # upgraded from PyPI, but only if not using a source distribution (as in # the case of import from a git submodule) self.is_submodule = False @classmethod def main(cls, argv=None): if argv is None: argv = sys.argv config = cls.parse_config() config.update(cls.parse_command_line(argv)) auto_use = config.pop('auto_use', False) bootstrapper = cls(**config) if auto_use: # Run the bootstrapper, otherwise the setup.py is using the old # use_astropy_helpers() interface, in which case it will run the # bootstrapper manually after reconfiguring it. bootstrapper.run() return bootstrapper @classmethod def parse_config(cls): if not os.path.exists('setup.cfg'): return {} cfg = ConfigParser() try: cfg.read('setup.cfg') except Exception as e: if DEBUG: raise log.error( "Error reading setup.cfg: {0!r}\n{1} will not be " "automatically bootstrapped and package installation may fail." "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) return {} if not cfg.has_section('ah_bootstrap'): return {} config = {} for option, type_ in CFG_OPTIONS: if not cfg.has_option('ah_bootstrap', option): continue if type_ is bool: value = cfg.getboolean('ah_bootstrap', option) else: value = cfg.get('ah_bootstrap', option) config[option] = value return config @classmethod def parse_command_line(cls, argv=None): if argv is None: argv = sys.argv config = {} # For now we just pop recognized ah_bootstrap options out of the # arg list. This is imperfect; in the unlikely case that a setup.py # custom command or even custom Distribution class defines an argument # of the same name then we will break that. However there's a catch22 # here that we can't just do full argument parsing right here, because # we don't yet know *how* to parse all possible command-line arguments. 
if '--no-git' in argv: config['use_git'] = False argv.remove('--no-git') if '--offline' in argv: config['offline'] = True argv.remove('--offline') return config def run(self): strategies = ['local_directory', 'local_file', 'index'] dist = None # First, remove any previously imported versions of astropy_helpers; # this is necessary for nested installs where one package's installer # is installing another package via setuptools.sandbox.run_setup, as in # the case of setup_requires for key in list(sys.modules): try: if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): del sys.modules[key] except AttributeError: # Sometimes mysterious non-string things can turn up in # sys.modules continue # Check to see if the path is a submodule self.is_submodule = self._check_submodule() for strategy in strategies: method = getattr(self, 'get_{0}_dist'.format(strategy)) dist = method() if dist is not None: break else: raise _AHBootstrapSystemExit( "No source found for the {0!r} package; {0} must be " "available and importable as a prerequisite to building " "or installing this package.".format(PACKAGE_NAME)) # This is a bit hacky, but if astropy_helpers was loaded from a # directory/submodule its Distribution object gets a "precedence" of # "DEVELOP_DIST". However, in other cases it gets a precedence of # "EGG_DIST". However, when activing the distribution it will only be # placed early on sys.path if it is treated as an EGG_DIST, so always # do that dist = dist.clone(precedence=pkg_resources.EGG_DIST) # Otherwise we found a version of astropy-helpers, so we're done # Just active the found distribution on sys.path--if we did a # download this usually happens automatically but it doesn't hurt to # do it again # Note: Adding the dist to the global working set also activates it # (makes it importable on sys.path) by default. try: pkg_resources.working_set.add(dist, replace=True) except TypeError: # Some (much) older versions of setuptools do not have the # replace=True option here. These versions are old enough that all # bets may be off anyways, but it's easy enough to work around just # in case... if dist.key in pkg_resources.working_set.by_key: del pkg_resources.working_set.by_key[dist.key] pkg_resources.working_set.add(dist) @property def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname)) def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. """ if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. 
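For example, setting ``path = astropy_helpers-2.0.6.tar.gz`` (a hypothetical
archive name) in the ``[ah_bootstrap]`` section of ``setup.cfg`` would be
handled by this strategy, since the path refers to a file rather than a
directory.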
""" if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_index_dist(self): if not self.download: log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) return None log.warn( "Downloading {0!r}; run setup.py with the --offline option to " "force offline installation.".format(DIST_NAME)) try: dist = self._do_download() except Exception as e: if DEBUG: raise log.warn( 'Failed to download and/or install {0!r} from {1!r}:\n' '{2}'.format(DIST_NAME, self.index_url, str(e))) dist = None # No need to run auto-upgrade here since we've already presumably # gotten the most up-to-date version from the package index return dist def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... 
return dist return dist def _do_download(self, version='', find_links=None): if find_links: allow_hosts = '' index_url = None else: allow_hosts = None index_url = self.index_url # Annoyingly, setuptools will not handle other arguments to # Distribution (such as options) before handling setup_requires, so it # is not straightforward to programmatically augment the arguments which # are passed to easy_install class _Distribution(Distribution): def get_option_dict(self, command_name): opts = Distribution.get_option_dict(self, command_name) if command_name == 'easy_install': if find_links is not None: opts['find_links'] = ('setup script', find_links) if index_url is not None: opts['index_url'] = ('setup script', index_url) if allow_hosts is not None: opts['allow_hosts'] = ('setup script', allow_hosts) return opts if version: req = '{0}=={1}'.format(DIST_NAME, version) else: req = DIST_NAME attrs = {'setup_requires': [req]} try: if DEBUG: _Distribution(attrs=attrs) else: with _silence(): _Distribution(attrs=attrs) # If the setup_requires succeeded it will have added the new dist to # the main working_set return pkg_resources.working_set.by_key.get(DIST_NAME) except Exception as e: if DEBUG: raise msg = 'Error retrieving {0} from {1}:\n{2}' if find_links: source = find_links[0] elif index_url != INDEX_URL: source = index_url else: source = 'PyPI' raise Exception(msg.format(DIST_NAME, source, repr(e))) def _do_upgrade(self, dist): # Build up a requirement for a higher bugfix release but a lower minor # release (so API compatibility is guaranteed) next_version = _next_version(dist.parsed_version) req = pkg_resources.Requirement.parse( '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) package_index = PackageIndex(index_url=self.index_url) upgrade = package_index.obtain(req) if upgrade is not None: return self._do_download(version=upgrade.version) def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git() def _check_submodule_using_git(self): """ Check if the given path is a git submodule. If so, attempt to initialize and/or update the submodule if needed. This function makes calls to the ``git`` command in subprocesses. The ``_check_submodule_no_git`` option uses pure Python to check if the given path looks like a git submodule, but it cannot perform updates. """ cmd = ['git', 'submodule', 'status', '--', self.path] try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except _CommandNotFound: # The git command simply wasn't found; this is most likely the # case on user systems that don't have git and are simply # trying to install the package from PyPI or a source # distribution. Silently ignore this case and simply don't try # to use submodules return False stderr = stderr.strip() if returncode != 0 and stderr: # Unfortunately the return code alone cannot be relied on, as # earlier versions of git returned 0 even if the requested submodule # does not exist # This is a warning that occurs in perl (from running git submodule) # which only occurs with a malformatted locale setting which can # happen sometimes on OSX. 
See again # https://github.com/astropy/astropy/issues/2749 perl_warning = ('perl: warning: Falling back to the standard locale ' '("C").') if not stderr.strip().endswith(perl_warning): # Some other unknown error condition occurred log.warn('git submodule command failed ' 'unexpectedly:\n{0}'.format(stderr)) return False # Output of `git submodule status` is as follows: # # 1. Status indicator: '-' if the submodule is uninitialized, '+' if the # submodule is initialized but is not at the commit currently indicated # in .gitmodules (and thus needs to be updated), or 'U' if the # submodule is in an unstable state (i.e. has merge conflicts) # # 2. SHA-1 hash of the current commit of the submodule (we don't really # need this information but it's useful for checking that the output is # correct) # # 3. The output of `git describe` for the submodule's current commit # hash (this includes for example what branches the commit is on) but # only if the submodule is initialized. We ignore this information for # now _git_submodule_status_re = re.compile( '^(?P<status>[+-U ])(?P<sha1>[0-9a-f]{40}) ' '(?P<submodule>\S+)( .*)?$') # The stdout should only contain one line--the status of the # requested submodule m = _git_submodule_status_re.match(stdout) if m: # Yes, the path *is* a git submodule self._update_submodule(m.group('submodule'), m.group('status')) return True else: log.warn( 'Unexpected output from `git submodule status`:\n{0}\n' 'Will attempt import from {1!r} regardless.'.format( stdout, self.path)) return False def _check_submodule_no_git(self): """ Like ``_check_submodule_using_git``, but simply parses the .gitmodules file to determine if the supplied path is a git submodule, and does not exec any subprocesses. This can only determine if a path is a submodule--it does not perform updates, etc. This function may need to be updated if the format of the .gitmodules file is changed between git versions. """ gitmodules_path = os.path.abspath('.gitmodules') if not os.path.isfile(gitmodules_path): return False # This is a minimal reader for gitconfig-style files. It handles a few of # the quirks that make gitconfig files incompatible with ConfigParser-style # files, but does not support the full gitconfig syntax (just enough # needed to read a .gitmodules file).
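# For reference, the kind of .gitmodules entry this reader needs to understand
# looks like the following (hypothetical URL):
#
#     [submodule "astropy_helpers"]
#         path = astropy_helpers
#         url = https://github.com/astropy/astropy-helpers.git
#
# Only the 'path' option of each section is consulted below.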
gitmodules_fileobj = io.StringIO() # Must use io.open for cross-Python-compatible behavior wrt unicode with io.open(gitmodules_path) as f: for line in f: # gitconfig files are more flexible with leading whitespace; just # go ahead and remove it line = line.lstrip() # comments can start with either # or ; if line and line[0] in ('#', ';'): continue gitmodules_fileobj.write(line) gitmodules_fileobj.seek(0) cfg = RawConfigParser() try: cfg.readfp(gitmodules_fileobj) except Exception as exc: log.warn('Malformatted .gitmodules file: {0}\n' '{1} cannot be assumed to be a git submodule.'.format( exc, self.path)) return False for section in cfg.sections(): if not cfg.has_option(section, 'path'): continue submodule_path = cfg.get(section, 'path').rstrip(os.sep) if submodule_path == self.path.rstrip(os.sep): return True return False def _update_submodule(self, submodule, status): if status == ' ': # The submodule is up to date; no action necessary return elif status == '-': if self.offline: raise _AHBootstrapSystemExit( "Cannot initialize the {0} submodule in --offline mode; " "this requires being able to clone the submodule from an " "online repository.".format(submodule)) cmd = ['update', '--init'] action = 'Initializing' elif status == '+': cmd = ['update'] action = 'Updating' if self.offline: cmd.append('--no-fetch') elif status == 'U': raise _AHBootstrapSystemExit( 'Error: Submodule {0} contains unresolved merge conflicts. ' 'Please complete or abandon any changes in the submodule so that ' 'it is in a usable state, then try again.'.format(submodule)) else: log.warn('Unknown status {0!r} for git submodule {1!r}. Will ' 'attempt to use the submodule as-is, but try to ensure ' 'that the submodule is in a clean state and contains no ' 'conflicts or errors.\n{2}'.format(status, submodule, _err_help_msg)) return err_msg = None cmd = ['git', 'submodule'] + cmd + ['--', submodule] log.warn('{0} {1} submodule with: `{2}`'.format( action, submodule, ' '.join(cmd))) try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except OSError as e: err_msg = str(e) else: if returncode != 0: err_msg = stderr if err_msg is not None: log.warn('An unexpected error occurred updating the git submodule ' '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) class _CommandNotFound(OSError): """ An exception raised when a command run with run_cmd is not found on the system. """ def run_cmd(cmd): """ Run a command in a subprocess, given as a list of command-line arguments. Returns a ``(returncode, stdout, stderr)`` tuple. """ try: p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) # XXX: May block if either stdout or stderr fill their buffers; # however for the commands this is currently used for that is # unlikely (they should have very brief output) stdout, stderr = p.communicate() except OSError as e: if DEBUG: raise if e.errno == errno.ENOENT: msg = 'Command not found: `{0}`'.format(' '.join(cmd)) raise _CommandNotFound(msg, cmd) else: raise _AHBootstrapSystemExit( 'An unexpected error occurred when running the ' '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) # Can fail if the default locale is not configured properly. See # https://github.com/astropy/astropy/issues/2749. For the purposes under # consideration 'latin1' is an acceptable fallback.
try: stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' except ValueError: # Due to an OSX oddity locale.getdefaultlocale() can also crash # depending on the user's locale/language settings. See: # http://bugs.python.org/issue18378 stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible if not isinstance(stdout, _text_type): stdout = stdout.decode(stdio_encoding, 'replace') if not isinstance(stderr, _text_type): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) def _next_version(version): """ Given a parsed version from pkg_resources.parse_version, returns a new version string with the next minor version. Examples ======== >>> _next_version(pkg_resources.parse_version('1.2.3')) '1.3.0' """ if hasattr(version, 'base_version'): # New version parsing from setuptools >= 8.0 if version.base_version: parts = version.base_version.split('.') else: parts = [] else: parts = [] for part in version: if part.startswith('*'): break parts.append(part) parts = [int(p) for p in parts] if len(parts) < 3: parts += [0] * (3 - len(parts)) major, minor, micro = parts[:3] return '{0}.{1}.{2}'.format(major, minor + 1, 0) class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x encoding = 'utf-8' def write(self, s): pass def flush(self): pass @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr _err_help_msg = """ If the problem persists consider installing astropy_helpers manually using pip (`pip install astropy_helpers`) or by manually downloading the source archive, extracting it, and installing by running `python setup.py install` from the root of the extracted source code. """ class _AHBootstrapSystemExit(SystemExit): def __init__(self, *args): if not args: msg = 'An unknown problem occurred bootstrapping astropy_helpers.' else: msg = args[0] msg += '\n' + _err_help_msg super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) BOOTSTRAPPER = _Bootstrapper.main() def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. 
If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run() asdf-1.3.3/CHANGES.rst0000644000175000017500000001167413246031624013575 0ustar dandan000000000000001.3.3 (2018-03-01) ------------------ - Update test infrastructure to rely on new Astropy v3.0 plugins. [#461] - Disable use of 2to3. This was causing test failures on Debian builds. [#463] 1.3.2 (2018-02-22) ------------------ - Updates to allow this version of ASDF to be compatible with Astropy v3.0. [#450] - Remove tests that are no longer relevant due to latest updates to Astropy's testing infrastructure. [#458] 1.3.1 (2017-11-02) ------------------ - Relax requirement on ``semantic_version`` version to 2.3.1. [#361] - Fix bug when retrieving file format version from new ASDF file. [#365] - Fix bug when duplicating inline arrays. [#370] - Allow tag references using the tag URI scheme to be resolved in schema files. [#371] 1.3.0 (2017-10-24) ------------------ - Fixed a bug in reading data from an "http:" url. [#231] - Implements v 1.1.0 of the asdf schemas. [#233] - Added a function ``is_asdf_file`` which inspects the input and returns ``True`` or ``False``. [#239] - The ``open`` method of ``AsdfInFits`` now accepts URIs and open file handles in addition to HDULists. The ``open`` method of ``AsdfFile`` will now try to parse the given URI or file handle as ``AsdfInFits`` if it is not obviously a regular ASDF file. [#241] - Updated WCS frame fields ``obsgeoloc`` and ``obsgeovel`` to reflect recent updates in ``astropy`` that changed representation from ``Quantity`` to ``CartesianRepresentation``. Updated to reflect ``astropy`` change that combines ``galcen_ra`` and ``galcen_dec`` into ``galcen_coord``. Added support for new field ``galcen_v_sun``. Added support for required module versions for tag classes. [#244] - Added support for ``lz4`` compression algorithm [#258]. Also added support for using a different compression algorithm for writing out a file than the one that was used for reading the file (e.g. to convert blocks to use a different compression algorithm) [#257] - Tag classes may now use an optional ``supported_versions`` attribute to declare exclusive support for particular versions of the corresponding schema. 
If this attribute is omitted (as it is for most existing tag classes), the tag is assumed to be compatible with all versions of the corresponding schema. If ``supported_versions`` is provided, the tag class implementation can include code that is conditioned on the schema version. If an incompatible schema is encountered, or if deserialization of the tagged object fails with an exception, a raw Python data structure will be returned. [#272] - Added option to ``AsdfFile.open`` to allow suppression of warning messages when mismatched schema versions are encountered. [#294] - Added a diff tool to ``asdftool`` to allow for visual comparison of pairs of ASDF files. [#286] - Added command to ``asdftool`` to display available tags. [#303] - When possible, display name of ASDF file that caused version mismatch warning. [#306] - Issue a warning when an unrecognized tag is encountered. [#295] This warning is silenced by default, but can be enabled with a parameter to the ``AsdfFile`` constructor, or to ``AsdfFile.open``. Also added an option for ignoring warnings from unrecognized schema tags. [#319] - Fix bug with loading JSON schemas in Python 3.5. [#317] - Remove all remnants of support for Python 2.6. [#333] - Fix issues with the type index used for writing out ASDF files. This ensures that items in the type index are not inadvertently overwritten by later versions of the same type. It also makes sure that schema example tests run against the correct version of the ASDF standard. [#350] - Update time schema to reflect changes in astropy. This fixes an outstanding bug. [#343] - Add ``copy_arrays`` option to ``asdf.open`` to control whether or not underlying array data should be memory mapped, if possible. [#355] - Allow the tree to be accessed using top-level ``__getitem__`` and ``__setitem__``. [#352] 1.2.1(2016-11-07) ----------------- - Make asdf conditionally dependent on the version of astropy to allow running it with older versions of astropy. [#228] 1.2.0(2016-10-04) ----------------- - Added Tabular model. [#214] - Forced new blocks to be contiguous [#221] - Rewrote code which tags complex objects [#223] - Fixed version error message [#224] 1.0.5 (2016-06-28) ------------------ - Fixed a memory leak when reading wcs that grew memory to over 10 Gb. [#200] 1.0.4 (2016-05-25) ------------------ - Added wrapper class for astropy.core.Time, TaggedTime. [#198] 1.0.2 (2016-02-29) ------------------ - Renamed package to ASDF. [#190] - Stopped support for Python 2.6 [#191] 1.0.1 (2016-01-08) ------------------ - Fixed installation from the source tarball on Python 3. [#187] - Fixed error handling when opening ASDF files not supported by the current version of asdf. [#178] - Fixed parse error that could occur sometimes when YAML data was read from a stream. [#183] 1.0.0 (2015-09-18) ------------------ - Initial release. asdf-1.3.3/astropy_helpers/0000755000175000017500000000000013246031665015212 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/LICENSE.rst0000644000175000017500000000272313243564211017025 0ustar dandan00000000000000Copyright (c) 2014, Astropy Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. asdf-1.3.3/astropy_helpers/README.rst0000644000175000017500000000503313246003560016673 0ustar dandan00000000000000astropy-helpers =============== * Stable versions: https://pypi.org/project/astropy-helpers/ * Development version, issue tracker: https://github.com/astropy/astropy-helpers This project provides a Python package, ``astropy_helpers``, which includes many build, installation, and documentation-related tools used by the Astropy project, but packaged separately for use by other projects that wish to leverage this work. The motivation behind this package and details of its implementation are in the accepted `Astropy Proposal for Enhancement (APE) 4 `_. The ``astropy_helpers.extern`` sub-module includes modules developed elsewhere that are bundled here for convenience. At the moment, this consists of the following two sphinx extensions: * `numpydoc `_, a Sphinx extension developed as part of the Numpy project. This is used to parse docstrings in Numpy format * `sphinx-automodapi `_, a Sphinx extension developed as part of the Astropy project. This used to be developed directly in ``astropy-helpers`` but is now a standalone package. Issues with these sub-modules should be reported in their respective repositories, and we will regularly update the bundled versions to reflect the latest released versions. ``astropy_helpers`` includes a special "bootstrap" module called ``ah_bootstrap.py`` which is intended to be used by a project's setup.py in order to ensure that the ``astropy_helpers`` package is available for build/installation. This is similar to the ``ez_setup.py`` module that is shipped with some projects to bootstrap `setuptools `_. As described in APE4, the version numbers for ``astropy_helpers`` follow the corresponding major/minor version of the `astropy core package `_, but with an independent sequence of micro (bugfix) version numbers. Hence, the initial release is 0.4, in parallel with Astropy v0.4, which will be the first version of Astropy to use ``astropy-helpers``. For examples of how to implement ``astropy-helpers`` in a project, see the ``setup.py`` and ``setup.cfg`` files of the `Affiliated package template `_. .. image:: https://travis-ci.org/astropy/astropy-helpers.svg :target: https://travis-ci.org/astropy/astropy-helpers .. 
image:: https://coveralls.io/repos/astropy/astropy-helpers/badge.svg :target: https://coveralls.io/r/astropy/astropy-helpers asdf-1.3.3/astropy_helpers/setup.py0000755000175000017500000000364013246003560016723 0ustar dandan00000000000000#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import ah_bootstrap import pkg_resources from setuptools import setup from astropy_helpers.setup_helpers import (register_commands, get_package_info, add_exclude_packages) from astropy_helpers.version_helpers import generate_version_py NAME = 'astropy_helpers' VERSION = '2.0.6' RELEASE = 'dev' not in VERSION generate_version_py(NAME, VERSION, RELEASE, False, uses_git=not RELEASE) # Use the updated version including the git rev count from astropy_helpers.version import version as VERSION add_exclude_packages(['astropy_helpers.tests']) cmdclass = register_commands(NAME, VERSION, RELEASE) # This package actually doesn't use the Astropy test command del cmdclass['test'] setup( name=pkg_resources.safe_name(NAME), # astropy_helpers -> astropy-helpers version=VERSION, description='Utilities for building and installing Astropy, Astropy ' 'affiliated packages, and their respective documentation.', author='The Astropy Developers', author_email='astropy.team@gmail.com', license='BSD', url=' https://github.com/astropy/astropy-helpers', long_description=open('README.rst').read(), classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Framework :: Setuptools Plugin', 'Framework :: Sphinx :: Extension', 'Framework :: Sphinx :: Theme', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Archiving :: Packaging' ], cmdclass=cmdclass, zip_safe=False, **get_package_info() ) asdf-1.3.3/astropy_helpers/.travis.yml0000644000175000017500000000556213246003560017324 0ustar dandan00000000000000# We set the language to c because python isn't supported on the MacOS X nodes # on Travis. However, the language ends up being irrelevant anyway, since we # install Python ourselves using conda. language: c os: - osx - linux # Setting sudo to false opts in to Travis-CI container-based builds. sudo: false env: matrix: - PYTHON_VERSION=2.7 EVENT_TYPE='push pull_request cron' - PYTHON_VERSION=3.4 SETUPTOOLS_VERSION=20 - PYTHON_VERSION=3.5 - PYTHON_VERSION=3.6 SETUPTOOLS_VERSION=dev DEBUG=True CONDA_DEPENDENCIES='sphinx cython numpy six pytest-cov' EVENT_TYPE='push pull_request cron' global: - CONDA_DEPENDENCIES="setuptools sphinx cython numpy" - PIP_DEPENDENCIES="coveralls pytest-cov" - EVENT_TYPE='push pull_request' matrix: include: - os: linux env: PYTHON_VERSION=3.6 SPHINX_VERSION='>1.6' - os: linux env: PYTHON_VERSION=3.6 PIP_DEPENDENCIES='git+https://github.com/sphinx-doc/sphinx.git#egg=sphinx coveralls pytest-cov' CONDA_DEPENDENCIES="setuptools cython numpy" - os: linux env: PYTHON_VERSION=3.5 SPHINX_VERSION='<1.4' - os: linux env: PYTHON_VERSION=3.5 SPHINX_VERSION='<1.5' SETUPTOOLS_VERSION=27 - os: linux env: PYTHON_VERSION=3.6 SPHINX_VERSION='<1.6' SETUPTOOLS_VERSION=27 # Uncomment the following if there are issues in setuptools that we # can't work around quickly - otherwise leave uncommented so that # we notice when things go wrong. 
# # allow_failures: # - env: PYTHON_VERSION=3.6 SETUPTOOLS_VERSION=dev DEBUG=True # CONDA_DEPENDENCIES='sphinx cython numpy six pytest-cov' # EVENT_TYPE='push pull_request cron' install: - git clone git://github.com/astropy/ci-helpers.git - source ci-helpers/travis/setup_conda.sh # We cannot install the developer version of setuptools using pip because # pip tries to remove the previous version of setuptools before the # installation is complete, which causes issues. Instead, we just install # setuptools manually. - if [[ $SETUPTOOLS_VERSION == dev ]]; then git clone http://github.com/pypa/setuptools.git; cd setuptools; python bootstrap.py; python setup.py install; cd ..; fi before_script: # Some of the tests use git commands that require a user to be configured - git config --global user.name "A U Thor" - git config --global user.email "author@example.com" script: # Use full path for coveragerc; see issue #193 - py.test --cov astropy_helpers --cov-config $(pwd)/astropy_helpers/tests/coveragerc astropy_helpers # In conftest.py we produce a .coverage.subprocess that contains coverage # statistics for sub-processes, so we combine it with the main one here. - mv .coverage .coverage.main - coverage combine .coverage.main .coverage.subprocess - coverage report after_success: - coveralls --rcfile=astropy_helpers/tests/coveragerc asdf-1.3.3/astropy_helpers/licenses/0000755000175000017500000000000013246031665017017 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/licenses/LICENSE_ASTROSCRAPPY.rst0000644000175000017500000000315413243564211022643 0ustar dandan00000000000000# The OpenMP helpers include code heavily adapted from astroscrappy, released # under the following license: # # Copyright (c) 2015, Curtis McCully # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, this # list of conditions and the following disclaimer in the documentation and/or # other materials provided with the distribution. # * Neither the name of the Astropy Team nor the names of its contributors may be # used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
asdf-1.3.3/astropy_helpers/licenses/LICENSE_NUMPYDOC.rst0000644000175000017500000001350713243564211022152 0ustar dandan00000000000000------------------------------------------------------------------------------- The files - numpydoc.py - docscrape.py - docscrape_sphinx.py - phantom_import.py have the following license: Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ------------------------------------------------------------------------------- The files - compiler_unparse.py - comment_eater.py - traitsdoc.py have the following license: This software is OSI Certified Open Source Software. OSI Certified is a certification mark of the Open Source Initiative. Copyright (c) 2006, Enthought, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Enthought, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
------------------------------------------------------------------------------- The file - plot_directive.py originates from Matplotlib (http://matplotlib.sf.net/) which has the following license: Copyright (c) 2002-2008 John D. Hunter; All Rights Reserved. 1. This LICENSE AGREEMENT is between John D. Hunter (“JDH”), and the Individual or Organization (“Licensee”) accessing and otherwise using matplotlib software in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, JDH hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use matplotlib 0.98.3 alone or in any derivative version, provided, however, that JDH’s License Agreement and JDH’s notice of copyright, i.e., “Copyright (c) 2002-2008 John D. Hunter; All Rights Reserved” are retained in matplotlib 0.98.3 alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates matplotlib 0.98.3 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to matplotlib 0.98.3. 4. JDH is making matplotlib 0.98.3 available to Licensee on an “AS IS” basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 0.98.3 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB 0.98.3 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING MATPLOTLIB 0.98.3, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between JDH and Licensee. This License Agreement does not grant permission to use JDH trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using matplotlib 0.98.3, Licensee agrees to be bound by the terms and conditions of this License Agreement. asdf-1.3.3/astropy_helpers/licenses/LICENSE_COPYBUTTON.rst0000644000175000017500000000471113243564211022417 0ustar dandan00000000000000Copyright 2014 Python Software Foundation License: PSF PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- . 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. . 2. 
Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. . 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. . 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. . 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. . 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. . 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. . 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. asdf-1.3.3/astropy_helpers/MANIFEST.in0000644000175000017500000000030013243564211016734 0ustar dandan00000000000000include README.rst include CHANGES.rst include LICENSE.rst recursive-include licenses * include ez_setup.py include ah_bootstrap.py exclude *.pyc *.o prune build prune astropy_helpers/tests asdf-1.3.3/astropy_helpers/astropy_helpers.egg-info/0000755000175000017500000000000013246031665022127 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers.egg-info/dependency_links.txt0000644000175000017500000000000113243564211026170 0ustar dandan00000000000000 asdf-1.3.3/astropy_helpers/astropy_helpers.egg-info/PKG-INFO0000644000175000017500000000761313243564211023226 0ustar dandan00000000000000Metadata-Version: 1.1 Name: astropy-helpers Version: 2.0.4 Summary: Utilities for building and installing Astropy, Astropy affiliated packages, and their respective documentation. 
Home-page: https://github.com/astropy/astropy-helpers Author: The Astropy Developers Author-email: astropy.team@gmail.com License: BSD Description-Content-Type: UNKNOWN Description: astropy-helpers =============== * Stable versions: https://pypi.org/project/astropy-helpers/ * Development version, issue tracker: https://github.com/astropy/astropy-helpers This project provides a Python package, ``astropy_helpers``, which includes many build, installation, and documentation-related tools used by the Astropy project, but packaged separately for use by other projects that wish to leverage this work. The motivation behind this package and details of its implementation are in the accepted `Astropy Proposal for Enhancement (APE) 4 `_. The ``astropy_helpers.extern`` sub-module includes modules developed elsewhere that are bundled here for convenience. At the moment, this consists of the following two sphinx extensions: * `numpydoc `_, a Sphinx extension developed as part of the Numpy project. This is used to parse docstrings in Numpy format * `sphinx-automodapi `_, a Sphinx extension developed as part of the Astropy project. This used to be developed directly in ``astropy-helpers`` but is now a standalone package. Issues with these sub-modules should be reported in their respective repositories, and we will regularly update the bundled versions to reflect the latest released versions. ``astropy_helpers`` includes a special "bootstrap" module called ``ah_bootstrap.py`` which is intended to be used by a project's setup.py in order to ensure that the ``astropy_helpers`` package is available for build/installation. This is similar to the ``ez_setup.py`` module that is shipped with some projects to bootstrap `setuptools `_. As described in APE4, the version numbers for ``astropy_helpers`` follow the corresponding major/minor version of the `astropy core package `_, but with an independent sequence of micro (bugfix) version numbers. Hence, the initial release is 0.4, in parallel with Astropy v0.4, which will be the first version of Astropy to use ``astropy-helpers``. For examples of how to implement ``astropy-helpers`` in a project, see the ``setup.py`` and ``setup.cfg`` files of the `Affiliated package template `_. .. image:: https://travis-ci.org/astropy/astropy-helpers.svg :target: https://travis-ci.org/astropy/astropy-helpers .. 
image:: https://coveralls.io/repos/astropy/astropy-helpers/badge.svg :target: https://coveralls.io/r/astropy/astropy-helpers Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Framework :: Setuptools Plugin Classifier: Framework :: Sphinx :: Extension Classifier: Framework :: Sphinx :: Theme Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Software Development :: Build Tools Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: System :: Archiving :: Packaging asdf-1.3.3/astropy_helpers/astropy_helpers.egg-info/top_level.txt0000644000175000017500000000002013243564211024644 0ustar dandan00000000000000astropy_helpers asdf-1.3.3/astropy_helpers/astropy_helpers.egg-info/not-zip-safe0000644000175000017500000000000113243564211024350 0ustar dandan00000000000000 asdf-1.3.3/astropy_helpers/astropy_helpers.egg-info/SOURCES.txt0000644000175000017500000000642513243564211024015 0ustar dandan00000000000000CHANGES.rst LICENSE.rst MANIFEST.in README.rst ah_bootstrap.py ez_setup.py setup.cfg setup.py astropy_helpers/__init__.py astropy_helpers/distutils_helpers.py astropy_helpers/git_helpers.py astropy_helpers/openmp_helpers.py astropy_helpers/setup_helpers.py astropy_helpers/test_helpers.py astropy_helpers/utils.py astropy_helpers/version.py astropy_helpers/version_helpers.py astropy_helpers.egg-info/PKG-INFO astropy_helpers.egg-info/SOURCES.txt astropy_helpers.egg-info/dependency_links.txt astropy_helpers.egg-info/not-zip-safe astropy_helpers.egg-info/top_level.txt astropy_helpers/commands/__init__.py astropy_helpers/commands/_dummy.py astropy_helpers/commands/_test_compat.py astropy_helpers/commands/build_ext.py astropy_helpers/commands/build_py.py astropy_helpers/commands/build_sphinx.py astropy_helpers/commands/install.py astropy_helpers/commands/install_lib.py astropy_helpers/commands/register.py astropy_helpers/commands/setup_package.py astropy_helpers/commands/test.py astropy_helpers/commands/src/compiler.c astropy_helpers/compat/__init__.py astropy_helpers/extern/__init__.py astropy_helpers/extern/setup_package.py astropy_helpers/extern/automodapi/__init__.py astropy_helpers/extern/automodapi/autodoc_enhancements.py astropy_helpers/extern/automodapi/automodapi.py astropy_helpers/extern/automodapi/automodsumm.py astropy_helpers/extern/automodapi/smart_resolver.py astropy_helpers/extern/automodapi/utils.py astropy_helpers/extern/automodapi/templates/autosummary_core/base.rst astropy_helpers/extern/automodapi/templates/autosummary_core/class.rst astropy_helpers/extern/automodapi/templates/autosummary_core/module.rst astropy_helpers/extern/numpydoc/__init__.py astropy_helpers/extern/numpydoc/docscrape.py astropy_helpers/extern/numpydoc/docscrape_sphinx.py astropy_helpers/extern/numpydoc/numpydoc.py astropy_helpers/extern/numpydoc/templates/numpydoc_docstring.rst astropy_helpers/sphinx/__init__.py astropy_helpers/sphinx/conf.py astropy_helpers/sphinx/setup_package.py astropy_helpers/sphinx/ext/__init__.py astropy_helpers/sphinx/ext/changelog_links.py astropy_helpers/sphinx/ext/doctest.py astropy_helpers/sphinx/ext/edit_on_github.py astropy_helpers/sphinx/ext/tocdepthfix.py astropy_helpers/sphinx/ext/tests/__init__.py astropy_helpers/sphinx/local/python2_local_links.inv astropy_helpers/sphinx/local/python3_local_links.inv 
astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html astropy_helpers/sphinx/themes/bootstrap-astropy/layout.html astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html astropy_helpers/sphinx/themes/bootstrap-astropy/searchbox.html astropy_helpers/sphinx/themes/bootstrap-astropy/theme.conf astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout.svg astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout_20.png astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.svg astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.css astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutton.js astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.js licenses/LICENSE_ASTROSCRAPPY.rst licenses/LICENSE_COPYBUTTON.rst licenses/LICENSE_NUMPYDOC.rstasdf-1.3.3/astropy_helpers/appveyor.yml0000644000175000017500000000250213246003560017572 0ustar dandan00000000000000# AppVeyor.com is a Continuous Integration service to build and run tests under # Windows environment: global: PYTHON: "C:\\conda" MINICONDA_VERSION: "latest" CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\ci-helpers\\appveyor\\windows_sdk.cmd" PYTHON_ARCH: "64" # needs to be set for CMD_IN_ENV to succeed. If a mix # of 32 bit and 64 bit builds are needed, move this # to the matrix section. # babel 2.0 is known to break on Windows: # https://github.com/python-babel/babel/issues/174 CONDA_DEPENDENCIES: "numpy Cython sphinx pytest babel!=2.0 setuptools" matrix: - PYTHON_VERSION: "2.7" - PYTHON_VERSION: "3.4" - PYTHON_VERSION: "3.5" - PYTHON_VERSION: "3.6" platform: -x64 install: # Set up ci-helpers - "git clone git://github.com/astropy/ci-helpers.git" - "powershell ci-helpers/appveyor/install-miniconda.ps1" - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - "activate test" # Some of the tests use git commands that require a user to be configured - git config --global user.name "A U Thor" - git config --global user.email "author@example.com" # Not a .NET project, we build the package in the install step instead build: false test_script: - "%CMD_IN_ENV% py.test astropy_helpers" asdf-1.3.3/astropy_helpers/setup.cfg0000644000175000017500000000015313243564211017025 0ustar dandan00000000000000[tool:pytest] norecursedirs = .tox astropy_helpers/tests/package_template python_functions = test_ asdf-1.3.3/astropy_helpers/ah_bootstrap.py0000644000175000017500000010776213246003560020257 0ustar dandan00000000000000""" This bootstrap module contains code for ensuring that the astropy_helpers package will be importable by the time the setup.py script runs. It also includes some workarounds to ensure that a recent-enough version of setuptools is being used for the installation. This module should be the first thing imported in the setup.py of distributions that make use of the utilities in astropy_helpers. If the distribution ships with its own copy of astropy_helpers, this module will first attempt to import from the shipped copy. However, it will also check PyPI to see if there are any bug-fix releases on top of the current version that may be useful to get past platform-specific bugs that have been fixed. When running setup.py, use the ``--offline`` command-line option to disable the auto-upgrade checks. 
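For illustration, a project's ``setup.py`` typically imports this module
before anything that depends on ``astropy_helpers`` (compare the ``setup.py``
shipped alongside this package)::

    import ah_bootstrap
    from astropy_helpers.setup_helpers import register_commands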
When this module is imported or otherwise executed it automatically calls a main function that attempts to read the project's setup.cfg file, which it checks for a configuration section called ``[ah_bootstrap]`` the presences of that section, and options therein, determine the next step taken: If it contains an option called ``auto_use`` with a value of ``True``, it will automatically call the main function of this module called `use_astropy_helpers` (see that function's docstring for full details). Otherwise no further action is taken and by default the system-installed version of astropy-helpers will be used (however, ``ah_bootstrap.use_astropy_helpers`` may be called manually from within the setup.py script). This behavior can also be controlled using the ``--auto-use`` and ``--no-auto-use`` command-line flags. For clarity, an alias for ``--no-auto-use`` is ``--use-system-astropy-helpers``, and we recommend using the latter if needed. Additional options in the ``[ah_boostrap]`` section of setup.cfg have the same names as the arguments to `use_astropy_helpers`, and can be used to configure the bootstrap script when ``auto_use = True``. See https://github.com/astropy/astropy-helpers for more details, and for the latest version of this module. """ import contextlib import errno import imp import io import locale import os import re import subprocess as sp import sys try: from ConfigParser import ConfigParser, RawConfigParser except ImportError: from configparser import ConfigParser, RawConfigParser if sys.version_info[0] < 3: _str_types = (str, unicode) _text_type = unicode PY3 = False else: _str_types = (str, bytes) _text_type = str PY3 = True # What follows are several import statements meant to deal with install-time # issues with either missing or misbehaving pacakges (including making sure # setuptools itself is installed): # Some pre-setuptools checks to ensure that either distribute or setuptools >= # 0.7 is used (over pre-distribute setuptools) if it is available on the path; # otherwise the latest setuptools will be downloaded and bootstrapped with # ``ez_setup.py``. 
This used to be included in a separate file called # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py try: import pkg_resources _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') # This may raise a DistributionNotFound in which case no version of # setuptools or distribute is properly installed _setuptools = pkg_resources.get_distribution('setuptools') if _setuptools not in _setuptools_req: # Older version of setuptools; check if we have distribute; again if # this results in DistributionNotFound we want to give up _distribute = pkg_resources.get_distribution('distribute') if _setuptools != _distribute: # It's possible on some pathological systems to have an old version # of setuptools and distribute on sys.path simultaneously; make # sure distribute is the one that's used sys.path.insert(1, _distribute.location) _distribute.activate() imp.reload(pkg_resources) except: # There are several types of exceptions that can occur here; if all else # fails bootstrap and use the bootstrapped version from ez_setup import use_setuptools use_setuptools() # typing as a dependency for 1.6.1+ Sphinx causes issues when imported after # initializing submodule with ah_boostrap.py # See discussion and references in # https://github.com/astropy/astropy-helpers/issues/302 try: import typing # noqa except ImportError: pass # Note: The following import is required as a workaround to # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this # module now, it will get cleaned up after `run_setup` is called, but that will # later cause the TemporaryDirectory class defined in it to stop working when # used later on by setuptools try: import setuptools.py31compat # noqa except ImportError: pass # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass # End compatibility imports... # In case it didn't successfully import before the ez_setup checks import pkg_resources from setuptools import Distribution from setuptools.package_index import PackageIndex from distutils import log from distutils.debug import DEBUG # TODO: Maybe enable checking for a specific version of astropy_helpers? DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' if PY3: UPPER_VERSION_EXCLUSIVE = None else: UPPER_VERSION_EXCLUSIVE = '3' # Defaults for other options DOWNLOAD_IF_NEEDED = True INDEX_URL = 'https://pypi.python.org/simple' USE_GIT = True OFFLINE = False AUTO_UPGRADE = True # A list of all the configuration options and their required types CFG_OPTIONS = [ ('auto_use', bool), ('path', str), ('download_if_needed', bool), ('index_url', str), ('use_git', bool), ('offline', bool), ('auto_upgrade', bool) ] class _Bootstrapper(object): """ Bootstrapper implementation. See ``use_astropy_helpers`` for parameter documentation. 
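For example, a hypothetical command line such as::

    python setup.py --use-system-astropy-helpers build

has the same effect as setting ``auto_use = False`` in the ``[ah_bootstrap]``
section of ``setup.cfg``: the bundled copy is not bootstrapped and the
system-installed astropy-helpers is used instead.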
""" def __init__(self, path=None, index_url=None, use_git=None, offline=None, download_if_needed=None, auto_upgrade=None): if path is None: path = PACKAGE_NAME if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') if PY3 and not isinstance(path, _text_type): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode self.path = path # Set other option attributes, using defaults where necessary self.index_url = index_url if index_url is not None else INDEX_URL self.offline = offline if offline is not None else OFFLINE # If offline=True, override download and auto-upgrade if self.offline: download_if_needed = False auto_upgrade = False self.download = (download_if_needed if download_if_needed is not None else DOWNLOAD_IF_NEEDED) self.auto_upgrade = (auto_upgrade if auto_upgrade is not None else AUTO_UPGRADE) # If this is a release then the .git directory will not exist so we # should not use git. git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) if use_git is None and not git_dir_exists: use_git = False self.use_git = use_git if use_git is not None else USE_GIT # Declared as False by default--later we check if astropy-helpers can be # upgraded from PyPI, but only if not using a source distribution (as in # the case of import from a git submodule) self.is_submodule = False @classmethod def main(cls, argv=None): if argv is None: argv = sys.argv config = cls.parse_config() config.update(cls.parse_command_line(argv)) auto_use = config.pop('auto_use', False) bootstrapper = cls(**config) if auto_use: # Run the bootstrapper, otherwise the setup.py is using the old # use_astropy_helpers() interface, in which case it will run the # bootstrapper manually after reconfiguring it. bootstrapper.run() return bootstrapper @classmethod def parse_config(cls): if not os.path.exists('setup.cfg'): return {} cfg = ConfigParser() try: cfg.read('setup.cfg') except Exception as e: if DEBUG: raise log.error( "Error reading setup.cfg: {0!r}\n{1} will not be " "automatically bootstrapped and package installation may fail." "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) return {} if not cfg.has_section('ah_bootstrap'): return {} config = {} for option, type_ in CFG_OPTIONS: if not cfg.has_option('ah_bootstrap', option): continue if type_ is bool: value = cfg.getboolean('ah_bootstrap', option) else: value = cfg.get('ah_bootstrap', option) config[option] = value return config @classmethod def parse_command_line(cls, argv=None): if argv is None: argv = sys.argv config = {} # For now we just pop recognized ah_bootstrap options out of the # arg list. This is imperfect; in the unlikely case that a setup.py # custom command or even custom Distribution class defines an argument # of the same name then we will break that. However there's a catch22 # here that we can't just do full argument parsing right here, because # we don't yet know *how* to parse all possible command-line arguments. 
if '--no-git' in argv: config['use_git'] = False argv.remove('--no-git') if '--offline' in argv: config['offline'] = True argv.remove('--offline') if '--auto-use' in argv: config['auto_use'] = True argv.remove('--auto-use') if '--no-auto-use' in argv: config['auto_use'] = False argv.remove('--no-auto-use') if '--use-system-astropy-helpers' in argv: config['auto_use'] = False argv.remove('--use-system-astropy-helpers') return config def run(self): strategies = ['local_directory', 'local_file', 'index'] dist = None # First, remove any previously imported versions of astropy_helpers; # this is necessary for nested installs where one package's installer # is installing another package via setuptools.sandbox.run_setup, as in # the case of setup_requires for key in list(sys.modules): try: if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): del sys.modules[key] except AttributeError: # Sometimes mysterious non-string things can turn up in # sys.modules continue # Check to see if the path is a submodule self.is_submodule = self._check_submodule() for strategy in strategies: method = getattr(self, 'get_{0}_dist'.format(strategy)) dist = method() if dist is not None: break else: raise _AHBootstrapSystemExit( "No source found for the {0!r} package; {0} must be " "available and importable as a prerequisite to building " "or installing this package.".format(PACKAGE_NAME)) # This is a bit hacky, but if astropy_helpers was loaded from a # directory/submodule its Distribution object gets a "precedence" of # "DEVELOP_DIST". However, in other cases it gets a precedence of # "EGG_DIST". However, when activing the distribution it will only be # placed early on sys.path if it is treated as an EGG_DIST, so always # do that dist = dist.clone(precedence=pkg_resources.EGG_DIST) # Otherwise we found a version of astropy-helpers, so we're done # Just active the found distribution on sys.path--if we did a # download this usually happens automatically but it doesn't hurt to # do it again # Note: Adding the dist to the global working set also activates it # (makes it importable on sys.path) by default. try: pkg_resources.working_set.add(dist, replace=True) except TypeError: # Some (much) older versions of setuptools do not have the # replace=True option here. These versions are old enough that all # bets may be off anyways, but it's easy enough to work around just # in case... if dist.key in pkg_resources.working_set.by_key: del pkg_resources.working_set.by_key[dist.key] pkg_resources.working_set.add(dist) @property def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname)) def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. 
""" if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_index_dist(self): if not self.download: log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) return None log.warn( "Downloading {0!r}; run setup.py with the --offline option to " "force offline installation.".format(DIST_NAME)) try: dist = self._do_download() except Exception as e: if DEBUG: raise log.warn( 'Failed to download and/or install {0!r} from {1!r}:\n' '{2}'.format(DIST_NAME, self.index_url, str(e))) dist = None # No need to run auto-upgrade here since we've already presumably # gotten the most up-to-date version from the package index return dist def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): # We use subprocess instead of run_setup from setuptools to # avoid segmentation faults - see the following for more details: # https://github.com/cython/cython/issues/2104 sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) for dist in pkg_resources.find_distributions(path, True): # There should be only one... 
return dist return dist def _do_download(self, version='', find_links=None): if find_links: allow_hosts = '' index_url = None else: allow_hosts = None index_url = self.index_url # Annoyingly, setuptools will not handle other arguments to # Distribution (such as options) before handling setup_requires, so it # is not straightforward to programmatically augment the arguments which # are passed to easy_install class _Distribution(Distribution): def get_option_dict(self, command_name): opts = Distribution.get_option_dict(self, command_name) if command_name == 'easy_install': if find_links is not None: opts['find_links'] = ('setup script', find_links) if index_url is not None: opts['index_url'] = ('setup script', index_url) if allow_hosts is not None: opts['allow_hosts'] = ('setup script', allow_hosts) return opts if version: req = '{0}=={1}'.format(DIST_NAME, version) else: if UPPER_VERSION_EXCLUSIVE is None: req = DIST_NAME else: req = '{0}<{1}'.format(DIST_NAME, UPPER_VERSION_EXCLUSIVE) attrs = {'setup_requires': [req]} # NOTE: we need to parse the config file (e.g. setup.cfg) to make sure # it honours the options set in the [easy_install] section, and we need # to explicitly fetch the requirement eggs as setup_requires does not # get honored in recent versions of setuptools: # https://github.com/pypa/setuptools/issues/1273 try: context = _verbose if DEBUG else _silence with context(): dist = _Distribution(attrs=attrs) try: dist.parse_config_files(ignore_option_errors=True) dist.fetch_build_eggs(req) except TypeError: # On older versions of setuptools, ignore_option_errors # doesn't exist, and the above two lines are not needed # so we can just continue pass # If the setup_requires succeeded it will have added the new dist to # the main working_set return pkg_resources.working_set.by_key.get(DIST_NAME) except Exception as e: if DEBUG: raise msg = 'Error retrieving {0} from {1}:\n{2}' if find_links: source = find_links[0] elif index_url != INDEX_URL: source = index_url else: source = 'PyPI' raise Exception(msg.format(DIST_NAME, source, repr(e))) def _do_upgrade(self, dist): # Build up a requirement for a higher bugfix release but a lower minor # release (so API compatibility is guaranteed) next_version = _next_version(dist.parsed_version) req = pkg_resources.Requirement.parse( '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) package_index = PackageIndex(index_url=self.index_url) upgrade = package_index.obtain(req) if upgrade is not None: return self._do_download(version=upgrade.version) def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git() def _check_submodule_using_git(self): """ Check if the given path is a git submodule. If so, attempt to initialize and/or update the submodule if needed. This function makes calls to the ``git`` command in subprocesses. The ``_check_submodule_no_git`` option uses pure Python to check if the given path looks like a git submodule, but it cannot perform updates. 
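        For reference, a single line of ``git submodule status`` output for an
        up-to-date submodule looks roughly like the following (the hash and
        the trailing ``git describe`` text here are purely illustrative)::

             41a607235bdc335c9c125f828bdd35502a09aff9 astropy_helpers (heads/master)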
""" cmd = ['git', 'submodule', 'status', '--', self.path] try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except _CommandNotFound: # The git command simply wasn't found; this is most likely the # case on user systems that don't have git and are simply # trying to install the package from PyPI or a source # distribution. Silently ignore this case and simply don't try # to use submodules return False stderr = stderr.strip() if returncode != 0 and stderr: # Unfortunately the return code alone cannot be relied on, as # earlier versions of git returned 0 even if the requested submodule # does not exist # This is a warning that occurs in perl (from running git submodule) # which only occurs with a malformatted locale setting which can # happen sometimes on OSX. See again # https://github.com/astropy/astropy/issues/2749 perl_warning = ('perl: warning: Falling back to the standard locale ' '("C").') if not stderr.strip().endswith(perl_warning): # Some other unknown error condition occurred log.warn('git submodule command failed ' 'unexpectedly:\n{0}'.format(stderr)) return False # Output of `git submodule status` is as follows: # # 1: Status indicator: '-' for submodule is uninitialized, '+' if # submodule is initialized but is not at the commit currently indicated # in .gitmodules (and thus needs to be updated), or 'U' if the # submodule is in an unstable state (i.e. has merge conflicts) # # 2. SHA-1 hash of the current commit of the submodule (we don't really # need this information but it's useful for checking that the output is # correct) # # 3. The output of `git describe` for the submodule's current commit # hash (this includes for example what branches the commit is on) but # only if the submodule is initialized. We ignore this information for # now _git_submodule_status_re = re.compile( '^(?P[+-U ])(?P[0-9a-f]{40}) ' '(?P\S+)( .*)?$') # The stdout should only contain one line--the status of the # requested submodule m = _git_submodule_status_re.match(stdout) if m: # Yes, the path *is* a git submodule self._update_submodule(m.group('submodule'), m.group('status')) return True else: log.warn( 'Unexpected output from `git submodule status`:\n{0}\n' 'Will attempt import from {1!r} regardless.'.format( stdout, self.path)) return False def _check_submodule_no_git(self): """ Like ``_check_submodule_using_git``, but simply parses the .gitmodules file to determine if the supplied path is a git submodule, and does not exec any subprocesses. This can only determine if a path is a submodule--it does not perform updates, etc. This function may need to be updated if the format of the .gitmodules file is changed between git versions. """ gitmodules_path = os.path.abspath('.gitmodules') if not os.path.isfile(gitmodules_path): return False # This is a minimal reader for gitconfig-style files. It handles a few of # the quirks that make gitconfig files incompatible with ConfigParser-style # files, but does not support the full gitconfig syntax (just enough # needed to read a .gitmodules file). 
gitmodules_fileobj = io.StringIO() # Must use io.open for cross-Python-compatible behavior wrt unicode with io.open(gitmodules_path) as f: for line in f: # gitconfig files are more flexible with leading whitespace; just # go ahead and remove it line = line.lstrip() # comments can start with either # or ; if line and line[0] in (':', ';'): continue gitmodules_fileobj.write(line) gitmodules_fileobj.seek(0) cfg = RawConfigParser() try: cfg.readfp(gitmodules_fileobj) except Exception as exc: log.warn('Malformatted .gitmodules file: {0}\n' '{1} cannot be assumed to be a git submodule.'.format( exc, self.path)) return False for section in cfg.sections(): if not cfg.has_option(section, 'path'): continue submodule_path = cfg.get(section, 'path').rstrip(os.sep) if submodule_path == self.path.rstrip(os.sep): return True return False def _update_submodule(self, submodule, status): if status == ' ': # The submodule is up to date; no action necessary return elif status == '-': if self.offline: raise _AHBootstrapSystemExit( "Cannot initialize the {0} submodule in --offline mode; " "this requires being able to clone the submodule from an " "online repository.".format(submodule)) cmd = ['update', '--init'] action = 'Initializing' elif status == '+': cmd = ['update'] action = 'Updating' if self.offline: cmd.append('--no-fetch') elif status == 'U': raise _AHBootstrapSystemExit( 'Error: Submodule {0} contains unresolved merge conflicts. ' 'Please complete or abandon any changes in the submodule so that ' 'it is in a usable state, then try again.'.format(submodule)) else: log.warn('Unknown status {0!r} for git submodule {1!r}. Will ' 'attempt to use the submodule as-is, but try to ensure ' 'that the submodule is in a clean state and contains no ' 'conflicts or errors.\n{2}'.format(status, submodule, _err_help_msg)) return err_msg = None cmd = ['git', 'submodule'] + cmd + ['--', submodule] log.warn('{0} {1} submodule with: `{2}`'.format( action, submodule, ' '.join(cmd))) try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except OSError as e: err_msg = str(e) else: if returncode != 0: err_msg = stderr if err_msg is not None: log.warn('An unexpected error occurred updating the git submodule ' '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) class _CommandNotFound(OSError): """ An exception raised when a command run with run_cmd is not found on the system. """ def run_cmd(cmd): """ Run a command in a subprocess, given as a list of command-line arguments. Returns a ``(returncode, stdout, stderr)`` tuple. """ try: p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) # XXX: May block if either stdout or stderr fill their buffers; # however for the commands this is currently used for that is # unlikely (they should have very brief output) stdout, stderr = p.communicate() except OSError as e: if DEBUG: raise if e.errno == errno.ENOENT: msg = 'Command not found: `{0}`'.format(' '.join(cmd)) raise _CommandNotFound(msg, cmd) else: raise _AHBootstrapSystemExit( 'An unexpected error occurred when running the ' '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) # Can fail of the default locale is not configured properly. See # https://github.com/astropy/astropy/issues/2749. For the purposes under # consideration 'latin1' is an acceptable fallback. 
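    # (For reference, the callers above use this helper as, e.g.,
    #      returncode, stdout, stderr = run_cmd(['git', 'submodule', 'status'])
    #  and rely on stdout/stderr being decoded to text with the encoding
    #  determined below.)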
try: stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' except ValueError: # Due to an OSX oddity locale.getdefaultlocale() can also crash # depending on the user's locale/language settings. See: # http://bugs.python.org/issue18378 stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible if not isinstance(stdout, _text_type): stdout = stdout.decode(stdio_encoding, 'replace') if not isinstance(stderr, _text_type): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) def _next_version(version): """ Given a parsed version from pkg_resources.parse_version, returns a new version string with the next minor version. Examples ======== >>> _next_version(pkg_resources.parse_version('1.2.3')) '1.3.0' """ if hasattr(version, 'base_version'): # New version parsing from setuptools >= 8.0 if version.base_version: parts = version.base_version.split('.') else: parts = [] else: parts = [] for part in version: if part.startswith('*'): break parts.append(part) parts = [int(p) for p in parts] if len(parts) < 3: parts += [0] * (3 - len(parts)) major, minor, micro = parts[:3] return '{0}.{1}.{2}'.format(major, minor + 1, 0) class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x encoding = 'utf-8' def write(self, s): pass def flush(self): pass @contextlib.contextmanager def _verbose(): yield @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr _err_help_msg = """ If the problem persists consider installing astropy_helpers manually using pip (`pip install astropy_helpers`) or by manually downloading the source archive, extracting it, and installing by running `python setup.py install` from the root of the extracted source code. """ class _AHBootstrapSystemExit(SystemExit): def __init__(self, *args): if not args: msg = 'An unknown problem occurred bootstrapping astropy_helpers.' else: msg = args[0] msg += '\n' + _err_help_msg super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) BOOTSTRAPPER = _Bootstrapper.main() def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. 
It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run() asdf-1.3.3/astropy_helpers/CHANGES.rst0000644000175000017500000004140013246003560017004 0ustar dandan00000000000000astropy-helpers Changelog ************************* 2.0.6 (2018-02-24) ------------------ - Avoid deprecation warning due to ``exclude=`` keyword in ``setup.py``. [#379] 2.0.5 (2018-02-22) ------------------ - Fix segmentation faults that occurred when the astropy-helpers submodule was first initialized in packages that also contained Cython code. [#375] 2.0.4 (2018-02-09) ------------------ - Support dotted package names as namespace packages in generate_version_py. [#370] - Fix compatibility with setuptools 36.x and above. [#372] - Fix false negative in add_openmp_flags_if_available when measuring code coverage with gcc. [#374] 2.0.3 (2018-01-20) ------------------ - Make sure that astropy-helpers 3.x.x is not downloaded on Python 2. [#363] - The bundled version of sphinx-automodapi has been updated to v0.7. [#365] - Add --auto-use and --no-auto-use command-line flags to match the ``auto_use`` configuration option, and add an alias ``--use-system-astropy-helpers`` for ``--no-auto-use``. [#366] 2.0.2 (2017-10-13) ------------------ - Added new helper function add_openmp_flags_if_available that can add OpenMP compilation flags to a C/Cython extension if needed. [#346] - Update numpydoc to v0.7. [#343] - The function ``get_git_devstr`` now returns ``'0'`` instead of ``None`` when no git repository is present. This allows generation of development version strings that are in a format that ``setuptools`` expects (e.g. "1.1.3.dev0" instead of "1.1.3.dev"). [#330] - It is now possible to override generated timestamps to make builds reproducible by setting the ``SOURCE_DATE_EPOCH`` environment variable [#341] - Mark Sphinx extensions as parallel-safe. [#344] - Switch to using mathjax instead of imgmath for local builds. [#342] - Deprecate ``exclude`` parameter of various functions in setup_helpers since it could not work as intended. Add new function ``add_exclude_packages`` to provide intended behavior. 
[#331] - Allow custom Sphinx doctest extension to recognize and process standard doctest directives ``testsetup`` and ``doctest``. [#335] 2.0.1 (2017-07-28) ------------------ - Fix compatibility with Sphinx <1.5. [#326] 2.0 (2017-07-06) ---------------- - Add support for package that lies in a subdirectory. [#249] - Removing ``compat.subprocess``. [#298] - Python 3.3 is no longer supported. [#300] - The 'automodapi' Sphinx extension (and associated dependencies) has now been moved to a standalone package which can be found at https://github.com/astropy/sphinx-automodapi - this is now bundled in astropy-helpers under astropy_helpers.extern.automodapi for convenience. Version shipped with astropy-helpers is v0.6. [#278, #303, #309, #323] - The ``numpydoc`` Sphinx extension has now been moved to ``astropy_helpers.extern``. [#278] - Fix ``build_docs`` error catching, so it doesn't hide Sphinx errors. [#292] - Fix compatibility with Sphinx 1.6. [#318] - Updating ez_setup.py to the last version before it's removal. [#321] 1.3.1 (2017-03-18) ------------------ - Fixed the missing button to hide output in documentation code blocks. [#287] - Fixed bug when ``build_docs`` when running with the clean (-l) option. [#289] - Add alternative location for various intersphinx inventories to fall back to. [#293] 1.3 (2016-12-16) ---------------- - ``build_sphinx`` has been deprecated in favor of the ``build_docs`` command. [#246] - Force the use of Cython's old ``build_ext`` command. A new ``build_ext`` command was added in Cython 0.25, but it does not work with astropy-helpers currently. [#261] 1.2 (2016-06-18) ---------------- - Added sphinx configuration value ``automodsumm_inherited_members``. If ``True`` this will include members that are inherited from a base class in the generated API docs. Defaults to ``False`` which matches the previous behavior. [#215] - Fixed ``build_sphinx`` to recognize builds that succeeded but have output *after* the "build succeeded." statement. This only applies when ``--warnings-returncode`` is given (which is primarily relevant for Travis documentation builds). [#223] - Fixed ``build_sphinx`` the sphinx extensions to not output a spurious warning for sphinx versions > 1.4. [#229] - Add Python version dependent local sphinx inventories that contain otherwise missing references. [#216] - ``astropy_helpers`` now require Sphinx 1.3 or later. [#226] 1.1.2 (2016-03-9) ----------------- - The CSS for the sphinx documentation was altered to prevent some text overflow problems. [#217] 1.1.1 (2015-12-23) ------------------ - Fixed crash in build with ``AttributeError: cython_create_listing`` with older versions of setuptools. [#209, #210] 1.1 (2015-12-10) ---------------- - The original ``AstropyTest`` class in ``astropy_helpers``, which implements the ``setup.py test`` command, is deprecated in favor of moving the implementation of that command closer to the actual Astropy test runner in ``astropy.tests``. Now a dummy ``test`` command is provided solely for informing users that they need ``astropy`` installed to run the tests (however, the previous, now deprecated implementation is still provided and continues to work with older versions of Astropy). See the related issue for more details. [#184] - Added a useful new utility function to ``astropy_helpers.utils`` called ``find_data_files``. 
This is similar to the ``find_packages`` function in setuptools in that it can be used to search a package for data files (matching a pattern) that can be passed to the ``package_data`` argument for ``setup()``. See the docstring to ``astropy_helpers.utils.find_data_files`` for more details. [#42] - The ``astropy_helpers`` module now sets the global ``_ASTROPY_SETUP_`` flag upon import (from within a ``setup.py``) script, so it's not necessary to have this in the ``setup.py`` script explicitly. If in doubt though, there's no harm in setting it twice. Putting it in ``astropy_helpers`` just ensures that any other imports that occur during build will have this flag set. [#191] - It is now possible to use Cython as a ``setup_requires`` build requirement, and still build Cython extensions even if Cython wasn't available at the beginning of the build processes (that is, is automatically downloaded via setuptools' processing of ``setup_requires``). [#185] - Moves the ``adjust_compiler`` check into the ``build_ext`` command itself, so it's only used when actually building extension modules. This also deprecates the stand-alone ``adjust_compiler`` function. [#76] - When running the ``build_sphinx`` / ``build_docs`` command with the ``-w`` option, the output from Sphinx is streamed as it runs instead of silently buffering until the doc build is complete. [#197] 1.0.7 (unreleased) ------------------ - Fix missing import in ``astropy_helpers/utils.py``. [#196] 1.0.6 (2015-12-04) ------------------ - Fixed bug where running ``./setup.py build_sphinx`` could return successfully even when the build was not successful (and should have returned a non-zero error code). [#199] 1.0.5 (2015-10-02) ------------------ - Fixed a regression in the ``./setup.py test`` command that was introduced in v1.0.4. 1.0.4 (2015-10-02) ------------------ - Fixed issue with the sphinx documentation css where the line numbers for code blocks were not aligned with the code. [#179, #180] - Fixed crash that could occur when trying to build Cython extension modules when Cython isn't installed. Normally this still results in a failed build, but was supposed to provide a useful error message rather than crash outright (this was a regression introduced in v1.0.3). [#181] - Fixed a crash that could occur on Python 3 when a working C compiler isn't found. [#182] - Quieted warnings about deprecated Numpy API in Cython extensions, when building Cython extensions against Numpy >= 1.7. [#183, #186] - Improved support for py.test >= 2.7--running the ``./setup.py test`` command now copies all doc pages into the temporary test directory as well, so that all test files have a "common root directory". [#189, #190] 1.0.3 (2015-07-22) ------------------ - Added workaround for sphinx-doc/sphinx#1843, a but in Sphinx which prevented descriptor classes with a custom metaclass from being documented correctly. [#158] - Added an alias for the ``./setup.py build_sphinx`` command as ``./setup.py build_docs`` which, to a new contributor, should hopefully be less cryptic. [#161] - The fonts in graphviz diagrams now match the font of the HTML content. [#169] - When the documentation is built on readthedocs.org, MathJax will be used for math rendering. When built elsewhere, the "pngmath" extension is still used for math rendering. 
[#170] - Fix crash when importing astropy_helpers when running with ``python -OO`` [#171] - The ``build`` and ``build_ext`` stages now correctly recognize the presence of C++ files in Cython extensions (previously only vanilla C worked). [#173] 1.0.2 (2015-04-02) ------------------ - Various fixes enabling the astropy-helpers Sphinx build command and Sphinx extensions to work with Sphinx 1.3. [#148] - More improvement to the ability to handle multiple versions of astropy-helpers being imported in the same Python interpreter session in the (somewhat rare) case of nested installs. [#147] - To better support high resolution displays, use SVG for the astropy logo and linkout image, falling back to PNGs for browsers that support it. [#150, #151] - Improve ``setup_helpers.get_compiler_version`` to work with more compilers, and to return more info. This will help fix builds of Astropy on less common compilers, like Sun C. [#153] 1.0.1 (2015-03-04) ------------------ - Released in concert with v0.4.8 to address the same issues. 0.4.8 (2015-03-04) ------------------ - Improved the ``ah_bootstrap`` script's ability to override existing installations of astropy-helpers with new versions in the context of installing multiple packages simultaneously within the same Python interpreter (e.g. when one package has in its ``setup_requires`` another package that uses a different version of astropy-helpers. [#144] - Added a workaround to an issue in matplotlib that can, in rare cases, lead to a crash when installing packages that import matplotlib at build time. [#144] 1.0 (2015-02-17) ---------------- - Added new pre-/post-command hook points for ``setup.py`` commands. Now any package can define code to run before and/or after any ``setup.py`` command without having to manually subclass that command by adding ``pre__hook`` and ``post__hook`` callables to the package's ``setup_package.py`` module. See the PR for more details. [#112] - The following objects in the ``astropy_helpers.setup_helpers`` module have been relocated: - ``get_dummy_distribution``, ``get_distutils_*``, ``get_compiler_option``, ``add_command_option``, ``is_distutils_display_option`` -> ``astropy_helpers.distutils_helpers`` - ``should_build_with_cython``, ``generate_build_ext_command`` -> ``astropy_helpers.commands.build_ext`` - ``AstropyBuildPy`` -> ``astropy_helpers.commands.build_py`` - ``AstropyBuildSphinx`` -> ``astropy_helpers.commands.build_sphinx`` - ``AstropyInstall`` -> ``astropy_helpers.commands.install`` - ``AstropyInstallLib`` -> ``astropy_helpers.commands.install_lib`` - ``AstropyRegister`` -> ``astropy_helpers.commands.register`` - ``get_pkg_version_module`` -> ``astropy_helpers.version_helpers`` - ``write_if_different``, ``import_file``, ``get_numpy_include_path`` -> ``astropy_helpers.utils`` All of these are "soft" deprecations in the sense that they are still importable from ``astropy_helpers.setup_helpers`` for now, and there is no (easy) way to produce deprecation warnings when importing these objects from ``setup_helpers`` rather than directly from the modules they are defined in. But please consider updating any imports to these objects. [#110] - Use of the ``astropy.sphinx.ext.astropyautosummary`` extension is deprecated for use with Sphinx < 1.2. Instead it should suffice to remove this extension for the ``extensions`` list in your ``conf.py`` and add the stock ``sphinx.ext.autosummary`` instead. 
[#131] 0.4.7 (2015-02-17) ------------------ - Fixed incorrect/missing git hash being added to the generated ``version.py`` when creating a release. [#141] 0.4.6 (2015-02-16) ------------------ - Fixed problems related to the automatically generated _compiler module not being created properly. [#139] 0.4.5 (2015-02-11) ------------------ - Fixed an issue where ah_bootstrap.py could blow up when astropy_helper's version number is 1.0. - Added a workaround for documentation of properties in the rare case where the class's metaclass has a property of the same name. [#130] - Fixed an issue on Python 3 where importing a package using astropy-helper's generated version.py module would crash when the current working directory is an empty git repository. [#114, #137] - Fixed an issue where the "revision count" appended to .dev versions by the generated version.py did not accurately reflect the revision count for the package it belongs to, and could be invalid if the current working directory is an unrelated git repository. [#107, #137] - Likewise, fixed a confusing warning message that could occur in the same circumstances as the above issue. [#121, #137] 0.4.4 (2014-12-31) ------------------ - More improvements for building the documentation using Python 3.x. [#100] - Additional minor fixes to Python 3 support. [#115] - Updates to support new test features in Astropy [#92, #106] 0.4.3 (2014-10-22) ------------------ - The generated ``version.py`` file now preserves the git hash of installed copies of the package as well as when building a source distribution. That is, the git hash of the changeset that was installed/released is preserved. [#87] - In smart resolver add resolution for class links when they exist in the intersphinx inventory, but not the mapping of the current package (e.g. when an affiliated package uses an astropy core class of which "actual" and "documented" location differs) [#88] - Fixed a bug that could occur when running ``setup.py`` for the first time in a repository that uses astropy-helpers as a submodule: ``AttributeError: 'NoneType' object has no attribute 'mkdtemp'`` [#89] - Fixed a bug where optional arguments to the ``doctest-skip`` Sphinx directive were sometimes being left in the generated documentation output. [#90] - Improved support for building the documentation using Python 3.x. [#96] - Avoid error message if .git directory is not present. [#91] 0.4.2 (2014-08-09) ------------------ - Fixed some CSS issues in generated API docs. [#69] - Fixed the warning message that could be displayed when generating a version number with some older versions of git. [#77] - Fixed automodsumm to work with new versions of Sphinx (>= 1.2.2). [#80] 0.4.1 (2014-08-08) ------------------ - Fixed git revision count on systems with git versions older than v1.7.2. [#70] - Fixed display of warning text when running a git command fails (previously the output of stderr was not being decoded properly). [#70] - The ``--offline`` flag to ``setup.py`` understood by ``ah_bootstrap.py`` now also prevents git from going online to fetch submodule updates. [#67] - The Sphinx extension for converting issue numbers to links in the changelog now supports working on arbitrary pages via a new ``conf.py`` setting: ``changelog_links_docpattern``. By default it affects the ``changelog`` and ``whatsnew`` pages in one's Sphinx docs. [#61] - Fixed crash that could result from users with missing/misconfigured locale settings. 
[#58] - The font used for code examples in the docs is now the system-defined ``monospace`` font, rather than ``Minaco``, which is not available on all platforms. [#50] 0.4 (2014-07-15) ---------------- - Initial release of astropy-helpers. See `APE4 `_ for details of the motivation and design of this package. - The ``astropy_helpers`` package replaces the following modules in the ``astropy`` package: - ``astropy.setup_helpers`` -> ``astropy_helpers.setup_helpers`` - ``astropy.version_helpers`` -> ``astropy_helpers.version_helpers`` - ``astropy.sphinx`` - > ``astropy_helpers.sphinx`` These modules should be considered deprecated in ``astropy``, and any new, non-critical changes to those modules will be made in ``astropy_helpers`` instead. Affiliated packages wishing to make use those modules (as in the Astropy package-template) should use the versions from ``astropy_helpers`` instead, and include the ``ah_bootstrap.py`` script in their project, for bootstrapping the ``astropy_helpers`` package in their setup.py script. asdf-1.3.3/astropy_helpers/astropy_helpers/0000755000175000017500000000000013246031665020435 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/version.py0000644000175000017500000000102413243564211022464 0ustar dandan00000000000000# Autogenerated by Astropy-affiliated package astropy_helpers's setup.py on 2018-02-22 15:58:01 from __future__ import unicode_literals import datetime version = "2.0.4" githash = "41a607235bdc335c9c125f828bdd35502a09aff9" major = 2 minor = 0 bugfix = 4 release = True timestamp = datetime.datetime(2018, 2, 22, 15, 58, 1) debug = False try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" asdf-1.3.3/astropy_helpers/astropy_helpers/distutils_helpers.py0000644000175000017500000001736213243564211024561 0ustar dandan00000000000000""" This module contains various utilities for introspecting the distutils module and the setup process. Some of these utilities require the `astropy_helpers.setup_helpers.register_commands` function to be called first, as it will affect introspection of setuptools command-line arguments. Other utilities in this module do not have that restriction. """ import os import sys from distutils import ccompiler, log from distutils.dist import Distribution from distutils.errors import DistutilsError from .utils import silence # This function, and any functions that call it, require the setup in # `astropy_helpers.setup_helpers.register_commands` to be run first. def get_dummy_distribution(): """ Returns a distutils Distribution object used to instrument the setup environment before calling the actual setup() function. """ from .setup_helpers import _module_state if _module_state['registered_commands'] is None: raise RuntimeError( 'astropy_helpers.setup_helpers.register_commands() must be ' 'called before using ' 'astropy_helpers.setup_helpers.get_dummy_distribution()') # Pre-parse the Distutils command-line options and config files to if # the option is set. 
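    # (After this pre-parsing, options given on the command line or in
    # setup.cfg become visible through ``dist.command_options``, which the
    # ``get_distutils_option`` helpers defined below then query.)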
dist = Distribution({'script_name': os.path.basename(sys.argv[0]), 'script_args': sys.argv[1:]}) dist.cmdclass.update(_module_state['registered_commands']) with silence(): try: dist.parse_config_files() dist.parse_command_line() except (DistutilsError, AttributeError, SystemExit): # Let distutils handle DistutilsErrors itself AttributeErrors can # get raise for ./setup.py --help SystemExit can be raised if a # display option was used, for example pass return dist def get_distutils_option(option, commands): """ Returns the value of the given distutils option. Parameters ---------- option : str The name of the option commands : list of str The list of commands on which this option is available Returns ------- val : str or None the value of the given distutils option. If the option is not set, returns None. """ dist = get_dummy_distribution() for cmd in commands: cmd_opts = dist.command_options.get(cmd) if cmd_opts is not None and option in cmd_opts: return cmd_opts[option][1] else: return None def get_distutils_build_option(option): """ Returns the value of the given distutils build option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build option. If the option is not set, returns None. """ return get_distutils_option(option, ['build', 'build_ext', 'build_clib']) def get_distutils_install_option(option): """ Returns the value of the given distutils install option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build option. If the option is not set, returns None. """ return get_distutils_option(option, ['install']) def get_distutils_build_or_install_option(option): """ Returns the value of the given distutils build or install option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build or install option. If the option is not set, returns None. """ return get_distutils_option(option, ['build', 'build_ext', 'build_clib', 'install']) def get_compiler_option(): """ Determines the compiler that will be used to build extension modules. Returns ------- compiler : str The compiler option specified for the build, build_ext, or build_clib command; or the default compiler for the platform if none was specified. """ compiler = get_distutils_build_option('compiler') if compiler is None: return ccompiler.get_default_compiler() return compiler def add_command_option(command, name, doc, is_bool=False): """ Add a custom option to a setup command. Issues a warning if the option already exists on that command. Parameters ---------- command : str The name of the command as given on the command line name : str The name of the build option doc : str A short description of the option, for the `--help` message is_bool : bool, optional When `True`, the option is a boolean option and doesn't require an associated value. 
""" dist = get_dummy_distribution() cmdcls = dist.get_command_class(command) if (hasattr(cmdcls, '_astropy_helpers_options') and name in cmdcls._astropy_helpers_options): return attr = name.replace('-', '_') if hasattr(cmdcls, attr): raise RuntimeError( '{0!r} already has a {1!r} class attribute, barring {2!r} from ' 'being usable as a custom option name.'.format(cmdcls, attr, name)) for idx, cmd in enumerate(cmdcls.user_options): if cmd[0] == name: log.warn('Overriding existing {0!r} option ' '{1!r}'.format(command, name)) del cmdcls.user_options[idx] if name in cmdcls.boolean_options: cmdcls.boolean_options.remove(name) break cmdcls.user_options.append((name, None, doc)) if is_bool: cmdcls.boolean_options.append(name) # Distutils' command parsing requires that a command object have an # attribute with the same name as the option (with '-' replaced with '_') # in order for that option to be recognized as valid setattr(cmdcls, attr, None) # This caches the options added through add_command_option so that if it is # run multiple times in the same interpreter repeated adds are ignored # (this way we can still raise a RuntimeError if a custom option overrides # a built-in option) if not hasattr(cmdcls, '_astropy_helpers_options'): cmdcls._astropy_helpers_options = set([name]) else: cmdcls._astropy_helpers_options.add(name) def get_distutils_display_options(): """ Returns a set of all the distutils display options in their long and short forms. These are the setup.py arguments such as --name or --version which print the project's metadata and then exit. Returns ------- opts : set The long and short form display option arguments, including the - or -- """ short_display_opts = set('-' + o[1] for o in Distribution.display_options if o[1]) long_display_opts = set('--' + o[0] for o in Distribution.display_options) # Include -h and --help which are not explicitly listed in # Distribution.display_options (as they are handled by optparse) short_display_opts.add('-h') long_display_opts.add('--help') # This isn't the greatest approach to hardcode these commands. # However, there doesn't seem to be a good way to determine # whether build *will be* run as part of the command at this # phase. display_commands = set([ 'clean', 'register', 'setopt', 'saveopts', 'egg_info', 'alias']) return short_display_opts.union(long_display_opts.union(display_commands)) def is_distutils_display_option(): """ Returns True if sys.argv contains any of the distutils display options such as --version or --name. """ display_options = get_distutils_display_options() return bool(set(sys.argv[1:]).intersection(display_options)) asdf-1.3.3/astropy_helpers/astropy_helpers/utils.py0000644000175000017500000006476513246003560022162 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import, unicode_literals import contextlib import functools import imp import inspect import os import sys import glob import textwrap import types import warnings try: from importlib import machinery as import_machinery # Python 3.2 does not have SourceLoader if not hasattr(import_machinery, 'SourceLoader'): import_machinery = None except ImportError: import_machinery = None # Python 3.3's importlib caches filesystem reads for faster imports in the # general case. But sometimes it's necessary to manually invalidate those # caches so that the import system can pick up new generated files. 
See # https://github.com/astropy/astropy/issues/820 if sys.version_info[:2] >= (3, 3): from importlib import invalidate_caches else: def invalidate_caches(): return None # Python 2/3 compatibility if sys.version_info[0] < 3: string_types = (str, unicode) # noqa else: string_types = (str,) # Note: The following Warning subclasses are simply copies of the Warnings in # Astropy of the same names. class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ def _get_platlib_dir(cmd): """ Given a build command, return the name of the appropriate platform-specific build subdirectory directory (e.g. build/lib.linux-x86_64-2.7) """ plat_specifier = '.{0}-{1}'.format(cmd.plat_name, sys.version[0:3]) return os.path.join(cmd.build_base, 'lib' + plat_specifier) def get_numpy_include_path(): """ Gets the path to the numpy headers. """ # We need to go through this nonsense in case setuptools # downloaded and installed Numpy for us as part of the build or # install, since Numpy may still think it's in "setup mode", when # in fact we're ready to use it to build astropy now. if sys.version_info[0] >= 3: import builtins if hasattr(builtins, '__NUMPY_SETUP__'): del builtins.__NUMPY_SETUP__ import imp import numpy imp.reload(numpy) else: import __builtin__ if hasattr(__builtin__, '__NUMPY_SETUP__'): del __builtin__.__NUMPY_SETUP__ import numpy reload(numpy) try: numpy_include = numpy.get_include() except AttributeError: numpy_include = numpy.get_numpy_include() return numpy_include class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x def write(self, s): pass def flush(self): pass @contextlib.contextmanager def silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr if sys.platform == 'win32': import ctypes def _has_hidden_attribute(filepath): """ Returns True if the given filepath has the hidden attribute on MS-Windows. Based on a post here: http://stackoverflow.com/questions/284115/cross-platform-hidden-file-detection """ if isinstance(filepath, bytes): filepath = filepath.decode(sys.getfilesystemencoding()) try: attrs = ctypes.windll.kernel32.GetFileAttributesW(filepath) assert attrs != -1 result = bool(attrs & 2) except (AttributeError, AssertionError): result = False return result else: def _has_hidden_attribute(filepath): return False def is_path_hidden(filepath): """ Determines if a given file or directory is hidden. 
Parameters ---------- filepath : str The path to a file or directory Returns ------- hidden : bool Returns `True` if the file is hidden """ name = os.path.basename(os.path.abspath(filepath)) if isinstance(name, bytes): is_dotted = name.startswith(b'.') else: is_dotted = name.startswith('.') return is_dotted or _has_hidden_attribute(filepath) def walk_skip_hidden(top, onerror=None, followlinks=False): """ A wrapper for `os.walk` that skips hidden files and directories. This function does not have the parameter `topdown` from `os.walk`: the directories must always be recursed top-down when using this function. See also -------- os.walk : For a description of the parameters """ for root, dirs, files in os.walk( top, topdown=True, onerror=onerror, followlinks=followlinks): # These lists must be updated in-place so os.walk will skip # hidden directories dirs[:] = [d for d in dirs if not is_path_hidden(d)] files[:] = [f for f in files if not is_path_hidden(f)] yield root, dirs, files def write_if_different(filename, data): """Write `data` to `filename`, if the content of the file is different. Parameters ---------- filename : str The file name to be written to. data : bytes The data to be written to `filename`. """ assert isinstance(data, bytes) if os.path.exists(filename): with open(filename, 'rb') as fd: original_data = fd.read() else: original_data = None if original_data != data: with open(filename, 'wb') as fd: fd.write(data) def import_file(filename, name=None): """ Imports a module from a single file as if it doesn't belong to a particular package. The returned module will have the optional ``name`` if given, or else a name generated from the filename. """ # Specifying a traditional dot-separated fully qualified name here # results in a number of "Parent module 'astropy' not found while # handling absolute import" warnings. Using the same name, the # namespaces of the modules get merged together. So, this # generates an underscore-separated name which is more likely to # be unique, and it doesn't really matter because the name isn't # used directly here anyway. mode = 'U' if sys.version_info[0] < 3 else 'r' if name is None: basename = os.path.splitext(filename)[0] name = '_'.join(os.path.relpath(basename).split(os.sep)[1:]) if import_machinery: loader = import_machinery.SourceFileLoader(name, filename) mod = loader.load_module() else: with open(filename, mode) as fd: mod = imp.load_module(name, fd, filename, ('.py', mode, 1)) return mod def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. Raise `ImportError` if the module or name is not found. """ parts = name.split('.') cursor = len(parts) - 1 module_name = parts[:cursor] attr_name = parts[-1] while cursor > 0: try: ret = __import__('.'.join(module_name), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret if sys.version_info[0] >= 3: def iteritems(dictionary): return dictionary.items() else: def iteritems(dictionary): return dictionary.iteritems() def extends_doc(extended_func): """ A function decorator for use when wrapping an existing function but adding additional functionality. This copies the docstring from the original function, and appends to it (along with a newline) the docstring of the wrapper function. Examples -------- >>> def foo(): ... '''Hello.''' ... >>> @extends_doc(foo) ... 
def bar(): ... '''Goodbye.''' ... >>> print(bar.__doc__) Hello. Goodbye. """ def decorator(func): if not (extended_func.__doc__ is None or func.__doc__ is None): func.__doc__ = '\n\n'.join([extended_func.__doc__.rstrip('\n'), func.__doc__.lstrip('\n')]) return func return decorator # Duplicated from astropy.utils.decorators.deprecated # When fixing issues in this function fix them in astropy first, then # port the fixes over to astropy-helpers def deprecated(since, message='', name='', alternative='', pending=False, obj_type=None): """ Used to mark a function or class as deprecated. To mark an attribute as deprecated, use `deprecated_attribute`. Parameters ---------- since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. The format specifier ``func`` may be used for the name of the function, and ``alternative`` may be used in the deprecation message to insert the name of an alternative to the deprecated function. ``obj_type`` may be used to insert a friendly name for the type of object being deprecated. name : str, optional The name of the deprecated function or class; if not provided the name is automatically determined from the passed in function or class, though this is useful in the case of renamed functions, where the new function is just assigned to the name of the deprecated function. For example:: def new_function(): ... oldFunction = new_function alternative : str, optional An alternative function or class name that the user may use in place of the deprecated object. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a AstropyPendingDeprecationWarning instead of a AstropyDeprecationWarning. obj_type : str, optional The type of this object, if the automatically determined one needs to be overridden. """ method_types = (classmethod, staticmethod, types.MethodType) def deprecate_doc(old_doc, message): """ Returns a given docstring with a deprecation message prepended to it. """ if not old_doc: old_doc = '' old_doc = textwrap.dedent(old_doc).strip('\n') new_doc = (('\n.. deprecated:: %(since)s' '\n %(message)s\n\n' % {'since': since, 'message': message.strip()}) + old_doc) if not old_doc: # This is to prevent a spurious 'unexpected unindent' warning from # docutils when the original docstring was blank. new_doc += r'\ ' return new_doc def get_function(func): """ Given a function or classmethod (or other function wrapper type), get the function object. """ if isinstance(func, method_types): func = func.__func__ return func def deprecate_function(func, message): """ Returns a wrapped function that displays an ``AstropyDeprecationWarning`` when it is called. """ if isinstance(func, method_types): func_wrapper = type(func) else: func_wrapper = lambda f: f func = get_function(func) def deprecated_func(*args, **kwargs): if pending: category = AstropyPendingDeprecationWarning else: category = AstropyDeprecationWarning warnings.warn(message, category, stacklevel=2) return func(*args, **kwargs) # If this is an extension function, we can't call # functools.wraps on it, but we normally don't care. # This crazy way to get the type of a wrapper descriptor is # straight out of the Python 3.3 inspect module docs. 
if type(func) != type(str.__dict__['__add__']): deprecated_func = functools.wraps(func)(deprecated_func) deprecated_func.__doc__ = deprecate_doc( deprecated_func.__doc__, message) return func_wrapper(deprecated_func) def deprecate_class(cls, message): """ Returns a wrapper class with the docstrings updated and an __init__ function that will raise an ``AstropyDeprectationWarning`` warning when called. """ # Creates a new class with the same name and bases as the # original class, but updates the dictionary with a new # docstring and a wrapped __init__ method. __module__ needs # to be manually copied over, since otherwise it will be set # to *this* module (astropy.utils.misc). # This approach seems to make Sphinx happy (the new class # looks enough like the original class), and works with # extension classes (which functools.wraps does not, since # it tries to modify the original class). # We need to add a custom pickler or you'll get # Can't pickle : it's not found as ... # errors. Picklability is required for any class that is # documented by Sphinx. members = cls.__dict__.copy() members.update({ '__doc__': deprecate_doc(cls.__doc__, message), '__init__': deprecate_function(get_function(cls.__init__), message), }) return type(cls.__name__, cls.__bases__, members) def deprecate(obj, message=message, name=name, alternative=alternative, pending=pending): if obj_type is None: if isinstance(obj, type): obj_type_name = 'class' elif inspect.isfunction(obj): obj_type_name = 'function' elif inspect.ismethod(obj) or isinstance(obj, method_types): obj_type_name = 'method' else: obj_type_name = 'object' else: obj_type_name = obj_type if not name: name = get_function(obj).__name__ altmessage = '' if not message or type(message) == type(deprecate): if pending: message = ('The %(func)s %(obj_type)s will be deprecated in a ' 'future version.') else: message = ('The %(func)s %(obj_type)s is deprecated and may ' 'be removed in a future version.') if alternative: altmessage = '\n Use %s instead.' % alternative message = ((message % { 'func': name, 'name': name, 'alternative': alternative, 'obj_type': obj_type_name}) + altmessage) if isinstance(obj, type): return deprecate_class(obj, message) else: return deprecate_function(obj, message) if type(message) == type(deprecate): return deprecate(message) return deprecate def deprecated_attribute(name, since, message=None, alternative=None, pending=False): """ Used to mark a public attribute as deprecated. This creates a property that will warn when the given attribute name is accessed. To prevent the warning (i.e. for internal code), use the private name for the attribute by prepending an underscore (i.e. ``self._name``). Parameters ---------- name : str The name of the deprecated attribute. since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. The format specifier ``name`` may be used for the name of the attribute, and ``alternative`` may be used in the deprecation message to insert the name of an alternative to the deprecated function. alternative : str, optional An alternative attribute that the user may use in place of the deprecated attribute. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a AstropyPendingDeprecationWarning instead of a AstropyDeprecationWarning. 
Examples -------- :: class MyClass: # Mark the old_name as deprecated old_name = misc.deprecated_attribute('old_name', '0.1') def method(self): self._old_name = 42 """ private_name = '_' + name @deprecated(since, name=name, obj_type='attribute') def get(self): return getattr(self, private_name) @deprecated(since, name=name, obj_type='attribute') def set(self, val): setattr(self, private_name, val) @deprecated(since, name=name, obj_type='attribute') def delete(self): delattr(self, private_name) return property(get, set, delete) def minversion(module, version, inclusive=True, version_path='__version__'): """ Returns `True` if the specified Python module satisfies a minimum version requirement, and `False` if not. By default this uses `pkg_resources.parse_version` to do the version comparison if available. Otherwise it falls back on `distutils.version.LooseVersion`. Parameters ---------- module : module or `str` An imported module of which to check the version, or the name of that module (in which case an import of that module is attempted-- if this fails `False` is returned). version : `str` The version as a string that this module must have at a minimum (e.g. ``'0.12'``). inclusive : `bool` The specified version meets the requirement inclusively (i.e. ``>=``) as opposed to strictly greater than (default: `True`). version_path : `str` A dotted attribute path to follow in the module for the version. Defaults to just ``'__version__'``, which should work for most Python modules. Examples -------- >>> import astropy >>> minversion(astropy, '0.4.4') True """ if isinstance(module, types.ModuleType): module_name = module.__name__ elif isinstance(module, string_types): module_name = module try: module = resolve_name(module_name) except ImportError: return False else: raise ValueError('module argument must be an actual imported ' 'module, or the import name of the module; ' 'got {0!r}'.format(module)) if '.' not in version_path: have_version = getattr(module, version_path) else: have_version = resolve_name('.'.join([module.__name__, version_path])) try: from pkg_resources import parse_version except ImportError: from distutils.version import LooseVersion as parse_version if inclusive: return parse_version(have_version) >= parse_version(version) else: return parse_version(have_version) > parse_version(version) # Copy of the classproperty decorator from astropy.utils.decorators class classproperty(property): """ Similar to `property`, but allows class-level properties. That is, a property whose getter is like a `classmethod`. The wrapped method may explicitly use the `classmethod` decorator (which must become before this decorator), or the `classmethod` may be omitted (it is implicit through use of this decorator). .. note:: classproperty only works for *read-only* properties. It does not currently allow writeable/deleteable properties, due to subtleties of how Python descriptors work. In order to implement such properties on a class a metaclass for that class must be implemented. Parameters ---------- fget : callable The function that computes the value of this property (in particular, the function when this is used as a decorator) a la `property`. doc : str, optional The docstring for the property--by default inherited from the getter function. lazy : bool, optional If True, caches the value returned by the first call to the getter function, so that it is only called once (used for lazy evaluation of an attribute). This is analogous to `lazyproperty`. 
The ``lazy`` argument can also be used when `classproperty` is used as a decorator (see the third example below). When used in the decorator syntax this *must* be passed in as a keyword argument. Examples -------- :: >>> class Foo(object): ... _bar_internal = 1 ... @classproperty ... def bar(cls): ... return cls._bar_internal + 1 ... >>> Foo.bar 2 >>> foo_instance = Foo() >>> foo_instance.bar 2 >>> foo_instance._bar_internal = 2 >>> foo_instance.bar # Ignores instance attributes 2 As previously noted, a `classproperty` is limited to implementing read-only attributes:: >>> class Foo(object): ... _bar_internal = 1 ... @classproperty ... def bar(cls): ... return cls._bar_internal ... @bar.setter ... def bar(cls, value): ... cls._bar_internal = value ... Traceback (most recent call last): ... NotImplementedError: classproperty can only be read-only; use a metaclass to implement modifiable class-level properties When the ``lazy`` option is used, the getter is only called once:: >>> class Foo(object): ... @classproperty(lazy=True) ... def bar(cls): ... print("Performing complicated calculation") ... return 1 ... >>> Foo.bar Performing complicated calculation 1 >>> Foo.bar 1 If a subclass inherits a lazy `classproperty` the property is still re-evaluated for the subclass:: >>> class FooSub(Foo): ... pass ... >>> FooSub.bar Performing complicated calculation 1 >>> FooSub.bar 1 """ def __new__(cls, fget=None, doc=None, lazy=False): if fget is None: # Being used as a decorator--return a wrapper that implements # decorator syntax def wrapper(func): return cls(func, lazy=lazy) return wrapper return super(classproperty, cls).__new__(cls) def __init__(self, fget, doc=None, lazy=False): self._lazy = lazy if lazy: self._cache = {} fget = self._wrap_fget(fget) super(classproperty, self).__init__(fget=fget, doc=doc) # There is a buglet in Python where self.__doc__ doesn't # get set properly on instances of property subclasses if # the doc argument was used rather than taking the docstring # from fget if doc is not None: self.__doc__ = doc def __get__(self, obj, objtype=None): if self._lazy and objtype in self._cache: return self._cache[objtype] if objtype is not None: # The base property.__get__ will just return self here; # instead we pass objtype through to the original wrapped # function (which takes the class as its sole argument) val = self.fget.__wrapped__(objtype) else: val = super(classproperty, self).__get__(obj, objtype=objtype) if self._lazy: if objtype is None: objtype = obj.__class__ self._cache[objtype] = val return val def getter(self, fget): return super(classproperty, self).getter(self._wrap_fget(fget)) def setter(self, fset): raise NotImplementedError( "classproperty can only be read-only; use a metaclass to " "implement modifiable class-level properties") def deleter(self, fdel): raise NotImplementedError( "classproperty can only be read-only; use a metaclass to " "implement modifiable class-level properties") @staticmethod def _wrap_fget(orig_fget): if isinstance(orig_fget, classmethod): orig_fget = orig_fget.__func__ # Using stock functools.wraps instead of the fancier version # found later in this module, which is overkill for this purpose @functools.wraps(orig_fget) def fget(obj): return orig_fget(obj.__class__) # Set the __wrapped__ attribute manually for support on Python 2 fget.__wrapped__ = orig_fget return fget def find_data_files(package, pattern): """ Include files matching ``pattern`` inside ``package``. 
Parameters ---------- package : str The package inside which to look for data files pattern : str Pattern (glob-style) to match for the data files (e.g. ``*.dat``). This supports the Python 3.5 ``**`` recursive syntax. For example, ``**/*.fits`` matches all files ending with ``.fits`` recursively. Only one instance of ``**`` can be included in the pattern. """ if sys.version_info[:2] >= (3, 5): return glob.glob(os.path.join(package, pattern), recursive=True) else: if '**' in pattern: start, end = pattern.split('**') if end.startswith(('/', os.sep)): end = end[1:] matches = glob.glob(os.path.join(package, start, end)) for root, dirs, files in os.walk(os.path.join(package, start)): for dirname in dirs: matches += glob.glob(os.path.join(root, dirname, end)) return matches else: return glob.glob(os.path.join(package, pattern)) asdf-1.3.3/astropy_helpers/astropy_helpers/openmp_helpers.py0000644000175000017500000000610513243564211024024 0ustar dandan00000000000000# This module defines functions that can be used to check whether OpenMP is # available and if so what flags to use. To use this, import the # add_openmp_flags_if_available function in a setup_package.py file where you # are defining your extensions: # # from astropy_helpers.openmp_helpers import add_openmp_flags_if_available # # then call it with a single extension as the only argument: # # add_openmp_flags_if_available(extension) # # this will add the OpenMP flags if available. from __future__ import absolute_import, print_function import os import sys import glob import tempfile import subprocess from distutils import log from distutils.ccompiler import new_compiler from distutils.sysconfig import customize_compiler from distutils.errors import CompileError, LinkError from .setup_helpers import get_compiler_option __all__ = ['add_openmp_flags_if_available'] CCODE = """ #include <omp.h> #include <stdio.h> int main(void) { #pragma omp parallel printf("nthreads=%d\\n", omp_get_num_threads()); return 0; } """ def add_openmp_flags_if_available(extension): """ Add OpenMP compilation flags, if available (if not, a warning will be printed to the console and no flags will be added). Returns `True` if the flags were added, `False` otherwise. 
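Examples
--------
A minimal sketch of the intended call pattern from a package's
``setup_package.py``; the package, module, and source file names below are
hypothetical::

    from distutils.core import Extension

    from astropy_helpers.openmp_helpers import add_openmp_flags_if_available

    def get_extensions():
        ext = Extension('mypackage.fast_module',
                        sources=['mypackage/fast_module.c'])
        add_openmp_flags_if_available(ext)
        return [ext]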
""" ccompiler = new_compiler() customize_compiler(ccompiler) tmp_dir = tempfile.mkdtemp() start_dir = os.path.abspath('.') if get_compiler_option() == 'msvc': compile_flag = '-openmp' link_flag = '' else: compile_flag = '-fopenmp' link_flag = '-fopenmp' try: os.chdir(tmp_dir) with open('test_openmp.c', 'w') as f: f.write(CCODE) os.mkdir('objects') # Compile, link, and run test program ccompiler.compile(['test_openmp.c'], output_dir='objects', extra_postargs=[compile_flag]) ccompiler.link_executable(glob.glob(os.path.join('objects', '*' + ccompiler.obj_extension)), 'test_openmp', extra_postargs=[link_flag]) output = subprocess.check_output('./test_openmp').decode(sys.stdout.encoding or 'utf-8').splitlines() if 'nthreads=' in output[0]: nthreads = int(output[0].strip().split('=')[1]) if len(output) == nthreads: using_openmp = True else: log.warn("Unexpected number of lines from output of test OpenMP " "program (output was {0})".format(output)) using_openmp = False else: log.warn("Unexpected output from test OpenMP " "program (output was {0})".format(output)) using_openmp = False except (CompileError, LinkError): using_openmp = False finally: os.chdir(start_dir) if using_openmp: log.info("Compiling Cython extension with OpenMP support") extension.extra_compile_args.append(compile_flag) extension.extra_link_args.append(link_flag) else: log.warn("Cannot compile Cython extension with OpenMP, reverting to non-parallel code") return using_openmp asdf-1.3.3/astropy_helpers/astropy_helpers/extern/0000755000175000017500000000000013246031665021742 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/extern/setup_package.py0000644000175000017500000000027513243564211025126 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst def get_package_data(): return {'astropy_helpers.extern': ['automodapi/templates/*/*.rst', 'numpydoc/templates/*.rst']} asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/0000755000175000017500000000000013246031665024104 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/utils.py0000644000175000017500000001574413243564211025624 0ustar dandan00000000000000import inspect import sys import re import os from warnings import warn from sphinx.ext.autosummary.generate import find_autosummary_in_docstring if sys.version_info[0] >= 3: def iteritems(dictionary): return dictionary.items() else: def iteritems(dictionary): return dictionary.iteritems() # We use \n instead of os.linesep because even on Windows, the generated files # use \n as the newline character. SPACE_NEWLINE = ' \n' SINGLE_NEWLINE = '\n' DOUBLE_NEWLINE = '\n\n' TRIPLE_NEWLINE = '\n\n\n' def cleanup_whitespace(text): """ Make sure there are never more than two consecutive newlines, and that there are no trailing whitespaces. """ # Get rid of overall leading/trailing whitespace text = text.strip() + '\n' # Get rid of trailing whitespace on each line while SPACE_NEWLINE in text: text = text.replace(SPACE_NEWLINE, SINGLE_NEWLINE) # Avoid too many consecutive newlines while TRIPLE_NEWLINE in text: text = text.replace(TRIPLE_NEWLINE, DOUBLE_NEWLINE) return text def find_mod_objs(modname, onlylocals=False): """ Returns all the public attributes of a module referenced by name. .. note:: The returned list *not* include subpackages or modules of `modname`,nor does it include private attributes (those that beginwith '_' or are not in `__all__`). Parameters ---------- modname : str The name of the module to search. 
onlylocals : bool If True, only attributes that are either members of `modname` OR one of its modules or subpackages will be included. Returns ------- localnames : list of str A list of the names of the attributes as they are named in the module `modname` . fqnames : list of str A list of the full qualified names of the attributes (e.g., ``astropy.utils.misc.find_mod_objs``). For attributes that are simple variables, this is based on the local name, but for functions or classes it can be different if they are actually defined elsewhere and just referenced in `modname`. objs : list of objects A list of the actual attributes themselves (in the same order as the other arguments) """ __import__(modname) mod = sys.modules[modname] if hasattr(mod, '__all__'): pkgitems = [(k, mod.__dict__[k]) for k in mod.__all__] else: pkgitems = [(k, mod.__dict__[k]) for k in dir(mod) if k[0] != '_'] # filter out modules and pull the names and objs out ismodule = inspect.ismodule localnames = [k for k, v in pkgitems if not ismodule(v)] objs = [v for k, v in pkgitems if not ismodule(v)] # fully qualified names can be determined from the object's module fqnames = [] for obj, lnm in zip(objs, localnames): if hasattr(obj, '__module__') and hasattr(obj, '__name__'): fqnames.append(obj.__module__ + '.' + obj.__name__) else: fqnames.append(modname + '.' + lnm) if onlylocals: valids = [fqn.startswith(modname) for fqn in fqnames] localnames = [e for i, e in enumerate(localnames) if valids[i]] fqnames = [e for i, e in enumerate(fqnames) if valids[i]] objs = [e for i, e in enumerate(objs) if valids[i]] return localnames, fqnames, objs def find_autosummary_in_lines_for_automodsumm(lines, module=None, filename=None): """Find out what items appear in autosummary:: directives in the given lines. Returns a list of (name, toctree, template, inherited_members) where *name* is a name of an object and *toctree* the :toctree: path of the corresponding autosummary directive (relative to the root of the file name), *template* the value of the :template: option, and *inherited_members* is the value of the :inherited-members: option. *toctree*, *template*, and *inherited_members* are ``None`` if the directive does not have the corresponding options set. .. note:: This is a slightly modified version of ``sphinx.ext.autosummary.generate.find_autosummary_in_lines`` which recognizes the ``inherited-members`` option. 
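Examples
--------
A minimal illustration of the returned tuples; the module and object names
below are hypothetical.

>>> lines = ['.. autosummary::',
...          '   :toctree: api',
...          '',
...          '   mypackage.myfunction']
>>> find_autosummary_in_lines_for_automodsumm(lines)
[('mypackage.myfunction', 'api', None, None)]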
""" autosummary_re = re.compile(r'^(\s*)\.\.\s+autosummary::\s*') automodule_re = re.compile( r'^\s*\.\.\s+automodule::\s*([A-Za-z0-9_.]+)\s*$') module_re = re.compile( r'^\s*\.\.\s+(current)?module::\s*([a-zA-Z0-9_.]+)\s*$') autosummary_item_re = re.compile(r'^\s+(~?[_a-zA-Z][a-zA-Z0-9_.]*)\s*.*?') toctree_arg_re = re.compile(r'^\s+:toctree:\s*(.*?)\s*$') template_arg_re = re.compile(r'^\s+:template:\s*(.*?)\s*$') inherited_members_arg_re = re.compile(r'^\s+:inherited-members:\s*$') no_inherited_members_arg_re = re.compile(r'^\s+:no-inherited-members:\s*$') documented = [] toctree = None template = None inherited_members = None current_module = module in_autosummary = False base_indent = "" for line in lines: if in_autosummary: m = toctree_arg_re.match(line) if m: toctree = m.group(1) if filename: toctree = os.path.join(os.path.dirname(filename), toctree) continue m = template_arg_re.match(line) if m: template = m.group(1).strip() continue m = inherited_members_arg_re.match(line) if m: inherited_members = True continue m = no_inherited_members_arg_re.match(line) if m: inherited_members = False continue if line.strip().startswith(':'): warn(line) continue # skip options m = autosummary_item_re.match(line) if m: name = m.group(1).strip() if name.startswith('~'): name = name[1:] if current_module and \ not name.startswith(current_module + '.'): name = "%s.%s" % (current_module, name) documented.append((name, toctree, template, inherited_members)) continue if not line.strip() or line.startswith(base_indent + " "): continue in_autosummary = False m = autosummary_re.match(line) if m: in_autosummary = True base_indent = m.group(1) toctree = None template = None inherited_members = None continue m = automodule_re.search(line) if m: current_module = m.group(1).strip() # recurse into the automodule docstring documented.extend(find_autosummary_in_docstring( current_module, filename=filename)) continue m = module_re.match(line) if m: current_module = m.group(2) continue return documented asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/autodoc_enhancements.py0000644000175000017500000001230413243564211030637 0ustar dandan00000000000000""" Miscellaneous enhancements to help autodoc along. """ import inspect import sys import types import sphinx from distutils.version import LooseVersion from sphinx.ext.autodoc import AttributeDocumenter, ModuleDocumenter from sphinx.util.inspect import isdescriptor if sys.version_info[0] == 3: class_types = (type,) else: class_types = (type, types.ClassType) SPHINX_LT_15 = (LooseVersion(sphinx.__version__) < LooseVersion('1.5')) MethodDescriptorType = type(type.__subclasses__) # See # https://github.com/astropy/astropy-helpers/issues/116#issuecomment-71254836 # for further background on this. def type_object_attrgetter(obj, attr, *defargs): """ This implements an improved attrgetter for type objects (i.e. classes) that can handle class attributes that are implemented as properties on a metaclass. Normally `getattr` on a class with a `property` (say, "foo"), would return the `property` object itself. However, if the class has a metaclass which *also* defines a `property` named "foo", ``getattr(cls, 'foo')`` will find the "foo" property on the metaclass and resolve it. For the purposes of autodoc we just want to document the "foo" property defined on the class, not on the metaclass. For example:: >>> class Meta(type): ... @property ... def foo(cls): ... return 'foo' ... >>> class MyClass(metaclass=Meta): ... @property ... def foo(self): ... 
\"\"\"Docstring for MyClass.foo property.\"\"\" ... return 'myfoo' ... >>> getattr(MyClass, 'foo') 'foo' >>> type_object_attrgetter(MyClass, 'foo') >>> type_object_attrgetter(MyClass, 'foo').__doc__ 'Docstring for MyClass.foo property.' The last line of the example shows the desired behavior for the purposes of autodoc. """ for base in obj.__mro__: if attr in base.__dict__: if isinstance(base.__dict__[attr], property): # Note, this should only be used for properties--for any other # type of descriptor (classmethod, for example) this can mess # up existing expectations of what getattr(cls, ...) returns return base.__dict__[attr] break return getattr(obj, attr, *defargs) if SPHINX_LT_15: # Provided to work around a bug in Sphinx # See https://github.com/sphinx-doc/sphinx/pull/1843 class AttributeDocumenter(AttributeDocumenter): @classmethod def can_document_member(cls, member, membername, isattr, parent): non_attr_types = cls.method_types + class_types + \ (MethodDescriptorType,) isdatadesc = isdescriptor(member) and not \ isinstance(member, non_attr_types) and not \ type(member).__name__ == "instancemethod" # That last condition addresses an obscure case of C-defined # methods using a deprecated type in Python 3, that is not # otherwise exported anywhere by Python return isdatadesc or (not isinstance(parent, ModuleDocumenter) and not inspect.isroutine(member) and not isinstance(member, class_types)) def setup(app): # Must have the autodoc extension set up first so we can override it app.setup_extension('sphinx.ext.autodoc') # Need to import this too since it re-registers all the documenter types # =_= import sphinx.ext.autosummary.generate app.add_autodoc_attrgetter(type, type_object_attrgetter) if sphinx.version_info < (1, 4, 2): # this is a really ugly hack to supress a warning that sphinx 1.4 # generates when overriding an existing directive (which is *desired* # behavior here). As of sphinx v1.4.2, this has been fixed: # https://github.com/sphinx-doc/sphinx/issues/2451 # But we leave it in for 1.4.0/1.4.1 . But if the "needs_sphinx" is # eventually updated to >= 1.4.2, this should be removed entirely (in # favor of the line in the "else" clause) _oldwarn = app._warning _oldwarncount = app._warncount try: try: # *this* is in a try/finally because we don't want to force six as # a real dependency. In sphinx 1.4, six is a prerequisite, so # there's no issue. But in older sphinxes this may not be true... # but the inderlying warning is absent anyway so we let it slide. from six import StringIO app._warning = StringIO() except ImportError: pass app.add_autodocumenter(AttributeDocumenter) finally: app._warning = _oldwarn app._warncount = _oldwarncount else: suppress_warnigns_orig = app.config.suppress_warnings[:] if 'app.add_directive' not in app.config.suppress_warnings: app.config.suppress_warnings.append('app.add_directive') try: app.add_autodocumenter(AttributeDocumenter) finally: app.config.suppress_warnings = suppress_warnigns_orig asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/templates/0000755000175000017500000000000013246031665026102 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/templates/autosummary_core/0000755000175000017500000000000013246031665031500 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/templates/autosummary_core/module.rst0000644000175000017500000000127713243564211033521 0ustar dandan00000000000000{% if referencefile %} .. 
include:: {{ referencefile }} {% endif %} {{ objname }} {{ underline }} .. automodule:: {{ fullname }} {% block functions %} {% if functions %} .. rubric:: Functions .. autosummary:: {% for item in functions %} {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block classes %} {% if classes %} .. rubric:: Classes .. autosummary:: {% for item in classes %} {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block exceptions %} {% if exceptions %} .. rubric:: Exceptions .. autosummary:: {% for item in exceptions %} {{ item }} {%- endfor %} {% endif %} {% endblock %} asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/templates/autosummary_core/class.rst0000644000175000017500000000221113243564211033326 0ustar dandan00000000000000{% if referencefile %} .. include:: {{ referencefile }} {% endif %} {{ objname }} {{ underline }} .. currentmodule:: {{ module }} .. autoclass:: {{ objname }} :show-inheritance: {% if '__init__' in methods %} {% set caught_result = methods.remove('__init__') %} {% endif %} {% block attributes_summary %} {% if attributes %} .. rubric:: Attributes Summary .. autosummary:: {% for item in attributes %} ~{{ name }}.{{ item }} {%- endfor %} {% endif %} {% endblock %} {% block methods_summary %} {% if methods %} .. rubric:: Methods Summary .. autosummary:: {% for item in methods %} ~{{ name }}.{{ item }} {%- endfor %} {% endif %} {% endblock %} {% block attributes_documentation %} {% if attributes %} .. rubric:: Attributes Documentation {% for item in attributes %} .. autoattribute:: {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block methods_documentation %} {% if methods %} .. rubric:: Methods Documentation {% for item in methods %} .. automethod:: {{ item }} {%- endfor %} {% endif %} {% endblock %} asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/templates/autosummary_core/base.rst0000644000175000017500000000025213243564211033136 0ustar dandan00000000000000{% if referencefile %} .. include:: {{ referencefile }} {% endif %} {{ objname }} {{ underline }} .. currentmodule:: {{ module }} .. auto{{ objtype }}:: {{ objname }} asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/__init__.py0000644000175000017500000000002413243564211026204 0ustar dandan00000000000000__version__ = '0.7' asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/smart_resolver.py0000644000175000017500000000717713243564211027534 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The classes in the astropy docs are documented by their API location, which is not necessarily where they are defined in the source. This causes a problem when certain automated features of the doc build, such as the inheritance diagrams or the `Bases` list of a class reference a class by its canonical location rather than its "user" location. In the `autodoc-process-docstring` event, a mapping from the actual name to the API name is maintained. Later, in the `missing-reference` event, unresolved references are looked up in this dictionary and corrected if possible. """ from docutils.nodes import literal, reference def process_docstring(app, what, name, obj, options, lines): if isinstance(obj, type): env = app.env if not hasattr(env, 'class_name_mapping'): env.class_name_mapping = {} mapping = env.class_name_mapping mapping[obj.__module__ + '.' 
+ obj.__name__] = name def missing_reference_handler(app, env, node, contnode): if not hasattr(env, 'class_name_mapping'): env.class_name_mapping = {} mapping = env.class_name_mapping reftype = node['reftype'] reftarget = node['reftarget'] if reftype in ('obj', 'class', 'exc', 'meth'): reftarget = node['reftarget'] suffix = '' if reftarget not in mapping: if '.' in reftarget: front, suffix = reftarget.rsplit('.', 1) else: suffix = reftarget if suffix.startswith('_') and not suffix.startswith('__'): # If this is a reference to a hidden class or method, # we can't link to it, but we don't want to have a # nitpick warning. return node[0].deepcopy() if reftype in ('obj', 'meth') and '.' in reftarget: if front in mapping: reftarget = front suffix = '.' + suffix if (reftype in ('class', ) and '.' in reftarget and reftarget not in mapping): if '.' in front: reftarget, _ = front.rsplit('.', 1) suffix = '.' + suffix reftarget = reftarget + suffix prefix = reftarget.rsplit('.')[0] inventory = env.intersphinx_named_inventory if (reftarget not in mapping and prefix in inventory): if reftarget in inventory[prefix]['py:class']: newtarget = inventory[prefix]['py:class'][reftarget][2] if not node['refexplicit'] and \ '~' not in node.rawsource: contnode = literal(text=reftarget) newnode = reference('', '', internal=True) newnode['reftitle'] = reftarget newnode['refuri'] = newtarget newnode.append(contnode) return newnode if reftarget in mapping: newtarget = mapping[reftarget] + suffix if not node['refexplicit'] and '~' not in node.rawsource: contnode = literal(text=newtarget) newnode = env.domains['py'].resolve_xref( env, node['refdoc'], app.builder, 'class', newtarget, node, contnode) if newnode is not None: newnode['reftitle'] = reftarget return newnode def setup(app): app.connect('autodoc-process-docstring', process_docstring) app.connect('missing-reference', missing_reference_handler) asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/automodsumm.py0000644000175000017500000006363013243564211027033 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This directive will produce an "autosummary"-style table for public attributes of a specified module. See the `sphinx.ext.autosummary`_ extension for details on this process. The main difference from the `autosummary`_ directive is that `autosummary`_ requires manually inputting all attributes that appear in the table, while this captures the entries automatically. This directive requires a single argument that must be a module or package. It also accepts any options supported by the `autosummary`_ directive- see `sphinx.ext.autosummary`_ for details. It also accepts some additional options: * ``:classes-only:`` If present, the autosummary table will only contain entries for classes. This cannot be used at the same time with ``:functions-only:`` or ``:variables-only:``. * ``:functions-only:`` If present, the autosummary table will only contain entries for functions. This cannot be used at the same time with ``:classes-only:`` or ``:variables-only:``. * ``:variables-only:`` If present, the autosummary table will only contain entries for variables (everything except functions and classes). This cannot be used at the same time with ``:classes-only:`` or ``:functions-only:``. * ``:skip: obj1, [obj2, obj3, ...]`` If present, specifies that the listed objects should be skipped and not have their documentation generated, nor be included in the summary table. 
* ``:allowed-package-names: pkgormod1, [pkgormod2, pkgormod3, ...]`` Specifies the packages that functions/classes documented here are allowed to be from, as comma-separated list of package names. If not given, only objects that are actually in a subpackage of the package currently being documented are included. * ``:inherited-members:`` or ``:no-inherited-members:`` The global sphinx configuration option ``automodsumm_inherited_members`` decides if members that a class inherits from a base class are included in the generated documentation. The flags ``:inherited-members:`` or ``:no-inherited-members:`` allows overrriding this global setting. This extension also adds two sphinx configuration options: * ``automodsumm_writereprocessed`` Should be a bool, and if ``True``, will cause `automodsumm`_ to write files with any ``automodsumm`` sections replaced with the content Sphinx processes after ``automodsumm`` has run. The output files are not actually used by sphinx, so this option is only for figuring out the cause of sphinx warnings or other debugging. Defaults to ``False``. * ``automodsumm_inherited_members`` Should be a bool and if ``True``, will cause `automodsumm`_ to document class members that are inherited from a base class. This value can be overriden for any particular automodsumm directive by including the ``:inherited-members:`` or ``:no-inherited-members:`` options. Defaults to ``False``. .. _sphinx.ext.autosummary: http://sphinx-doc.org/latest/ext/autosummary.html .. _autosummary: http://sphinx-doc.org/latest/ext/autosummary.html#directive-autosummary .. _automod-diagram: automod-diagram directive ========================= This directive will produce an inheritance diagram like that of the `sphinx.ext.inheritance_diagram`_ extension. This directive requires a single argument that must be a module or package. It accepts no options. .. note:: Like 'inheritance-diagram', 'automod-diagram' requires `graphviz `_ to generate the inheritance diagram. .. _sphinx.ext.inheritance_diagram: http://sphinx-doc.org/latest/ext/inheritance.html """ import inspect import os import re import io from distutils.version import LooseVersion from sphinx import __version__ from sphinx.ext.autosummary import Autosummary from sphinx.ext.inheritance_diagram import InheritanceDiagram from docutils.parsers.rst.directives import flag from .utils import find_mod_objs, cleanup_whitespace SPHINX_LT_17 = LooseVersion(__version__) < LooseVersion('1.7') def _str_list_converter(argument): """ A directive option conversion function that converts the option into a list of strings. Used for 'skip' option. """ if argument is None: return [] else: return [s.strip() for s in argument.split(',')] class Automodsumm(Autosummary): required_arguments = 1 optional_arguments = 0 final_argument_whitespace = False has_content = False option_spec = dict(Autosummary.option_spec) option_spec['functions-only'] = flag option_spec['classes-only'] = flag option_spec['variables-only'] = flag option_spec['skip'] = _str_list_converter option_spec['allowed-package-names'] = _str_list_converter option_spec['inherited-members'] = flag option_spec['no-inherited-members'] = flag def run(self): env = self.state.document.settings.env modname = self.arguments[0] self.warnings = [] nodelist = [] try: localnames, fqns, objs = find_mod_objs(modname) except ImportError: self.warnings = [] self.warn("Couldn't import module " + modname) return self.warnings try: # set self.content to trick the autosummary internals. 
# Be sure to respect functions-only and classes-only. funconly = 'functions-only' in self.options clsonly = 'classes-only' in self.options varonly = 'variables-only' in self.options if [clsonly, funconly, varonly].count(True) > 1: self.warning('more than one of functions-only, classes-only, ' 'or variables-only defined. Ignoring.') clsonly = funconly = varonly = False skipnames = [] if 'skip' in self.options: option_skipnames = set(self.options['skip']) for lnm in localnames: if lnm in option_skipnames: option_skipnames.remove(lnm) skipnames.append(lnm) if len(option_skipnames) > 0: self.warn('Tried to skip objects {objs} in module {mod}, ' 'but they were not present. Ignoring.' .format(objs=option_skipnames, mod=modname)) if funconly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and inspect.isroutine(obj): cont.append(nm) elif clsonly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and inspect.isclass(obj): cont.append(nm) elif varonly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and not (inspect.isclass(obj) or inspect.isroutine(obj)): cont.append(nm) else: cont = [nm for nm in localnames if nm not in skipnames] self.content = cont # for some reason, even though ``currentmodule`` is substituted in, # sphinx doesn't necessarily recognize this fact. So we just force # it internally, and that seems to fix things env.temp_data['py:module'] = modname env.ref_context['py:module'] = modname # can't use super because Sphinx/docutils has trouble return # super(Autosummary,self).run() nodelist.extend(Autosummary.run(self)) return self.warnings + nodelist finally: # has_content = False for the Automodsumm self.content = [] def get_items(self, names): self.genopt['imported-members'] = True return Autosummary.get_items(self, names) # <-------------------automod-diagram stuff-----------------------------------> class Automoddiagram(InheritanceDiagram): option_spec = dict(InheritanceDiagram.option_spec) option_spec['allowed-package-names'] = _str_list_converter option_spec['skip'] = _str_list_converter def run(self): try: ols = self.options.get('allowed-package-names', []) ols = True if len(ols) == 0 else ols # if none are given, assume only local nms, objs = find_mod_objs(self.arguments[0], onlylocals=ols)[1:] except ImportError: self.warnings = [] self.warn("Couldn't import module " + self.arguments[0]) return self.warnings # Check if some classes should be skipped skip = self.options.get('skip', []) clsnms = [] for n, o in zip(nms, objs): if n.split('.')[-1] in skip: continue if inspect.isclass(o): clsnms.append(n) oldargs = self.arguments try: if len(clsnms) > 0: self.arguments = [' '.join(clsnms)] return InheritanceDiagram.run(self) finally: self.arguments = oldargs # <---------------------automodsumm generation stuff--------------------------> def process_automodsumm_generation(app): env = app.builder.env filestosearch = [] for docname in env.found_docs: filename = env.doc2path(docname) if os.path.isfile(filename): filestosearch.append(docname + os.path.splitext(filename)[1]) liness = [] for sfn in filestosearch: lines = automodsumm_to_autosummary_lines(sfn, app) liness.append(lines) if app.config.automodsumm_writereprocessed: if lines: # empty list means no automodsumm entry is in the file outfn = os.path.join(app.srcdir, sfn) + '.automodsumm' with open(outfn, 'w') as f: for l in lines: f.write(l) f.write('\n') for sfn, lines in zip(filestosearch, liness): suffix = os.path.splitext(sfn)[1] if len(lines) > 0: 
generate_automodsumm_docs( lines, sfn, app=app, builder=app.builder, warn=app.warn, info=app.info, suffix=suffix, base_path=app.srcdir, inherited_members=app.config.automodsumm_inherited_members) # _automodsummrex = re.compile(r'^(\s*)\.\. automodsumm::\s*([A-Za-z0-9_.]+)\s*' # r'\n\1(\s*)(\S|$)', re.MULTILINE) _lineendrex = r'(?:\n|$)' _hdrex = r'^\n?(\s*)\.\. automodsumm::\s*(\S+)\s*' + _lineendrex _oprex1 = r'(?:\1(\s+)\S.*' + _lineendrex + ')' _oprex2 = r'(?:\1\4\S.*' + _lineendrex + ')' _automodsummrex = re.compile(_hdrex + '(' + _oprex1 + '?' + _oprex2 + '*)', re.MULTILINE) def automodsumm_to_autosummary_lines(fn, app): """ Generates lines from a file with an "automodsumm" entry suitable for feeding into "autosummary". Searches the provided file for `automodsumm` directives and returns a list of lines specifying the `autosummary` commands for the modules requested. This does *not* return the whole file contents - just an autosummary section in place of any :automodsumm: entries. Note that any options given for `automodsumm` are also included in the generated `autosummary` section. Parameters ---------- fn : str The name of the file to search for `automodsumm` entries. app : sphinx.application.Application The sphinx Application object Returns ------- lines : list of str Lines for all `automodsumm` entries with the entries replaced by `autosummary` and the module's members added. """ fullfn = os.path.join(app.builder.env.srcdir, fn) with io.open(fullfn, encoding='utf8') as fr: # Note: we use __name__ here instead of just writing the module name in # case this extension is bundled into another package from . import automodapi try: extensions = app.extensions except AttributeError: # Sphinx <1.6 extensions = app._extensions if automodapi.__name__ in extensions: # Must do the automodapi on the source to get the automodsumm # that might be in there docname = os.path.splitext(fn)[0] filestr = automodapi.automodapi_replace(fr.read(), app, True, docname, False) else: filestr = fr.read() spl = _automodsummrex.split(filestr) # 0th entry is the stuff before the first automodsumm line indent1s = spl[1::5] mods = spl[2::5] opssecs = spl[3::5] indent2s = spl[4::5] remainders = spl[5::5] # only grab automodsumm sections and convert them to autosummary with the # entries for all the public objects newlines = [] # loop over all automodsumms in this document for i, (i1, i2, modnm, ops, rem) in enumerate(zip(indent1s, indent2s, mods, opssecs, remainders)): allindent = i1 + (' ' if i2 is None else i2) # filter out functions-only, classes-only, and ariables-only # options if present. oplines = ops.split('\n') toskip = [] allowedpkgnms = [] funcsonly = clssonly = varsonly = False for i, ln in reversed(list(enumerate(oplines))): if ':functions-only:' in ln: funcsonly = True del oplines[i] if ':classes-only:' in ln: clssonly = True del oplines[i] if ':variables-only:' in ln: varsonly = True del oplines[i] if ':skip:' in ln: toskip.extend(_str_list_converter(ln.replace(':skip:', ''))) del oplines[i] if ':allowed-package-names:' in ln: allowedpkgnms.extend(_str_list_converter(ln.replace(':allowed-package-names:', ''))) del oplines[i] if [funcsonly, clssonly, varsonly].count(True) > 1: msg = ('Defined more than one of functions-only, classes-only, ' 'and variables-only. Skipping this directive.') lnnum = sum([spl[j].count('\n') for j in range(i * 5 + 1)]) app.warn('[automodsumm]' + msg, (fn, lnnum)) continue # Use the currentmodule directive so we can just put the local names # in the autosummary table. 
Note that this doesn't always seem to # actually "take" in Sphinx's eyes, so in `Automodsumm.run`, we have to # force it internally, as well. newlines.extend([i1 + '.. currentmodule:: ' + modnm, '', '.. autosummary::']) newlines.extend(oplines) ols = True if len(allowedpkgnms) == 0 else allowedpkgnms for nm, fqn, obj in zip(*find_mod_objs(modnm, onlylocals=ols)): if nm in toskip: continue if funcsonly and not inspect.isroutine(obj): continue if clssonly and not inspect.isclass(obj): continue if varsonly and (inspect.isclass(obj) or inspect.isroutine(obj)): continue newlines.append(allindent + nm) # add one newline at the end of the autosummary block newlines.append('') return newlines def generate_automodsumm_docs(lines, srcfn, app=None, suffix='.rst', warn=None, info=None, base_path=None, builder=None, template_dir=None, inherited_members=False): """ This function is adapted from `sphinx.ext.autosummary.generate.generate_autosummmary_docs` to generate source for the automodsumm directives that should be autosummarized. Unlike generate_autosummary_docs, this function is called one file at a time. """ from sphinx.jinja2glue import BuiltinTemplateLoader from sphinx.ext.autosummary import import_by_name, get_documenter from sphinx.ext.autosummary.generate import (_simple_info, _simple_warn) from sphinx.util.osutil import ensuredir from sphinx.util.inspect import safe_getattr from jinja2 import FileSystemLoader, TemplateNotFound from jinja2.sandbox import SandboxedEnvironment from .utils import find_autosummary_in_lines_for_automodsumm as find_autosummary_in_lines if info is None: info = _simple_info if warn is None: warn = _simple_warn # info('[automodsumm] generating automodsumm for: ' + srcfn) # Create our own templating environment - here we use Astropy's # templates rather than the default autosummary templates, in order to # allow docstrings to be shown for methods. 
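# A minimal, standalone sketch of the templating pattern used below: the
# template directories are wrapped in a Jinja2 ``FileSystemLoader`` inside a
# ``SandboxedEnvironment``, and individual .rst templates are looked up and
# rendered with a namespace of values.  The directory list and context mapping
# are placeholders supplied by the caller; ``autosummary_core/class.rst`` is
# one of the templates bundled with this package.
from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment


def render_class_stub(template_dirs, context):
    """Render the bundled class template with the given context mapping."""
    loader = FileSystemLoader(template_dirs)
    env = SandboxedEnvironment(loader=loader)
    template = env.get_template('autosummary_core/class.rst')
    return template.render(**context)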
template_dirs = [os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(base_path, '_templates')] if builder is not None: # allow the user to override the templates template_loader = BuiltinTemplateLoader() template_loader.init(builder, dirs=template_dirs) else: if template_dir: template_dirs.insert(0, template_dir) template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) # read # items = find_autosummary_in_files(sources) items = find_autosummary_in_lines(lines, filename=srcfn) if len(items) > 0: msg = '[automodsumm] {1}: found {0} automodsumm entries to generate' info(msg.format(len(items), srcfn)) # gennms = [item[0] for item in items] # if len(gennms) > 20: # gennms = gennms[:10] + ['...'] + gennms[-10:] # info('[automodsumm] generating autosummary for: ' + ', '.join(gennms)) # remove possible duplicates items = list(set(items)) # keep track of new files new_files = [] # write for name, path, template_name, inherited_mem in sorted(items): if path is None: # The corresponding autosummary:: directive did not have # a :toctree: option continue path = os.path.abspath(os.path.join(base_path, path)) ensuredir(path) try: import_by_name_values = import_by_name(name) except ImportError as e: warn('[automodsumm] failed to import %r: %s' % (name, e)) continue # if block to accommodate Sphinx's v1.2.2 and v1.2.3 respectively if len(import_by_name_values) == 3: name, obj, parent = import_by_name_values elif len(import_by_name_values) == 4: name, obj, parent, module_name = import_by_name_values fn = os.path.join(path, name + suffix) # skip it if it exists if os.path.isfile(fn): continue new_files.append(fn) f = open(fn, 'w') try: if SPHINX_LT_17: doc = get_documenter(obj, parent) else: doc = get_documenter(app, obj, parent) if template_name is not None: template = template_env.get_template(template_name) else: tmplstr = 'autosummary_core/%s.rst' try: template = template_env.get_template(tmplstr % doc.objtype) except TemplateNotFound: template = template_env.get_template(tmplstr % 'base') def get_members_mod(obj, typ, include_public=[]): """ typ = None -> all """ items = [] for name in dir(obj): try: if SPHINX_LT_17: documenter = get_documenter(safe_getattr(obj, name), obj) else: documenter = get_documenter(app, safe_getattr(obj, name), obj) except AttributeError: continue if typ is None or documenter.objtype == typ: items.append(name) public = [x for x in items if x in include_public or not x.startswith('_')] return public, items def get_members_class(obj, typ, include_public=[], include_base=False): """ typ = None -> all include_base -> include attrs that are from a base class """ items = [] # using dir gets all of the attributes, including the elements # from the base class, otherwise use __slots__ or __dict__ if include_base: names = dir(obj) else: if hasattr(obj, '__slots__'): names = tuple(getattr(obj, '__slots__')) else: names = getattr(obj, '__dict__').keys() for name in names: try: if SPHINX_LT_17: documenter = get_documenter(safe_getattr(obj, name), obj) else: documenter = get_documenter(app, safe_getattr(obj, name), obj) except AttributeError: continue if typ is None or documenter.objtype == typ: items.append(name) public = [x for x in items if x in include_public or not x.startswith('_')] return public, items ns = {} if doc.objtype == 'module': ns['members'] = get_members_mod(obj, None) ns['functions'], ns['all_functions'] = \ get_members_mod(obj, 'function') ns['classes'], ns['all_classes'] = \ get_members_mod(obj, 'class') 
ns['exceptions'], ns['all_exceptions'] = \ get_members_mod(obj, 'exception') elif doc.objtype == 'class': if inherited_mem is not None: # option set in this specifc directive include_base = inherited_mem else: # use default value include_base = inherited_members api_class_methods = ['__init__', '__call__'] ns['members'] = get_members_class(obj, None, include_base=include_base) ns['methods'], ns['all_methods'] = \ get_members_class(obj, 'method', api_class_methods, include_base=include_base) ns['attributes'], ns['all_attributes'] = \ get_members_class(obj, 'attribute', include_base=include_base) ns['methods'].sort() ns['attributes'].sort() parts = name.split('.') if doc.objtype in ('method', 'attribute'): mod_name = '.'.join(parts[:-2]) cls_name = parts[-2] obj_name = '.'.join(parts[-2:]) ns['class'] = cls_name else: mod_name, obj_name = '.'.join(parts[:-1]), parts[-1] ns['fullname'] = name ns['module'] = mod_name ns['objname'] = obj_name ns['name'] = parts[-1] ns['objtype'] = doc.objtype ns['underline'] = len(obj_name) * '=' # We now check whether a file for reference footnotes exists for # the module being documented. We first check if the # current module is a file or a directory, as this will give a # different path for the reference file. For example, if # documenting astropy.wcs then the reference file is at # ../wcs/references.txt, while if we are documenting # astropy.config.logging_helper (which is at # astropy/config/logging_helper.py) then the reference file is set # to ../config/references.txt if '.' in mod_name: mod_name_dir = mod_name.replace('.', '/').split('/', 1)[1] else: mod_name_dir = mod_name if not os.path.isdir(os.path.join(base_path, mod_name_dir)) \ and os.path.isdir(os.path.join(base_path, mod_name_dir.rsplit('/', 1)[0])): mod_name_dir = mod_name_dir.rsplit('/', 1)[0] # We then have to check whether it exists, and if so, we pass it # to the template. if os.path.exists(os.path.join(base_path, mod_name_dir, 'references.txt')): # An important subtlety here is that the path we pass in has # to be relative to the file being generated, so we have to # figure out the right number of '..'s ndirsback = path.replace(base_path, '').count('/') ref_file_rel_segments = ['..'] * ndirsback ref_file_rel_segments.append(mod_name_dir) ref_file_rel_segments.append('references.txt') ns['referencefile'] = os.path.join(*ref_file_rel_segments) rendered = template.render(**ns) f.write(cleanup_whitespace(rendered)) finally: f.close() def setup(app): # need autodoc fixes # Note: we use __name__ here instead of just writing the module name in # case this extension is bundled into another package from . import autodoc_enhancements app.setup_extension(autodoc_enhancements.__name__) # need inheritance-diagram for automod-diagram app.setup_extension('sphinx.ext.inheritance_diagram') app.add_directive('automod-diagram', Automoddiagram) app.add_directive('automodsumm', Automodsumm) app.connect('builder-inited', process_automodsumm_generation) app.add_config_value('automodsumm_writereprocessed', False, True) app.add_config_value('automodsumm_inherited_members', False, 'env') asdf-1.3.3/astropy_helpers/astropy_helpers/extern/automodapi/automodapi.py0000644000175000017500000003665113243564211026626 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This directive takes a single argument that must be a module or package. 
It will produce a block of documentation that includes the docstring for the package, an :ref:`automodsumm` directive, and an :ref:`automod-diagram` if there are any classes in the module. If only the main docstring of the module/package is desired in the documentation, use `automodule`_ instead of `automodapi`_. It accepts the following options: * ``:include-all-objects:`` If present, include not just functions and classes, but all objects. This includes variables, for which a possible docstring after the variable definition will be shown. * ``:no-inheritance-diagram:`` If present, the inheritance diagram will not be shown even if the module/package has classes. * ``:skip: str`` This option results in the specified object being skipped, that is the object will *not* be included in the generated documentation. This option may appear any number of times to skip multiple objects. * ``:no-main-docstr:`` If present, the docstring for the module/package will not be generated. The function and class tables will still be used, however. * ``:headings: str`` Specifies the characters (in one string) used as the heading levels used for the generated section. This must have at least 2 characters (any after 2 will be ignored). This also *must* match the rest of the documentation on this page for sphinx to be happy. Defaults to "-^", which matches the convention used for Python's documentation, assuming the automodapi call is inside a top-level section (which usually uses '='). * ``:no-heading:`` If specified do not create a top level heading for the section. That is, do not create a title heading with text like "packagename Package". The actual docstring for the package/module will still be shown, though, unless ``:no-main-docstr:`` is given. * ``:allowed-package-names: str`` Specifies the packages that functions/classes documented here are allowed to be from, as comma-separated list of package names. If not given, only objects that are actually in a subpackage of the package currently being documented are included. * ``:inherited-members:`` / ``:no-inherited-members:`` The global sphinx configuration option ``automodsumm_inherited_members`` decides if members that a class inherits from a base class are included in the generated documentation. The option ``:inherited-members:`` or ``:no-inherited-members:`` allows the user to overrride the global setting. This extension also adds three sphinx configuration options: * ``automodapi_toctreedirnm`` This must be a string that specifies the name of the directory the automodsumm generated documentation ends up in. This directory path should be relative to the documentation root (e.g., same place as ``index.rst``). Defaults to ``'api'``. * ``automodapi_writereprocessed`` Should be a bool, and if `True`, will cause `automodapi`_ to write files with any `automodapi`_ sections replaced with the content Sphinx processes after `automodapi`_ has run. The output files are not actually used by sphinx, so this option is only for figuring out the cause of sphinx warnings or other debugging. Defaults to `False`. * ``automodsumm_inherited_members`` Should be a bool and if ``True`` members that a class inherits from a base class are included in the generated documentation. Defaults to ``False``. .. _automodule: http://sphinx-doc.org/latest/ext/autodoc.html?highlight=automodule#directive-automodule """ # Implementation note: # The 'automodapi' directive is not actually implemented as a docutils # directive. 
Instead, this extension searches for the 'automodapi' text in # all sphinx documents, and replaces it where necessary from a template built # into this extension. This is necessary because automodsumm (and autosummary) # use the "builder-inited" event, which comes before the directives are # actually built. import inspect import io import os import re import sys from .utils import find_mod_objs if sys.version_info[0] == 3: text_type = str else: text_type = unicode automod_templ_modheader = """ {modname} {pkgormod} {modhds}{pkgormodhds} {automoduleline} """ automod_templ_classes = """ Classes {clshds} .. automodsumm:: {modname} :classes-only: {clsfuncoptions} """ automod_templ_funcs = """ Functions {funchds} .. automodsumm:: {modname} :functions-only: {clsfuncoptions} """ automod_templ_vars = """ Variables {otherhds} .. automodsumm:: {modname} :variables-only: {clsfuncoptions} """ automod_templ_inh = """ Class Inheritance Diagram {clsinhsechds} .. automod-diagram:: {modname} :private-bases: :parts: 1 {allowedpkgnms} {skip} """ _automodapirex = re.compile(r'^(?:\.\.\s+automodapi::\s*)([A-Za-z0-9_.]+)' r'\s*$((?:\n\s+:[a-zA-Z_\-]+:.*$)*)', flags=re.MULTILINE) # the last group of the above regex is intended to go into finall with the below _automodapiargsrex = re.compile(r':([a-zA-Z_\-]+):(.*)$', flags=re.MULTILINE) def automodapi_replace(sourcestr, app, dotoctree=True, docname=None, warnings=True): """ Replaces `sourcestr`'s entries of ".. automdapi::" with the automodapi template form based on provided options. This is used with the sphinx event 'source-read' to replace `automodapi`_ entries before sphinx actually processes them, as automodsumm needs the code to be present to generate stub documentation. Parameters ---------- sourcestr : str The string with sphinx source to be checked for automodapi replacement. app : `sphinx.application.Application` The sphinx application. dotoctree : bool If `True`, a ":toctree:" option will be added in the ".. automodsumm::" sections of the template, pointing to the appropriate "generated" directory based on the Astropy convention (e.g. in ``docs/api``) docname : str The name of the file for this `sourcestr` (if known - if not, it can be `None`). If not provided and `dotoctree` is `True`, the generated files may end up in the wrong place. warnings : bool If `False`, all warnings that would normally be issued are silenced. Returns ------- newstr :str The string with automodapi entries replaced with the correct sphinx markup. """ spl = _automodapirex.split(sourcestr) if len(spl) > 1: # automodsumm is in this document # Use app.srcdir because api folder should be inside source folder not # at folder where sphinx is run. if dotoctree: toctreestr = ':toctree: ' api_dir = os.path.join(app.srcdir, app.config.automodapi_toctreedirnm) if docname is None: doc_path = '.' 
else: doc_path = os.path.join(app.srcdir, docname) toctreestr += os.path.relpath(api_dir, os.path.dirname(doc_path)) else: toctreestr = '' newstrs = [spl[0]] for grp in range(len(spl) // 3): modnm = spl[grp * 3 + 1] # find where this is in the document for warnings if docname is None: location = None else: location = (docname, spl[0].count('\n')) # initialize default options toskip = [] inhdiag = maindocstr = top_head = True hds = '-^' allowedpkgnms = [] allowothers = False # look for actual options unknownops = [] inherited_members = None for opname, args in _automodapiargsrex.findall(spl[grp * 3 + 2]): if opname == 'skip': toskip.append(args.strip()) elif opname == 'no-inheritance-diagram': inhdiag = False elif opname == 'no-main-docstr': maindocstr = False elif opname == 'headings': hds = args elif opname == 'no-heading': top_head = False elif opname == 'allowed-package-names': allowedpkgnms.append(args.strip()) elif opname == 'inherited-members': inherited_members = True elif opname == 'no-inherited-members': inherited_members = False elif opname == 'include-all-objects': allowothers = True else: unknownops.append(opname) # join all the allowedpkgnms if len(allowedpkgnms) == 0: allowedpkgnms = '' onlylocals = True else: allowedpkgnms = ':allowed-package-names: ' + ','.join(allowedpkgnms) onlylocals = allowedpkgnms # get the two heading chars if len(hds) < 2: msg = 'Not enough headings (got {0}, need 2), using default -^' if warnings: app.warn(msg.format(len(hds)), location) hds = '-^' h1, h2 = hds.lstrip()[:2] # tell sphinx that the remaining args are invalid. if len(unknownops) > 0 and app is not None: opsstrs = ','.join(unknownops) msg = 'Found additional options ' + opsstrs + ' in automodapi.' if warnings: app.warn(msg, location) ispkg, hascls, hasfuncs, hasother = _mod_info( modnm, toskip, onlylocals=onlylocals) # add automodule directive only if no-main-docstr isn't present if maindocstr: automodline = '.. automodule:: {modname}'.format(modname=modnm) else: automodline = '' if top_head: newstrs.append(automod_templ_modheader.format( modname=modnm, modhds=h1 * len(modnm), pkgormod='Package' if ispkg else 'Module', pkgormodhds=h1 * (8 if ispkg else 7), automoduleline=automodline)) # noqa else: newstrs.append(automod_templ_modheader.format( modname='', modhds='', pkgormod='', pkgormodhds='', automoduleline=automodline)) # construct the options for the class/function sections # start out indented at 4 spaces, but need to keep the indentation. clsfuncoptions = [] if toctreestr: clsfuncoptions.append(toctreestr) if toskip: clsfuncoptions.append(':skip: ' + ','.join(toskip)) if allowedpkgnms: clsfuncoptions.append(allowedpkgnms) if hascls: # This makes no sense unless there are classes. 
if inherited_members is True: clsfuncoptions.append(':inherited-members:') if inherited_members is False: clsfuncoptions.append(':no-inherited-members:') clsfuncoptionstr = '\n '.join(clsfuncoptions) if hasfuncs: newstrs.append(automod_templ_funcs.format( modname=modnm, funchds=h2 * 9, clsfuncoptions=clsfuncoptionstr)) if hascls: newstrs.append(automod_templ_classes.format( modname=modnm, clshds=h2 * 7, clsfuncoptions=clsfuncoptionstr)) if allowothers and hasother: newstrs.append(automod_templ_vars.format( modname=modnm, otherhds=h2 * 9, clsfuncoptions=clsfuncoptionstr)) if inhdiag and hascls: # add inheritance diagram if any classes are in the module if toskip: clsskip = ':skip: ' + ','.join(toskip) else: clsskip = '' diagram_entry = automod_templ_inh.format( modname=modnm, clsinhsechds=h2 * 25, allowedpkgnms=allowedpkgnms, skip=clsskip) diagram_entry = diagram_entry.replace(' \n', '') newstrs.append(diagram_entry) newstrs.append(spl[grp * 3 + 3]) newsourcestr = ''.join(newstrs) if app.config.automodapi_writereprocessed: # sometimes they are unicode, sometimes not, depending on how # sphinx has processed things if isinstance(newsourcestr, text_type): ustr = newsourcestr else: ustr = newsourcestr.decode(app.config.source_encoding) if docname is None: with io.open(os.path.join(app.srcdir, 'unknown.automodapi'), 'a', encoding='utf8') as f: f.write(u'\n**NEW DOC**\n\n') f.write(ustr) else: env = app.builder.env # Determine the filename associated with this doc (specifically # the extension) filename = docname + os.path.splitext(env.doc2path(docname))[1] filename += '.automodapi' with io.open(os.path.join(app.srcdir, filename), 'w', encoding='utf8') as f: f.write(ustr) return newsourcestr else: return sourcestr def _mod_info(modname, toskip=[], onlylocals=True): """ Determines if a module is a module or a package and whether or not it has classes or functions. """ hascls = hasfunc = hasother = False for localnm, fqnm, obj in zip(*find_mod_objs(modname, onlylocals=onlylocals)): if localnm not in toskip: hascls = hascls or inspect.isclass(obj) hasfunc = hasfunc or inspect.isroutine(obj) hasother = hasother or (not inspect.isclass(obj) and not inspect.isroutine(obj)) if hascls and hasfunc and hasother: break # find_mod_objs has already imported modname # TODO: There is probably a cleaner way to do this, though this is pretty # reliable for all Python versions for most cases that we care about. pkg = sys.modules[modname] ispkg = (hasattr(pkg, '__file__') and isinstance(pkg.__file__, str) and os.path.split(pkg.__file__)[1].startswith('__init__.py')) return ispkg, hascls, hasfunc, hasother def process_automodapi(app, docname, source): source[0] = automodapi_replace(source[0], app, True, docname) def setup(app): app.setup_extension('sphinx.ext.autosummary') # Note: we use __name__ here instead of just writing the module name in # case this extension is bundled into another package from . import automodsumm app.setup_extension(automodsumm.__name__) app.connect('source-read', process_automodapi) app.add_config_value('automodapi_toctreedirnm', 'api', True) app.add_config_value('automodapi_writereprocessed', False, True) asdf-1.3.3/astropy_helpers/astropy_helpers/extern/__init__.py0000644000175000017500000000111513243564211024044 0ustar dandan00000000000000# The ``astropy_helpers.extern`` sub-module includes modules developed elsewhere # that are bundled here for convenience. 
At the moment, this consists of the # following two sphinx extensions: # # * `numpydoc `_, a Sphinx extension # developed as part of the Numpy project. This is used to parse docstrings # in Numpy format # # * `sphinx-automodapi `_, a Sphinx # developed as part of the Astropy project. This used to be developed directly # in ``astropy-helpers`` but is now a standalone package. asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/0000755000175000017500000000000013246031665023600 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/docscrape.py0000644000175000017500000004452113243564211026116 0ustar dandan00000000000000"""Extract reference documentation from the NumPy source tree. """ from __future__ import division, absolute_import, print_function import inspect import textwrap import re import pydoc from warnings import warn import collections import copy import sys class Reader(object): """A line-based string reader. """ def __init__(self, data): """ Parameters ---------- data : str String with lines separated by '\n'. """ if isinstance(data, list): self._str = data else: self._str = data.split('\n') # store string as list of lines self.reset() def __getitem__(self, n): return self._str[n] def reset(self): self._l = 0 # current line nr def read(self): if not self.eof(): out = self[self._l] self._l += 1 return out else: return '' def seek_next_non_empty_line(self): for l in self[self._l:]: if l.strip(): break else: self._l += 1 def eof(self): return self._l >= len(self._str) def read_to_condition(self, condition_func): start = self._l for line in self[start:]: if condition_func(line): return self[start:self._l] self._l += 1 if self.eof(): return self[start:self._l+1] return [] def read_to_next_empty_line(self): self.seek_next_non_empty_line() def is_empty(line): return not line.strip() return self.read_to_condition(is_empty) def read_to_next_unindented_line(self): def is_unindented(line): return (line.strip() and (len(line.lstrip()) == len(line))) return self.read_to_condition(is_unindented) def peek(self, n=0): if self._l + n < len(self._str): return self[self._l + n] else: return '' def is_empty(self): return not ''.join(self._str).strip() class ParseError(Exception): def __str__(self): message = self.args[0] if hasattr(self, 'docstring'): message = "%s in %r" % (message, self.docstring) return message class NumpyDocString(collections.Mapping): sections = { 'Signature': '', 'Summary': [''], 'Extended Summary': [], 'Parameters': [], 'Returns': [], 'Yields': [], 'Raises': [], 'Warns': [], 'Other Parameters': [], 'Attributes': [], 'Methods': [], 'See Also': [], 'Notes': [], 'Warnings': [], 'References': '', 'Examples': '', 'index': {} } def __init__(self, docstring, config={}): orig_docstring = docstring docstring = textwrap.dedent(docstring).split('\n') self._doc = Reader(docstring) self._parsed_data = copy.deepcopy(self.sections) try: self._parse() except ParseError as e: e.docstring = orig_docstring raise def __getitem__(self, key): return self._parsed_data[key] def __setitem__(self, key, val): if key not in self._parsed_data: warn("Unknown section %s" % key) else: self._parsed_data[key] = val def __iter__(self): return iter(self._parsed_data) def __len__(self): return len(self._parsed_data) def _is_at_section(self): self._doc.seek_next_non_empty_line() if self._doc.eof(): return False l1 = self._doc.peek().strip() # e.g. Parameters if l1.startswith('.. 
index::'): return True l2 = self._doc.peek(1).strip() # ---------- or ========== return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) def _strip(self, doc): i = 0 j = 0 for i, line in enumerate(doc): if line.strip(): break for j, line in enumerate(doc[::-1]): if line.strip(): break return doc[i:len(doc)-j] def _read_to_next_section(self): section = self._doc.read_to_next_empty_line() while not self._is_at_section() and not self._doc.eof(): if not self._doc.peek(-1).strip(): # previous line was empty section += [''] section += self._doc.read_to_next_empty_line() return section def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if name.startswith('..'): # index section yield name, data[1:] elif len(data) < 2: yield StopIteration else: yield name, self._strip(data[2:]) def _parse_param_list(self, content): r = Reader(content) params = [] while not r.eof(): header = r.read().strip() if ' : ' in header: arg_name, arg_type = header.split(' : ')[:2] else: arg_name, arg_type = header, '' desc = r.read_to_next_unindented_line() desc = dedent_lines(desc) params.append((arg_name, arg_type, desc)) return params _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) def _parse_see_also(self, content): """ func_name : Descriptive text continued text another_func_name : Descriptive text func_name1, func_name2, :meth:`func_name`, func_name3 """ items = [] def parse_item_name(text): """Match ':role:`name`' or 'name'""" m = self._name_rgx.match(text) if m: g = m.groups() if g[1] is None: return g[3], None else: return g[2], g[1] raise ParseError("%s is not a item name" % text) def push_item(name, rest): if not name: return name, role = parse_item_name(name) items.append((name, list(rest), role)) del rest[:] current_func = None rest = [] for line in content: if not line.strip(): continue m = self._name_rgx.match(line) if m and line[m.end():].strip().startswith(':'): push_item(current_func, rest) current_func, line = line[:m.end()], line[m.end():] rest = [line.split(':', 1)[1].strip()] if not rest[0]: rest = [] elif not line.startswith(' '): push_item(current_func, rest) current_func = None if ',' in line: for func in line.split(','): if func.strip(): push_item(func, []) elif line.strip(): current_func = line elif current_func is not None: rest.append(line.strip()) push_item(current_func, rest) return items def _parse_index(self, section, content): """ .. 
index: default :refguide: something, else, and more """ def strip_each_in(lst): return [s.strip() for s in lst] out = {} section = section.split('::') if len(section) > 1: out['default'] = strip_each_in(section[1].split(','))[0] for line in content: line = line.split(':') if len(line) > 2: out[line[1]] = strip_each_in(line[2].split(',')) return out def _parse_summary(self): """Grab signature (if given) and summary""" if self._is_at_section(): return # If several signatures present, take the last one while True: summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): continue break if summary is not None: self['Summary'] = summary if not self._is_at_section(): self['Extended Summary'] = self._read_to_next_section() def _parse(self): self._doc.reset() self._parse_summary() sections = list(self._read_sections()) section_names = set([section for section, content in sections]) has_returns = 'Returns' in section_names has_yields = 'Yields' in section_names # We could do more tests, but we are not. Arbitrarily. if has_returns and has_yields: msg = 'Docstring contains both a Returns and Yields section.' raise ValueError(msg) for (section, content) in sections: if not section.startswith('..'): section = (s.capitalize() for s in section.split(' ')) section = ' '.join(section) if self.get(section): if hasattr(self, '_obj'): # we know where the docs came from: try: filename = inspect.getsourcefile(self._obj) except TypeError: filename = None msg = ("The section %s appears twice in " "the docstring of %s in %s." % (section, self._obj, filename)) raise ValueError(msg) else: msg = ("The section %s appears twice" % section) raise ValueError(msg) if section in ('Parameters', 'Returns', 'Yields', 'Raises', 'Warns', 'Other Parameters', 'Attributes', 'Methods'): self[section] = self._parse_param_list(content) elif section.startswith('.. 
index::'): self['index'] = self._parse_index(section, content) elif section == 'See Also': self['See Also'] = self._parse_see_also(content) else: self[section] = content # string conversion routines def _str_header(self, name, symbol='-'): return [name, len(name)*symbol] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): if self['Signature']: return [self['Signature'].replace('*', '\*')] + [''] else: return [''] def _str_summary(self): if self['Summary']: return self['Summary'] + [''] else: return [] def _str_extended_summary(self): if self['Extended Summary']: return self['Extended Summary'] + [''] else: return [] def _str_param_list(self, name): out = [] if self[name]: out += self._str_header(name) for param, param_type, desc in self[name]: if param_type: out += ['%s : %s' % (param, param_type)] else: out += [param] out += self._str_indent(desc) out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += self[name] out += [''] return out def _str_see_also(self, func_role): if not self['See Also']: return [] out = [] out += self._str_header("See Also") last_had_desc = True for func, desc, role in self['See Also']: if role: link = ':%s:`%s`' % (role, func) elif func_role: link = ':%s:`%s`' % (func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: out += [''] out += [link] else: out[-1] += ", %s" % link if desc: out += self._str_indent([' '.join(desc)]) last_had_desc = True else: last_had_desc = False out += [''] return out def _str_index(self): idx = self['index'] out = [] out += ['.. index:: %s' % idx.get('default', '')] for section, references in idx.items(): if section == 'default': continue out += [' :%s: %s' % (section, ', '.join(references))] return out def __str__(self, func_role=''): out = [] out += self._str_signature() out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Yields', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_section('Warnings') out += self._str_see_also(func_role) for s in ('Notes', 'References', 'Examples'): out += self._str_section(s) for param_list in ('Attributes', 'Methods'): out += self._str_param_list(param_list) out += self._str_index() return '\n'.join(out) def indent(str, indent=4): indent_str = ' '*indent if str is None: return indent_str lines = str.split('\n') return '\n'.join(indent_str + l for l in lines) def dedent_lines(lines): """Deindent a list of lines maximally""" return textwrap.dedent("\n".join(lines)).split("\n") def header(text, style='-'): return text + '\n' + style*len(text) + '\n' class FunctionDoc(NumpyDocString): def __init__(self, func, role='func', doc=None, config={}): self._f = func self._role = role # e.g. 
"func" or "meth" if doc is None: if func is None: raise ValueError("No function or docstring given") doc = inspect.getdoc(func) or '' NumpyDocString.__init__(self, doc) if not self['Signature'] and func is not None: func, func_name = self.get_func() try: try: signature = str(inspect.signature(func)) except (AttributeError, ValueError): # try to read signature, backward compat for older Python if sys.version_info[0] >= 3: argspec = inspect.getfullargspec(func) else: argspec = inspect.getargspec(func) signature = inspect.formatargspec(*argspec) signature = '%s%s' % (func_name, signature.replace('*', '\*')) except TypeError: signature = '%s()' % func_name self['Signature'] = signature def get_func(self): func_name = getattr(self._f, '__name__', self.__class__.__name__) if inspect.isclass(self._f): func = getattr(self._f, '__call__', self._f.__init__) else: func = self._f return func, func_name def __str__(self): out = '' func, func_name = self.get_func() signature = self['Signature'].replace('*', '\*') roles = {'func': 'function', 'meth': 'method'} if self._role: if self._role not in roles: print("Warning: invalid role %s" % self._role) out += '.. %s:: %s\n \n\n' % (roles.get(self._role, ''), func_name) out += super(FunctionDoc, self).__str__(func_role=self._role) return out class ClassDoc(NumpyDocString): extra_public_methods = ['__call__'] def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc, config={}): if not inspect.isclass(cls) and cls is not None: raise ValueError("Expected a class or None, but got %r" % cls) self._cls = cls self.show_inherited_members = config.get( 'show_inherited_class_members', True) if modulename and not modulename.endswith('.'): modulename += '.' self._mod = modulename if doc is None: if cls is None: raise ValueError("No class or documentation string given") doc = pydoc.getdoc(cls) NumpyDocString.__init__(self, doc) if config.get('show_class_members', True): def splitlines_x(s): if not s: return [] else: return s.splitlines() for field, items in [('Methods', self.methods), ('Attributes', self.properties)]: if not self[field]: doc_list = [] for name in sorted(items): try: doc_item = pydoc.getdoc(getattr(self._cls, name)) doc_list.append((name, '', splitlines_x(doc_item))) except AttributeError: pass # method doesn't exist self[field] = doc_list @property def methods(self): if self._cls is None: return [] return [name for name, func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) and isinstance(func, collections.Callable) and self._is_show_member(name))] @property def properties(self): if self._cls is None: return [] return [name for name, func in inspect.getmembers(self._cls) if (not name.startswith('_') and (func is None or isinstance(func, property) or inspect.isgetsetdescriptor(func)) and self._is_show_member(name))] def _is_show_member(self, name): if self.show_inherited_members: return True # show all class members if name not in self._cls.__dict__: return False # class member is inherited, we do not show it return True asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/docscrape_sphinx.py0000644000175000017500000002510613243564211027505 0ustar dandan00000000000000from __future__ import division, absolute_import, print_function import sys import re import inspect import textwrap import pydoc import collections import os from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment import sphinx from sphinx.jinja2glue import BuiltinTemplateLoader from .docscrape 
import NumpyDocString, FunctionDoc, ClassDoc if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): NumpyDocString.__init__(self, docstring, config=config) self.load_config(config) def load_config(self, config): self.use_plots = config.get('use_plots', False) self.class_members_toctree = config.get('class_members_toctree', True) self.template = config.get('template', None) if self.template is None: template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')] template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) self.template = template_env.get_template('numpydoc_docstring.rst') # string conversion routines def _str_header(self, name, symbol='`'): return ['.. rubric:: ' + name, ''] def _str_field_list(self, name): return [':' + name + ':'] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): return [''] if self['Signature']: return ['``%s``' % self['Signature']] + [''] else: return [''] def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Extended Summary'] + [''] def _str_returns(self, name='Returns'): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param, param_type, desc in self[name]: if param_type: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) else: out += self._str_indent([param.strip()]) if desc: out += [''] out += self._str_indent(desc, 8) out += [''] return out def _str_param_list(self, name): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param, param_type, desc in self[name]: if param_type: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) else: out += self._str_indent(['**%s**' % param.strip()]) if desc: out += [''] out += self._str_indent(desc, 8) out += [''] return out @property def _obj(self): if hasattr(self, '_cls'): return self._cls elif hasattr(self, '_f'): return self._f return None def _str_member_list(self, name): """ Generate a member listing, autosummary:: table where possible, and a table where not. """ out = [] if self[name]: out += ['.. rubric:: %s' % name, ''] prefix = getattr(self, '_name', '') if prefix: prefix = '~%s.' % prefix autosum = [] others = [] for param, param_type, desc in self[name]: param = param.strip() # Check if the referenced member can have a docstring or not param_obj = getattr(self._obj, param, None) if not (callable(param_obj) or isinstance(param_obj, property) or inspect.isgetsetdescriptor(param_obj)): param_obj = None if param_obj and (pydoc.getdoc(param_obj) or not desc): # Referenced object has a docstring autosum += [" %s%s" % (prefix, param)] else: others.append((param, param_type, desc)) if autosum: out += ['.. 
autosummary::'] if self.class_members_toctree: out += [' :toctree:'] out += [''] + autosum if others: maxlen_0 = max(3, max([len(x[0]) + 4 for x in others])) hdr = sixu("=") * maxlen_0 + sixu(" ") + sixu("=") * 10 fmt = sixu('%%%ds %%s ') % (maxlen_0,) out += ['', '', hdr] for param, param_type, desc in others: desc = sixu(" ").join(x.strip() for x in desc).strip() if param_type: desc = "(%s) %s" % (param_type, desc) out += [fmt % ("**" + param.strip() + "**", desc)] out += [hdr] out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += [''] content = textwrap.dedent("\n".join(self[name])).split("\n") out += content out += [''] return out def _str_see_also(self, func_role): out = [] if self['See Also']: see_also = super(SphinxDocString, self)._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out def _str_warnings(self): out = [] if self['Warnings']: out = ['.. warning::', ''] out += self._str_indent(self['Warnings']) return out def _str_index(self): idx = self['index'] out = [] if len(idx) == 0: return out out += ['.. index:: %s' % idx.get('default', '')] for section, references in idx.items(): if section == 'default': continue elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: out += [' %s: %s' % (section, ','.join(references))] return out def _str_references(self): out = [] if self['References']: out += self._str_header('References') if isinstance(self['References'], str): self['References'] = [self['References']] out.extend(self['References']) out += [''] # Latex collects all references to a separate bibliography, # so we need to insert links to it if sphinx.__version__ >= "0.6": out += ['.. only:: latex', ''] else: out += ['.. latexonly::', ''] items = [] for line in self['References']: m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I) if m: items.append(m.group(1)) out += [' ' + ", ".join(["[%s]_" % item for item in items]), ''] return out def _str_examples(self): examples_str = "\n".join(self['Examples']) if (self.use_plots and 'import matplotlib' in examples_str and 'plot::' not in examples_str): out = [] out += self._str_header('Examples') out += ['.. 
plot::', ''] out += self._str_indent(self['Examples']) out += [''] return out else: return self._str_section('Examples') def __str__(self, indent=0, func_role="obj"): ns = { 'signature': self._str_signature(), 'index': self._str_index(), 'summary': self._str_summary(), 'extended_summary': self._str_extended_summary(), 'parameters': self._str_param_list('Parameters'), 'returns': self._str_returns('Returns'), 'yields': self._str_returns('Yields'), 'other_parameters': self._str_param_list('Other Parameters'), 'raises': self._str_param_list('Raises'), 'warns': self._str_param_list('Warns'), 'warnings': self._str_warnings(), 'see_also': self._str_see_also(func_role), 'notes': self._str_section('Notes'), 'references': self._str_references(), 'examples': self._str_examples(), 'attributes': self._str_member_list('Attributes'), 'methods': self._str_member_list('Methods'), } ns = dict((k, '\n'.join(v)) for k, v in ns.items()) rendered = self.template.render(**ns) return '\n'.join(self._str_indent(rendered.split('\n'), indent)) class SphinxFunctionDoc(SphinxDocString, FunctionDoc): def __init__(self, obj, doc=None, config={}): self.load_config(config) FunctionDoc.__init__(self, obj, doc=doc, config=config) class SphinxClassDoc(SphinxDocString, ClassDoc): def __init__(self, obj, doc=None, func_doc=None, config={}): self.load_config(config) ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) class SphinxObjDoc(SphinxDocString): def __init__(self, obj, doc=None, config={}): self._f = obj self.load_config(config) SphinxDocString.__init__(self, doc, config=config) def get_doc_object(obj, what=None, doc=None, config={}, builder=None): if what is None: if inspect.isclass(obj): what = 'class' elif inspect.ismodule(obj): what = 'module' elif isinstance(obj, collections.Callable): what = 'function' else: what = 'object' template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')] if builder is not None: template_loader = BuiltinTemplateLoader() template_loader.init(builder, dirs=template_dirs) else: template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) config['template'] = template_env.get_template('numpydoc_docstring.rst') if what == 'class': return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, config=config) elif what in ('function', 'method'): return SphinxFunctionDoc(obj, doc=doc, config=config) else: if doc is None: doc = pydoc.getdoc(obj) return SphinxObjDoc(obj, doc, config=config) asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/templates/0000755000175000017500000000000013246031665025576 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/templates/numpydoc_docstring.rst0000644000175000017500000000032613243564211032236 0ustar dandan00000000000000{{index}} {{summary}} {{extended_summary}} {{parameters}} {{returns}} {{yields}} {{other_parameters}} {{raises}} {{warns}} {{warnings}} {{see_also}} {{notes}} {{references}} {{examples}} {{attributes}} {{methods}} asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/__init__.py0000644000175000017500000000016513243564211025706 0ustar dandan00000000000000from __future__ import division, absolute_import, print_function __version__ = '0.7.0' from .numpydoc import setup asdf-1.3.3/astropy_helpers/astropy_helpers/extern/numpydoc/numpydoc.py0000644000175000017500000002253313243564211026010 0ustar dandan00000000000000""" ======== numpydoc ======== Sphinx extension that handles docstrings in the Numpy standard format. 
[1] It will: - Convert Parameters etc. sections to field lists. - Convert See Also section to a See also entry. - Renumber references. - Extract the signature from the docstring, if it can't be determined otherwise. .. [1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt """ from __future__ import division, absolute_import, print_function import sys import re import pydoc import sphinx import inspect import collections if sphinx.__version__ < '1.0.1': raise RuntimeError("Sphinx 1.0.1 or newer is required") from .docscrape_sphinx import get_doc_object, SphinxDocString if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') def rename_references(app, what, name, obj, options, lines, reference_offset=[0]): # replace reference numbers so that there are no duplicates references = [] for line in lines: line = line.strip() m = re.match(sixu('^.. \\[(%s)\\]') % app.config.numpydoc_citation_re, line, re.I) if m: references.append(m.group(1)) if references: for i, line in enumerate(lines): for r in references: if re.match(sixu('^\\d+$'), r): new_r = sixu("R%d") % (reference_offset[0] + int(r)) else: new_r = sixu("%s%d") % (r, reference_offset[0]) lines[i] = lines[i].replace(sixu('[%s]_') % r, sixu('[%s]_') % new_r) lines[i] = lines[i].replace(sixu('.. [%s]') % r, sixu('.. [%s]') % new_r) reference_offset[0] += len(references) def mangle_docstrings(app, what, name, obj, options, lines): cfg = {'use_plots': app.config.numpydoc_use_plots, 'show_class_members': app.config.numpydoc_show_class_members, 'show_inherited_class_members': app.config.numpydoc_show_inherited_class_members, 'class_members_toctree': app.config.numpydoc_class_members_toctree} u_NL = sixu('\n') if what == 'module': # Strip top title pattern = '^\\s*[#*=]{4,}\\n[a-z0-9 -]+\\n[#*=]{4,}\\s*' title_re = re.compile(sixu(pattern), re.I | re.S) lines[:] = title_re.sub(sixu(''), u_NL.join(lines)).split(u_NL) else: doc = get_doc_object(obj, what, u_NL.join(lines), config=cfg, builder=app.builder) if sys.version_info[0] >= 3: doc = str(doc) else: doc = unicode(doc) lines[:] = doc.split(u_NL) if (app.config.numpydoc_edit_link and hasattr(obj, '__name__') and obj.__name__): if hasattr(obj, '__module__'): v = dict(full_name=sixu("%s.%s") % (obj.__module__, obj.__name__)) else: v = dict(full_name=obj.__name__) lines += [sixu(''), sixu('.. 
htmlonly::'), sixu('')] lines += [sixu(' %s') % x for x in (app.config.numpydoc_edit_link % v).split("\n")] # call function to replace reference numbers so that there are no # duplicates rename_references(app, what, name, obj, options, lines) def mangle_signature(app, what, name, obj, options, sig, retann): # Do not try to inspect classes that don't define `__init__` if (inspect.isclass(obj) and (not hasattr(obj, '__init__') or 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' if not (isinstance(obj, collections.Callable) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) sig = doc['Signature'] or getattr(obj, '__text_signature__', None) if sig: sig = re.sub(sixu("^[^(]*"), sixu(""), sig) return sig, sixu('') def setup(app, get_doc_object_=get_doc_object): if not hasattr(app, 'add_config_value'): return # probably called by nose, better bail out global get_doc_object get_doc_object = get_doc_object_ app.connect('autodoc-process-docstring', mangle_docstrings) app.connect('autodoc-process-signature', mangle_signature) app.add_config_value('numpydoc_edit_link', None, False) app.add_config_value('numpydoc_use_plots', None, False) app.add_config_value('numpydoc_show_class_members', True, True) app.add_config_value('numpydoc_show_inherited_class_members', True, True) app.add_config_value('numpydoc_class_members_toctree', True, True) app.add_config_value('numpydoc_citation_re', '[a-z0-9_.-]+', True) # Extra mangling domains app.add_domain(NumpyPythonDomain) app.add_domain(NumpyCDomain) metadata = {'parallel_read_safe': True} return metadata # ------------------------------------------------------------------------------ # Docstring-mangling domains # ------------------------------------------------------------------------------ from docutils.statemachine import ViewList from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain class ManglingDomainBase(object): directive_mangling_map = {} def __init__(self, *a, **kw): super(ManglingDomainBase, self).__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): for name, objtype in list(self.directive_mangling_map.items()): self.directives[name] = wrap_mangling_directive( self.directives[name], objtype) class NumpyPythonDomain(ManglingDomainBase, PythonDomain): name = 'np' directive_mangling_map = { 'function': 'function', 'class': 'class', 'exception': 'class', 'method': 'function', 'classmethod': 'function', 'staticmethod': 'function', 'attribute': 'attribute', } indices = [] class NumpyCDomain(ManglingDomainBase, CDomain): name = 'np-c' directive_mangling_map = { 'function': 'function', 'member': 'attribute', 'macro': 'function', 'type': 'class', 'var': 'object', } def match_items(lines, content_old): """Create items for mangled lines. This function tries to match the lines in ``lines`` with the items (source file references and line numbers) in ``content_old``. The ``mangle_docstrings`` function changes the actual docstrings, but doesn't keep track of where each line came from. The manging does many operations on the original lines, which are hard to track afterwards. Many of the line changes come from deleting or inserting blank lines. This function tries to match lines by ignoring blank lines. All other changes (such as inserting figures or changes in the references) are completely ignored, so the generated line numbers will be off if ``mangle_docstrings`` does anything non-trivial. 
This is a best-effort function and the real fix would be to make ``mangle_docstrings`` actually keep track of the ``items`` together with the ``lines``. Examples -------- >>> lines = ['', 'A', '', 'B', ' ', '', 'C', 'D'] >>> lines_old = ['a', '', '', 'b', '', 'c'] >>> items_old = [('file1.py', 0), ('file1.py', 1), ('file1.py', 2), ... ('file2.py', 0), ('file2.py', 1), ('file2.py', 2)] >>> content_old = ViewList(lines_old, items=items_old) >>> match_items(lines, content_old) # doctest: +NORMALIZE_WHITESPACE [('file1.py', 0), ('file1.py', 0), ('file2.py', 0), ('file2.py', 0), ('file2.py', 2), ('file2.py', 2), ('file2.py', 2), ('file2.py', 2)] >>> # first 2 ``lines`` are matched to 'a', second 2 to 'b', rest to 'c' >>> # actual content is completely ignored. Notes ----- The algorithm tries to match any line in ``lines`` with one in ``lines_old``. It skips over all empty lines in ``lines_old`` and assigns this line number to all lines in ``lines``, unless a non-empty line is found in ``lines`` in which case it goes to the next line in ``lines_old``. """ items_new = [] lines_old = content_old.data items_old = content_old.items j = 0 for i, line in enumerate(lines): # go to next non-empty line in old: # line.strip() checks whether the string is all whitespace while j < len(lines_old) - 1 and not lines_old[j].strip(): j += 1 items_new.append(items_old[j]) if line.strip() and j < len(lines_old) - 1: j += 1 assert(len(items_new) == len(lines)) return items_new def wrap_mangling_directive(base_directive, objtype): class directive(base_directive): def run(self): env = self.state.document.settings.env name = None if self.arguments: m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0]) name = m.group(2).strip() if not name: name = self.arguments[0] lines = list(self.content) mangle_docstrings(env.app, objtype, name, None, None, lines) if self.content: items = match_items(lines, self.content) self.content = ViewList(lines, items=items, parent=self.content.parent) return base_directive.run(self) return directive asdf-1.3.3/astropy_helpers/astropy_helpers/commands/0000755000175000017500000000000013246031665022236 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/commands/build_ext.py0000644000175000017500000004636413246003560024575 0ustar dandan00000000000000import errno import os import re import shlex import shutil import subprocess import sys import textwrap from distutils import log, ccompiler, sysconfig from distutils.core import Extension from distutils.ccompiler import get_default_compiler from setuptools.command.build_ext import build_ext as SetuptoolsBuildExt from ..utils import get_numpy_include_path, invalidate_caches, classproperty from ..version_helpers import get_pkg_version_module def should_build_with_cython(package, release=None): """Returns the previously used Cython version (or 'unknown' if not previously built) if Cython should be used to build extension modules from pyx files. If the ``release`` parameter is not specified an attempt is made to determine the release flag from `astropy.version`. """ try: version_module = __import__(package + '.cython_version', fromlist=['release', 'cython_version']) except ImportError: version_module = None if release is None and version_module is not None: try: release = version_module.release except AttributeError: pass try: cython_version = version_module.cython_version except AttributeError: cython_version = 'unknown' # Only build with Cython if, of course, Cython is installed, we're in a # development version (i.e. 
not release) or the Cython-generated source # files haven't been created yet (cython_version == 'unknown'). The latter # case can happen even when release is True if checking out a release tag # from the repository have_cython = False try: import Cython # noqa have_cython = True except ImportError: pass if have_cython and (not release or cython_version == 'unknown'): return cython_version else: return False _compiler_versions = {} def get_compiler_version(compiler): if compiler in _compiler_versions: return _compiler_versions[compiler] # Different flags to try to get the compiler version # TODO: It might be worth making this configurable to support # arbitrary odd compilers; though all bets may be off in such # cases anyway flags = ['--version', '--Version', '-version', '-Version', '-v', '-V'] def try_get_version(flag): process = subprocess.Popen( shlex.split(compiler, posix=('win' not in sys.platform)) + [flag], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() if process.returncode != 0: return 'unknown' output = stdout.strip().decode('latin-1') # Safest bet if not output: # Some compilers return their version info on stderr output = stderr.strip().decode('latin-1') if not output: output = 'unknown' return output for flag in flags: version = try_get_version(flag) if version != 'unknown': break # Cache results to speed up future calls _compiler_versions[compiler] = version return version # TODO: I think this can be reworked without having to create the class # programmatically. def generate_build_ext_command(packagename, release): """ Creates a custom 'build_ext' command that allows for manipulating some of the C extension options at build time. We use a function to build the class since the base class for build_ext may be different depending on certain build-time parameters (for example, we may use Cython's build_ext instead of the default version in distutils). Uses the default distutils.command.build_ext by default. """ class build_ext(SetuptoolsBuildExt, object): package_name = packagename is_release = release _user_options = SetuptoolsBuildExt.user_options[:] _boolean_options = SetuptoolsBuildExt.boolean_options[:] _help_options = SetuptoolsBuildExt.help_options[:] force_rebuild = False _broken_compiler_mapping = [ ('i686-apple-darwin[0-9]*-llvm-gcc-4.2', 'clang') ] # Warning: Spaghetti code ahead. # During setup.py, the setup_helpers module needs the ability to add # items to a command's user_options list. At this stage we don't know # whether or not we can build with Cython, and so don't know for sure # what base class will be used for build_ext; nevertheless we want to # be able to provide a list to add options into. # # Later, once setup() has been called we should have all build # dependencies included via setup_requires available. 
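# (Usage sketch, not taken verbatim from this repository: a package's
# setup.py would typically register the generated command along the lines of
#
#     cmdclass = {'build_ext': generate_build_ext_command('mypackage', False)}
#
# where 'mypackage' is a placeholder name; the returned class then defers the
# choice between Cython's build_ext and the setuptools one to _final_class
# below.)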
distutils needs # to be able to access the user_options as a *class* attribute before # the class has been initialized, but we do need to be able to # enumerate the options for the correct base class at that point @classproperty def user_options(cls): from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._user_options return cls._final_class.user_options @classproperty def boolean_options(cls): # Similar to user_options above from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._boolean_options return cls._final_class.boolean_options @classproperty def help_options(cls): # Similar to user_options above from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._help_options return cls._final_class.help_options @classproperty(lazy=True) def _final_class(cls): """ Late determination of what the build_ext base class should be, depending on whether or not Cython is available. """ uses_cython = should_build_with_cython(cls.package_name, cls.is_release) if uses_cython: # We need to decide late on whether or not to use Cython's # build_ext (since Cython may not be available earlier in the # setup.py if it was brought in via setup_requires) try: from Cython.Distutils.old_build_ext import old_build_ext as base_cls except ImportError: from Cython.Distutils import build_ext as base_cls else: base_cls = SetuptoolsBuildExt # Create and return an instance of a new class based on this class # using one of the above possible base classes def merge_options(attr): base = getattr(base_cls, attr) ours = getattr(cls, '_' + attr) all_base = set(opt[0] for opt in base) return base + [opt for opt in ours if opt[0] not in all_base] boolean_options = (base_cls.boolean_options + [opt for opt in cls._boolean_options if opt not in base_cls.boolean_options]) members = dict(cls.__dict__) members.update({ 'user_options': merge_options('user_options'), 'help_options': merge_options('help_options'), 'boolean_options': boolean_options, 'uses_cython': uses_cython, }) # Update the base class for the original build_ext command build_ext.__bases__ = (base_cls, object) # Create a new class for the existing class, but now with the # appropriate base class depending on whether or not to use Cython. # Ensure that object is one of the bases to make a new-style class. 
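# (Note: the three-argument form ``type(name, bases, namespace)`` creates a
# class dynamically, so the value returned on the next line is a fresh
# command class whose effective base, either Cython's build_ext or the stock
# setuptools one, was selected just above.)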
return type(cls.__name__, (build_ext,), members) def __new__(cls, *args, **kwargs): # By the time the command is actually instantialized, the # Distribution instance for the build has been instantiated, which # means setup_requires has been processed--now we can determine # what base class we can use for the actual build, and return an # instance of a build_ext command that uses that base class (right # now the options being Cython.Distutils.build_ext, or the stock # setuptools build_ext) new_cls = super(build_ext, cls._final_class).__new__( cls._final_class) # Since the new cls is not a subclass of the original cls, we must # manually call its __init__ new_cls.__init__(*args, **kwargs) return new_cls def finalize_options(self): # Add a copy of the _compiler.so module as well, but only if there # are in fact C modules to compile (otherwise there's no reason to # include a record of the compiler used) # Note, self.extensions may not be set yet, but # self.distribution.ext_modules is where any extension modules # passed to setup() can be found self._adjust_compiler() extensions = self.distribution.ext_modules if extensions: build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(packagename) src_path = os.path.relpath( os.path.join(os.path.dirname(__file__), 'src')) shutil.copy(os.path.join(src_path, 'compiler.c'), os.path.join(package_dir, '_compiler.c')) ext = Extension(self.package_name + '._compiler', [os.path.join(package_dir, '_compiler.c')]) extensions.insert(0, ext) super(build_ext, self).finalize_options() # Generate if self.uses_cython: try: from Cython import __version__ as cython_version except ImportError: # This shouldn't happen if we made it this far cython_version = None if (cython_version is not None and cython_version != self.uses_cython): self.force_rebuild = True # Update the used cython version self.uses_cython = cython_version # Regardless of the value of the '--force' option, force a rebuild # if the debug flag changed from the last build if self.force_rebuild: self.force = True def run(self): # For extensions that require 'numpy' in their include dirs, # replace 'numpy' with the actual paths np_include = get_numpy_include_path() for extension in self.extensions: if 'numpy' in extension.include_dirs: idx = extension.include_dirs.index('numpy') extension.include_dirs.insert(idx, np_include) extension.include_dirs.remove('numpy') self._check_cython_sources(extension) super(build_ext, self).run() # Update cython_version.py if building with Cython try: cython_version = get_pkg_version_module( packagename, fromlist=['cython_version'])[0] except (AttributeError, ImportError): cython_version = 'unknown' if self.uses_cython and self.uses_cython != cython_version: build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(packagename) cython_py = os.path.join(package_dir, 'cython_version.py') with open(cython_py, 'w') as f: f.write('# Generated file; do not modify\n') f.write('cython_version = {0!r}\n'.format(self.uses_cython)) if os.path.isdir(self.build_lib): # The build/lib directory may not exist if the build_py # command was not previously run, which may sometimes be # the case self.copy_file(cython_py, os.path.join(self.build_lib, cython_py), preserve_mode=False) invalidate_caches() def _adjust_compiler(self): """ This function detects broken compilers and switches to another. 
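For example, with the default mapping a default compiler whose reported
version matches ``i686-apple-darwin*-llvm-gcc-4.2`` is replaced by ``clang``,
effectively as if the build had been run with ``CC=clang`` (this is only an
illustration of the mapping, not additional behaviour).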
If the environment variable CC is explicitly set, or a compiler is specified on the commandline, no override is performed -- the purpose here is to only override a default compiler. The specific compilers with problems are: * The default compiler in XCode-4.2, llvm-gcc-4.2, segfaults when compiling wcslib. The set of broken compilers can be updated by changing the compiler_mapping variable. It is a list of 2-tuples where the first in the pair is a regular expression matching the version of the broken compiler, and the second is the compiler to change to. """ if 'CC' in os.environ: # Check that CC is not set to llvm-gcc-4.2 c_compiler = os.environ['CC'] try: version = get_compiler_version(c_compiler) except OSError: msg = textwrap.dedent( """ The C compiler set by the CC environment variable: {compiler:s} cannot be found or executed. """.format(compiler=c_compiler)) log.warn(msg) sys.exit(1) for broken, fixed in self._broken_compiler_mapping: if re.match(broken, version): msg = textwrap.dedent( """Compiler specified by CC environment variable ({compiler:s}:{version:s}) will fail to compile {pkg:s}. Please set CC={fixed:s} and try again. You can do this, for example, by running: CC={fixed:s} python setup.py where is the command you ran. """.format(compiler=c_compiler, version=version, pkg=self.package_name, fixed=fixed)) log.warn(msg) sys.exit(1) # If C compiler is set via CC, and isn't broken, we are good to go. We # should definitely not try accessing the compiler specified by # ``sysconfig.get_config_var('CC')`` lower down, because this may fail # if the compiler used to compile Python is missing (and maybe this is # why the user is setting CC). For example, the official Python 2.7.3 # MacOS X binary was compiled with gcc-4.2, which is no longer available # in XCode 4. return if self.compiler is not None: # At this point, self.compiler will be set only if a compiler # was specified in the command-line or via setup.cfg, in which # case we don't do anything return compiler_type = ccompiler.get_default_compiler() if compiler_type == 'unix': # We have to get the compiler this way, as this is the one that is # used if os.environ['CC'] is not set. It is actually read in from # the Python Makefile. Note that this is not necessarily the same # compiler as returned by ccompiler.new_compiler() c_compiler = sysconfig.get_config_var('CC') try: version = get_compiler_version(c_compiler) except OSError: msg = textwrap.dedent( """ The C compiler used to compile Python {compiler:s}, and which is normally used to compile C extensions, is not available. You can explicitly specify which compiler to use by setting the CC environment variable, for example: CC=gcc python setup.py or if you are using MacOS X, you can try: CC=clang python setup.py """.format(compiler=c_compiler)) log.warn(msg) sys.exit(1) for broken, fixed in self._broken_compiler_mapping: if re.match(broken, version): os.environ['CC'] = fixed break def _check_cython_sources(self, extension): """ Where relevant, make sure that the .c files associated with .pyx modules are present (if building without Cython installed). 
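For instance (an illustrative sketch using a made-up path), an extension
listing ``mypkg/fast.pyx`` among its sources is rewritten to use
``mypkg/fast.c`` (or ``mypkg/fast.cpp``) when the build is not using Cython
and that generated file exists; if neither file is present, an ``IOError``
is raised asking for Cython to be installed.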
""" # Determine the compiler we'll be using if self.compiler is None: compiler = get_default_compiler() else: compiler = self.compiler # Replace .pyx with C-equivalents, unless c files are missing for jdx, src in enumerate(extension.sources): base, ext = os.path.splitext(src) pyxfn = base + '.pyx' cfn = base + '.c' cppfn = base + '.cpp' if not os.path.isfile(pyxfn): continue if self.uses_cython: extension.sources[jdx] = pyxfn else: if os.path.isfile(cfn): extension.sources[jdx] = cfn elif os.path.isfile(cppfn): extension.sources[jdx] = cppfn else: msg = ( 'Could not find C/C++ file {0}.(c/cpp) for Cython ' 'file {1} when building extension {2}. Cython ' 'must be installed to build from a git ' 'checkout.'.format(base, pyxfn, extension.name)) raise IOError(errno.ENOENT, msg, cfn) # Current versions of Cython use deprecated Numpy API features # the use of which produces a few warnings when compiling. # These additional flags should squelch those warnings. # TODO: Feel free to remove this if/when a Cython update # removes use of the deprecated Numpy API if compiler == 'unix': extension.extra_compile_args.extend([ '-Wp,-w', '-Wno-unused-function']) return build_ext asdf-1.3.3/astropy_helpers/astropy_helpers/commands/setup_package.py0000644000175000017500000000017013243564211025414 0ustar dandan00000000000000from os.path import join def get_package_data(): return {'astropy_helpers.commands': [join('src', 'compiler.c')]} asdf-1.3.3/astropy_helpers/astropy_helpers/commands/src/0000755000175000017500000000000013246031665023025 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/commands/src/compiler.c0000644000175000017500000000573113243564211025004 0ustar dandan00000000000000#include /*************************************************************************** * Macros for determining the compiler version. * * These are borrowed from boost, and majorly abridged to include only * the compilers we care about. ***************************************************************************/ #ifndef PY3K #if PY_MAJOR_VERSION >= 3 #define PY3K 1 #else #define PY3K 0 #endif #endif #define STRINGIZE(X) DO_STRINGIZE(X) #define DO_STRINGIZE(X) #X #if defined __clang__ /* Clang C++ emulates GCC, so it has to appear early. 
*/ # define COMPILER "Clang version " __clang_version__ #elif defined(__INTEL_COMPILER) || defined(__ICL) || defined(__ICC) || defined(__ECC) /* Intel */ # if defined(__INTEL_COMPILER) # define INTEL_VERSION __INTEL_COMPILER # elif defined(__ICL) # define INTEL_VERSION __ICL # elif defined(__ICC) # define INTEL_VERSION __ICC # elif defined(__ECC) # define INTEL_VERSION __ECC # endif # define COMPILER "Intel C compiler version " STRINGIZE(INTEL_VERSION) #elif defined(__GNUC__) /* gcc */ # define COMPILER "GCC version " __VERSION__ #elif defined(__SUNPRO_CC) /* Sun Workshop Compiler */ # define COMPILER "Sun compiler version " STRINGIZE(__SUNPRO_CC) #elif defined(_MSC_VER) /* Microsoft Visual C/C++ Must be last since other compilers define _MSC_VER for compatibility as well */ # if _MSC_VER < 1200 # define COMPILER_VERSION 5.0 # elif _MSC_VER < 1300 # define COMPILER_VERSION 6.0 # elif _MSC_VER == 1300 # define COMPILER_VERSION 7.0 # elif _MSC_VER == 1310 # define COMPILER_VERSION 7.1 # elif _MSC_VER == 1400 # define COMPILER_VERSION 8.0 # elif _MSC_VER == 1500 # define COMPILER_VERSION 9.0 # elif _MSC_VER == 1600 # define COMPILER_VERSION 10.0 # else # define COMPILER_VERSION _MSC_VER # endif # define COMPILER "Microsoft Visual C++ version " STRINGIZE(COMPILER_VERSION) #else /* Fallback */ # define COMPILER "Unknown compiler" #endif /*************************************************************************** * Module-level ***************************************************************************/ struct module_state { /* The Sun compiler can't handle empty structs */ #if defined(__SUNPRO_C) || defined(_MSC_VER) int _dummy; #endif }; #if PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "_compiler", NULL, sizeof(struct module_state), NULL, NULL, NULL, NULL, NULL }; #define INITERROR return NULL PyMODINIT_FUNC PyInit__compiler(void) #else #define INITERROR return PyMODINIT_FUNC init_compiler(void) #endif { PyObject* m; #if PY3K m = PyModule_Create(&moduledef); #else m = Py_InitModule3("_compiler", NULL, NULL); #endif if (m == NULL) INITERROR; PyModule_AddStringConstant(m, "compiler", COMPILER); #if PY3K return m; #endif } asdf-1.3.3/astropy_helpers/astropy_helpers/commands/test.py0000644000175000017500000000252413246003560023563 0ustar dandan00000000000000""" Different implementations of the ``./setup.py test`` command depending on what's locally available. If Astropy v1.1.0.dev or later is available it should be possible to import AstropyTest from ``astropy.tests.command``. If ``astropy`` can be imported but not ``astropy.tests.command`` (i.e. an older version of Astropy), we can use the backwards-compat implementation of the command. If Astropy can't be imported at all then there is a skeleton implementation that allows users to at least discover the ``./setup.py test`` command and learn that they need Astropy to run it. 
""" # Previously these except statements caught only ImportErrors, but there are # some other obscure exceptional conditions that can occur when importing # astropy.tests (at least on older versions) that can cause these imports to # fail try: import astropy # noqa try: from astropy.tests.command import AstropyTest except Exception: from ._test_compat import AstropyTest except Exception: # No astropy at all--provide the dummy implementation from ._dummy import _DummyCommand class AstropyTest(_DummyCommand): command_name = 'test' description = 'Run the tests for this package' error_msg = ( "The 'test' command requires the astropy package to be " "installed and importable.") asdf-1.3.3/astropy_helpers/astropy_helpers/commands/install.py0000644000175000017500000000074613246003560024256 0ustar dandan00000000000000from setuptools.command.install import install as SetuptoolsInstall from ..utils import _get_platlib_dir class AstropyInstall(SetuptoolsInstall): user_options = SetuptoolsInstall.user_options[:] boolean_options = SetuptoolsInstall.boolean_options[:] def finalize_options(self): build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) self.build_lib = platlib_dir SetuptoolsInstall.finalize_options(self) asdf-1.3.3/astropy_helpers/astropy_helpers/commands/register.py0000644000175000017500000000454713246003560024437 0ustar dandan00000000000000from setuptools.command.register import register as SetuptoolsRegister class AstropyRegister(SetuptoolsRegister): """Extends the built in 'register' command to support a ``--hidden`` option to make the registered version hidden on PyPI by default. The result of this is that when a version is registered as "hidden" it can still be downloaded from PyPI, but it does not show up in the list of actively supported versions under http://pypi.python.org/pypi/astropy, and is not set as the most recent version. Although this can always be set through the web interface it may be more convenient to be able to specify via the 'register' command. Hidden may also be considered a safer default when running the 'register' command, though this command uses distutils' normal behavior if the ``--hidden`` option is omitted. """ user_options = SetuptoolsRegister.user_options + [ ('hidden', None, 'mark this release as hidden on PyPI by default') ] boolean_options = SetuptoolsRegister.boolean_options + ['hidden'] def initialize_options(self): SetuptoolsRegister.initialize_options(self) self.hidden = False def build_post_data(self, action): data = SetuptoolsRegister.build_post_data(self, action) if action == 'submit' and self.hidden: data['_pypi_hidden'] = '1' return data def _set_config(self): # The original register command is buggy--if you use .pypirc with a # server-login section *at all* the repository you specify with the -r # option will be overwritten with either the repository in .pypirc or # with the default, # If you do not have a .pypirc using the -r option will just crash. 
# Way to go distutils # If we don't set self.repository back to a default value _set_config # can crash if there was a user-supplied value for this option; don't # worry, we'll get the real value back afterwards self.repository = 'pypi' SetuptoolsRegister._set_config(self) options = self.distribution.get_option_dict('register') if 'repository' in options: source, value = options['repository'] # Really anything that came from setup.cfg or the command line # should override whatever was in .pypirc self.repository = value asdf-1.3.3/astropy_helpers/astropy_helpers/commands/build_sphinx.py0000644000175000017500000002441313246003560025275 0ustar dandan00000000000000from __future__ import print_function import inspect import os import pkgutil import re import shutil import subprocess import sys import textwrap import warnings from distutils import log from distutils.cmd import DistutilsOptionError import sphinx from sphinx.setup_command import BuildDoc as SphinxBuildDoc from ..utils import minversion, AstropyDeprecationWarning PY3 = sys.version_info[0] >= 3 class AstropyBuildDocs(SphinxBuildDoc): """ A version of the ``build_docs`` command that uses the version of Astropy that is built by the setup ``build`` command, rather than whatever is installed on the system. To build docs against the installed version, run ``make html`` in the ``astropy/docs`` directory. This also automatically creates the docs/_static directories--this is needed because GitHub won't create the _static dir because it has no tracked files. """ description = 'Build Sphinx documentation for Astropy environment' user_options = SphinxBuildDoc.user_options[:] user_options.append( ('warnings-returncode', 'w', 'Parses the sphinx output and sets the return code to 1 if there ' 'are any warnings. 
Note that this will cause the sphinx log to ' 'only update when it completes, rather than continuously as is ' 'normally the case.')) user_options.append( ('clean-docs', 'l', 'Completely clean previous builds, including ' 'automodapi-generated files before building new ones')) user_options.append( ('no-intersphinx', 'n', 'Skip intersphinx, even if conf.py says to use it')) user_options.append( ('open-docs-in-browser', 'o', 'Open the docs in a browser (using the webbrowser module) if the ' 'build finishes successfully.')) boolean_options = SphinxBuildDoc.boolean_options[:] boolean_options.append('warnings-returncode') boolean_options.append('clean-docs') boolean_options.append('no-intersphinx') boolean_options.append('open-docs-in-browser') _self_iden_rex = re.compile(r"self\.([^\d\W][\w]+)", re.UNICODE) def initialize_options(self): SphinxBuildDoc.initialize_options(self) self.clean_docs = False self.no_intersphinx = False self.open_docs_in_browser = False self.warnings_returncode = False def finalize_options(self): SphinxBuildDoc.finalize_options(self) # Clear out previous sphinx builds, if requested if self.clean_docs: dirstorm = [os.path.join(self.source_dir, 'api'), os.path.join(self.source_dir, 'generated')] if self.build_dir is None: dirstorm.append('docs/_build') else: dirstorm.append(self.build_dir) for d in dirstorm: if os.path.isdir(d): log.info('Cleaning directory ' + d) shutil.rmtree(d) else: log.info('Not cleaning directory ' + d + ' because ' 'not present or not a directory') def run(self): # TODO: Break this method up into a few more subroutines and # document them better import webbrowser if PY3: from urllib.request import pathname2url else: from urllib import pathname2url # This is used at the very end of `run` to decide if sys.exit should # be called. If it's None, it won't be. retcode = None # If possible, create the _static dir if self.build_dir is not None: # the _static dir should be in the same place as the _build dir # for Astropy basedir, subdir = os.path.split(self.build_dir) if subdir == '': # the path has a trailing /... basedir, subdir = os.path.split(basedir) staticdir = os.path.join(basedir, '_static') if os.path.isfile(staticdir): raise DistutilsOptionError( 'Attempted to build_docs in a location where' + staticdir + 'is a file. Must be a directory.') self.mkpath(staticdir) # Now make sure Astropy is built and determine where it was built build_cmd = self.reinitialize_command('build') build_cmd.inplace = 0 self.run_command('build') build_cmd = self.get_finalized_command('build') build_cmd_path = os.path.abspath(build_cmd.build_lib) ah_importer = pkgutil.get_importer('astropy_helpers') if ah_importer is None: ah_path = '.' else: ah_path = os.path.abspath(ah_importer.path) # Now generate the source for and spawn a new process that runs the # command. 
This is needed to get the correct imports for the built # version runlines, runlineno = inspect.getsourcelines(SphinxBuildDoc.run) subproccode = textwrap.dedent(""" from sphinx.setup_command import * os.chdir({srcdir!r}) sys.path.insert(0, {build_cmd_path!r}) sys.path.insert(0, {ah_path!r}) """).format(build_cmd_path=build_cmd_path, ah_path=ah_path, srcdir=self.source_dir) # runlines[1:] removes 'def run(self)' on the first line subproccode += textwrap.dedent(''.join(runlines[1:])) # All "self.foo" in the subprocess code needs to be replaced by the # values taken from the current self in *this* process subproccode = self._self_iden_rex.split(subproccode) for i in range(1, len(subproccode), 2): iden = subproccode[i] val = getattr(self, iden) if iden.endswith('_dir'): # Directories should be absolute, because the `chdir` call # in the new process moves to a different directory subproccode[i] = repr(os.path.abspath(val)) else: subproccode[i] = repr(val) subproccode = ''.join(subproccode) optcode = textwrap.dedent(""" class Namespace(object): pass self = Namespace() self.pdb = {pdb!r} self.verbosity = {verbosity!r} self.traceback = {traceback!r} """).format(pdb=getattr(self, 'pdb', False), verbosity=getattr(self, 'verbosity', 0), traceback=getattr(self, 'traceback', False)) subproccode = optcode + subproccode # This is a quick gross hack, but it ensures that the code grabbed from # SphinxBuildDoc.run will work in Python 2 if it uses the print # function if minversion(sphinx, '1.3'): subproccode = 'from __future__ import print_function' + subproccode if self.no_intersphinx: # the confoverrides variable in sphinx.setup_command.BuildDoc can # be used to override the conf.py ... but this could well break # if future versions of sphinx change the internals of BuildDoc, # so remain vigilant! subproccode = subproccode.replace( 'confoverrides = {}', 'confoverrides = {\'intersphinx_mapping\':{}}') log.debug('Starting subprocess of {0} with python code:\n{1}\n' '[CODE END])'.format(sys.executable, subproccode)) # To return the number of warnings, we need to capture stdout. This # prevents a continuous updating at the terminal, but there's no # apparent way around this. if self.warnings_returncode: proc = subprocess.Popen([sys.executable, '-c', subproccode], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) retcode = 1 with proc.stdout: for line in iter(proc.stdout.readline, b''): line = line.strip(b'\r\n') print(line.decode('utf-8')) if 'build succeeded.' == line.decode('utf-8'): retcode = 0 # Poll to set proc.retcode proc.wait() if retcode != 0: if os.environ.get('TRAVIS', None) == 'true': # this means we are in the travis build, so customize # the message appropriately. msg = ('The build_docs travis build FAILED ' 'because sphinx issued documentation ' 'warnings (scroll up to see the warnings).') else: # standard failure message msg = ('build_docs returning a non-zero exit ' 'code because sphinx issued documentation ' 'warnings.') log.warn(msg) else: proc = subprocess.Popen([sys.executable], stdin=subprocess.PIPE) proc.communicate(subproccode.encode('utf-8')) if proc.returncode == 0: if self.open_docs_in_browser: if self.builder == 'html': absdir = os.path.abspath(self.builder_target_dir) index_path = os.path.join(absdir, 'index.html') fileurl = 'file://' + pathname2url(index_path) webbrowser.open(fileurl) else: log.warn('open-docs-in-browser option was given, but ' 'the builder is not html! 
Ignoring.') else: log.warn('Sphinx Documentation subprocess failed with return ' 'code ' + str(proc.returncode)) retcode = proc.returncode if retcode is not None: # this is potentially dangerous in that there might be something # after the call to `setup` in `setup.py`, and exiting here will # prevent that from running. But there's no other apparent way # to signal what the return code should be. sys.exit(retcode) class AstropyBuildSphinx(AstropyBuildDocs): # pragma: no cover description = 'deprecated alias to the build_docs command' def run(self): warnings.warn( 'The "build_sphinx" command is now deprecated. Use' '"build_docs" instead.', AstropyDeprecationWarning) AstropyBuildDocs.run(self) asdf-1.3.3/astropy_helpers/astropy_helpers/commands/build_py.py0000644000175000017500000000265613246003560024421 0ustar dandan00000000000000from setuptools.command.build_py import build_py as SetuptoolsBuildPy from ..utils import _get_platlib_dir class AstropyBuildPy(SetuptoolsBuildPy): user_options = SetuptoolsBuildPy.user_options[:] boolean_options = SetuptoolsBuildPy.boolean_options[:] def finalize_options(self): # Update build_lib settings from the build command to always put # build files in platform-specific subdirectories of build/, even # for projects with only pure-Python source (this is desirable # specifically for support of multiple Python version). build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) build_cmd.build_purelib = platlib_dir build_cmd.build_lib = platlib_dir self.build_lib = platlib_dir SetuptoolsBuildPy.finalize_options(self) def run_2to3(self, files, doctests=False): # Filter the files to exclude things that shouldn't be 2to3'd skip_2to3 = self.distribution.skip_2to3 filtered_files = [] for filename in files: for package in skip_2to3: if filename[len(self.build_lib) + 1:].startswith(package): break else: filtered_files.append(filename) SetuptoolsBuildPy.run_2to3(self, filtered_files, doctests) def run(self): # first run the normal build_py SetuptoolsBuildPy.run(self) asdf-1.3.3/astropy_helpers/astropy_helpers/commands/_dummy.py0000644000175000017500000000557413246003560024106 0ustar dandan00000000000000""" Provides a base class for a 'dummy' setup.py command that has no functionality (probably due to a missing requirement). This dummy command can raise an exception when it is run, explaining to the user what dependencies must be met to use this command. The reason this is at all tricky is that we want the command to be able to provide this message even when the user passes arguments to the command. If we don't know ahead of time what arguments the command can take, this is difficult, because distutils does not allow unknown arguments to be passed to a setup.py command. This hacks around that restriction to provide a useful error message even when a user passes arguments to the dummy implementation of a command. Use this like: try: from some_dependency import SetupCommand except ImportError: from ._dummy import _DummyCommand class SetupCommand(_DummyCommand): description = \ 'Implementation of SetupCommand from some_dependency; ' 'some_dependency must be installed to run this command' # This is the message that will be raised when a user tries to # run this command--define it as a class attribute. error_msg = \ "The 'setup_command' command requires the some_dependency " "package to be installed and importable." 
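With such a class in place, running ``./setup.py setup_command`` (with or
without extra command-line options) raises a ``DistutilsArgError`` carrying
``error_msg``, so the user sees the dependency message rather than
distutils' generic "unrecognized option" error.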
""" import sys from setuptools import Command from distutils.errors import DistutilsArgError from textwrap import dedent class _DummyCommandMeta(type): """ Causes an exception to be raised on accessing attributes of a command class so that if ``./setup.py command_name`` is run with additional command-line options we can provide a useful error message instead of the default that tells users the options are unrecognized. """ def __init__(cls, name, bases, members): if bases == (Command, object): # This is the _DummyCommand base class, presumably return if not hasattr(cls, 'description'): raise TypeError( "_DummyCommand subclass must have a 'description' " "attribute.") if not hasattr(cls, 'error_msg'): raise TypeError( "_DummyCommand subclass must have an 'error_msg' " "attribute.") def __getattribute__(cls, attr): if attr in ('description', 'error_msg'): # Allow cls.description to work so that `./setup.py # --help-commands` still works return super(_DummyCommandMeta, cls).__getattribute__(attr) raise DistutilsArgError(cls.error_msg) if sys.version_info[0] < 3: exec(dedent(""" class _DummyCommand(Command, object): __metaclass__ = _DummyCommandMeta """)) else: exec(dedent(""" class _DummyCommand(Command, object, metaclass=_DummyCommandMeta): pass """)) asdf-1.3.3/astropy_helpers/astropy_helpers/commands/install_lib.py0000644000175000017500000000100013246003560025064 0ustar dandan00000000000000from setuptools.command.install_lib import install_lib as SetuptoolsInstallLib from ..utils import _get_platlib_dir class AstropyInstallLib(SetuptoolsInstallLib): user_options = SetuptoolsInstallLib.user_options[:] boolean_options = SetuptoolsInstallLib.boolean_options[:] def finalize_options(self): build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) self.build_dir = platlib_dir SetuptoolsInstallLib.finalize_options(self) asdf-1.3.3/astropy_helpers/astropy_helpers/commands/__init__.py0000644000175000017500000000000013243564211024330 0ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/commands/_test_compat.py0000644000175000017500000002671213246003560025272 0ustar dandan00000000000000""" Old implementation of ``./setup.py test`` command. This has been moved to astropy.tests as of Astropy v1.1.0, but a copy of the implementation is kept here for backwards compatibility. """ from __future__ import absolute_import, unicode_literals import inspect import os import shutil import subprocess import sys import tempfile from setuptools import Command from ..compat import _fix_user_options PY3 = sys.version_info[0] == 3 class AstropyTest(Command, object): description = 'Run the tests for this package' user_options = [ ('package=', 'P', "The name of a specific package to test, e.g. 'io.fits' or 'utils'. " "If nothing is specified, all default tests are run."), ('test-path=', 't', 'Specify a test location by path. If a relative path to a .py file, ' 'it is relative to the built package, so e.g., a leading "astropy/" ' 'is necessary. If a relative path to a .rst file, it is relative to ' 'the directory *below* the --docs-path directory, so a leading ' '"docs/" is usually necessary. May also be an absolute path.'), ('verbose-results', 'V', 'Turn on verbose output from pytest.'), ('plugins=', 'p', 'Plugins to enable when running pytest.'), ('pastebin=', 'b', "Enable pytest pastebin output. 
Either 'all' or 'failed'."), ('args=', 'a', 'Additional arguments to be passed to pytest.'), ('remote-data', 'R', 'Run tests that download remote data.'), ('pep8', '8', 'Enable PEP8 checking and disable regular tests. ' 'Requires the pytest-pep8 plugin.'), ('pdb', 'd', 'Start the interactive Python debugger on errors.'), ('coverage', 'c', 'Create a coverage report. Requires the coverage package.'), ('open-files', 'o', 'Fail if any tests leave files open. Requires the ' 'psutil package.'), ('parallel=', 'j', 'Run the tests in parallel on the specified number of ' 'CPUs. If negative, all the cores on the machine will be ' 'used. Requires the pytest-xdist plugin.'), ('docs-path=', None, 'The path to the documentation .rst files. If not provided, and ' 'the current directory contains a directory called "docs", that ' 'will be used.'), ('skip-docs', None, "Don't test the documentation .rst files."), ('repeat=', None, 'How many times to repeat each test (can be used to check for ' 'sporadic failures).'), ('temp-root=', None, 'The root directory in which to create the temporary testing files. ' 'If unspecified the system default is used (e.g. /tmp) as explained ' 'in the documentation for tempfile.mkstemp.') ] user_options = _fix_user_options(user_options) package_name = '' def initialize_options(self): self.package = None self.test_path = None self.verbose_results = False self.plugins = None self.pastebin = None self.args = None self.remote_data = False self.pep8 = False self.pdb = False self.coverage = False self.open_files = False self.parallel = 0 self.docs_path = None self.skip_docs = False self.repeat = None self.temp_root = None def finalize_options(self): # Normally we would validate the options here, but that's handled in # run_tests pass # Most of the test runner arguments have the same name as attributes on # this command class, with one exception (for now) _test_runner_arg_attr_map = { 'verbose': 'verbose_results' } def generate_testing_command(self): """ Build a Python script to run the tests. """ cmd_pre = '' # Commands to run before the test function cmd_post = '' # Commands to run after the test function if self.coverage: pre, post = self._generate_coverage_commands() cmd_pre += pre cmd_post += post def get_attr(arg): attr = self._test_runner_arg_attr_map.get(arg, arg) return getattr(self, attr) test_args = filter(lambda arg: hasattr(self, arg), self._get_test_runner_args()) test_args = ', '.join('{0}={1!r}'.format(arg, get_attr(arg)) for arg in test_args) if PY3: set_flag = "import builtins; builtins._ASTROPY_TEST_ = True" else: set_flag = "import __builtin__; __builtin__._ASTROPY_TEST_ = True" cmd = ('{cmd_pre}{0}; import {1.package_name}, sys; result = ' '{1.package_name}.test({test_args}); {cmd_post}' 'sys.exit(result)') return cmd.format(set_flag, self, cmd_pre=cmd_pre, cmd_post=cmd_post, test_args=test_args) def _validate_required_deps(self): """ This method checks that any required modules are installed before running the tests. """ try: import astropy # noqa except ImportError: raise ImportError( "The 'test' command requires the astropy package to be " "installed and importable.") def run(self): """ Run the tests! """ # Ensure there is a doc path if self.docs_path is None: if os.path.exists('docs'): self.docs_path = os.path.abspath('docs') # Build a testing install of the package self._build_temp_install() # Ensure all required packages are installed self._validate_required_deps() # Run everything in a try: finally: so that the tmp dir gets deleted. 
try: # Construct this modules testing command cmd = self.generate_testing_command() # Run the tests in a subprocess--this is necessary since # new extension modules may have appeared, and this is the # easiest way to set up a new environment # On Python 3.x prior to 3.3, the creation of .pyc files # is not atomic. py.test jumps through some hoops to make # this work by parsing import statements and carefully # importing files atomically. However, it can't detect # when __import__ is used, so its carefulness still fails. # The solution here (admittedly a bit of a hack), is to # turn off the generation of .pyc files altogether by # passing the `-B` switch to `python`. This does mean # that each core will have to compile .py file to bytecode # itself, rather than getting lucky and borrowing the work # already done by another core. Compilation is an # insignificant fraction of total testing time, though, so # it's probably not worth worrying about. retcode = subprocess.call([sys.executable, '-B', '-c', cmd], cwd=self.testing_path, close_fds=False) finally: # Remove temporary directory shutil.rmtree(self.tmp_dir) raise SystemExit(retcode) def _build_temp_install(self): """ Build the package and copy the build to a temporary directory for the purposes of testing this avoids creating pyc and __pycache__ directories inside the build directory """ self.reinitialize_command('build', inplace=True) self.run_command('build') build_cmd = self.get_finalized_command('build') new_path = os.path.abspath(build_cmd.build_lib) # On OSX the default path for temp files is under /var, but in most # cases on OSX /var is actually a symlink to /private/var; ensure we # dereference that link, because py.test is very sensitive to relative # paths... tmp_dir = tempfile.mkdtemp(prefix=self.package_name + '-test-', dir=self.temp_root) self.tmp_dir = os.path.realpath(tmp_dir) self.testing_path = os.path.join(self.tmp_dir, os.path.basename(new_path)) shutil.copytree(new_path, self.testing_path) new_docs_path = os.path.join(self.tmp_dir, os.path.basename(self.docs_path)) shutil.copytree(self.docs_path, new_docs_path) self.docs_path = new_docs_path shutil.copy('setup.cfg', self.tmp_dir) def _generate_coverage_commands(self): """ This method creates the post and pre commands if coverage is to be generated """ if self.parallel != 0: raise ValueError( "--coverage can not be used with --parallel") try: import coverage # noqa except ImportError: raise ImportError( "--coverage requires that the coverage package is " "installed.") # Don't use get_pkg_data_filename here, because it # requires importing astropy.config and thus screwing # up coverage results for those packages. 
coveragerc = os.path.join( self.testing_path, self.package_name, 'tests', 'coveragerc') # We create a coveragerc that is specific to the version # of Python we're running, so that we can mark branches # as being specifically for Python 2 or Python 3 with open(coveragerc, 'r') as fd: coveragerc_content = fd.read() if PY3: ignore_python_version = '2' else: ignore_python_version = '3' coveragerc_content = coveragerc_content.replace( "{ignore_python_version}", ignore_python_version).replace( "{packagename}", self.package_name) tmp_coveragerc = os.path.join(self.tmp_dir, 'coveragerc') with open(tmp_coveragerc, 'wb') as tmp: tmp.write(coveragerc_content.encode('utf-8')) cmd_pre = ( 'import coverage; ' 'cov = coverage.coverage(data_file="{0}", config_file="{1}"); ' 'cov.start();'.format( os.path.abspath(".coverage"), tmp_coveragerc)) cmd_post = ( 'cov.stop(); ' 'from astropy.tests.helper import _save_coverage; ' '_save_coverage(cov, result, "{0}", "{1}");'.format( os.path.abspath('.'), self.testing_path)) return cmd_pre, cmd_post def _get_test_runner_args(self): """ A hack to determine what arguments are supported by the package's test() function. In the future there should be a more straightforward API to determine this (really it should be determined by the ``TestRunner`` class for whatever version of Astropy is in use). """ if PY3: import builtins builtins._ASTROPY_TEST_ = True else: import __builtin__ __builtin__._ASTROPY_TEST_ = True try: pkg = __import__(self.package_name) if not hasattr(pkg, 'test'): raise ImportError( 'package {0} does not have a {0}.test() function as ' 'required by the Astropy test runner'.format(self.package_name)) argspec = inspect.getargspec(pkg.test) return argspec.args finally: if PY3: del builtins._ASTROPY_TEST_ else: del __builtin__._ASTROPY_TEST_ asdf-1.3.3/astropy_helpers/astropy_helpers/setup_helpers.py0000644000175000017500000006636013246003560023675 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This module contains a number of utilities for use during setup/build/packaging that are useful to astropy as a whole. 
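As a minimal sketch (the package name and version below are placeholders,
not part of this module), an affiliated package's ``setup.py`` typically
wires these helpers together roughly like this::

    from setuptools import setup
    from astropy_helpers.setup_helpers import (
        register_commands, get_package_info)

    version = '1.0'
    # Custom command classes (test, build_docs, build_ext, ...) keyed by name
    cmdclassd = register_commands('mypackage', version, release=True)
    # Extensions, package_data, etc. collected from setup_package.py modules
    package_info = get_package_info()

    setup(name='mypackage', version=version,
          cmdclass=cmdclassd, **package_info)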
""" from __future__ import absolute_import, print_function import collections import os import re import subprocess import sys import traceback import warnings from distutils import log from distutils.dist import Distribution from distutils.errors import DistutilsOptionError, DistutilsModuleError from distutils.core import Extension from distutils.core import Command from distutils.command.sdist import sdist as DistutilsSdist from setuptools import find_packages as _find_packages from .distutils_helpers import (add_command_option, get_compiler_option, get_dummy_distribution, get_distutils_build_option, get_distutils_build_or_install_option) from .version_helpers import get_pkg_version_module from .utils import (walk_skip_hidden, import_file, extends_doc, resolve_name, AstropyDeprecationWarning) from .commands.build_ext import generate_build_ext_command from .commands.build_py import AstropyBuildPy from .commands.install import AstropyInstall from .commands.install_lib import AstropyInstallLib from .commands.register import AstropyRegister from .commands.test import AstropyTest # These imports are not used in this module, but are included for backwards # compat with older versions of this module from .utils import get_numpy_include_path, write_if_different # noqa from .commands.build_ext import should_build_with_cython, get_compiler_version # noqa _module_state = {'registered_commands': None, 'have_sphinx': False, 'package_cache': None, 'exclude_packages': set(), 'excludes_too_late': False} try: import sphinx # noqa _module_state['have_sphinx'] = True except ValueError as e: # This can occur deep in the bowels of Sphinx's imports by way of docutils # and an occurrence of this bug: http://bugs.python.org/issue18378 # In this case sphinx is effectively unusable if 'unknown locale' in e.args[0]: log.warn( "Possible misconfiguration of one of the environment variables " "LC_ALL, LC_CTYPES, LANG, or LANGUAGE. For an example of how to " "configure your system's language environment on OSX see " "http://blog.remibergsma.com/2012/07/10/" "setting-locales-correctly-on-mac-osx-terminal-application/") except ImportError: pass except SyntaxError: # occurs if markupsafe is recent version, which doesn't support Python 3.2 pass PY3 = sys.version_info[0] >= 3 # This adds a new keyword to the setup() function Distribution.skip_2to3 = [] def adjust_compiler(package): """ This function detects broken compilers and switches to another. If the environment variable CC is explicitly set, or a compiler is specified on the commandline, no override is performed -- the purpose here is to only override a default compiler. The specific compilers with problems are: * The default compiler in XCode-4.2, llvm-gcc-4.2, segfaults when compiling wcslib. The set of broken compilers can be updated by changing the compiler_mapping variable. It is a list of 2-tuples where the first in the pair is a regular expression matching the version of the broken compiler, and the second is the compiler to change to. """ warnings.warn( 'Direct use of the adjust_compiler function in setup.py is ' 'deprecated and can be removed from your setup.py. This ' 'functionality is now incorporated directly into the build_ext ' 'command.', AstropyDeprecationWarning) def get_debug_option(packagename): """ Determines if the build is in debug mode. Returns ------- debug : bool True if the current build was started with the debug option, False otherwise. 
""" try: current_debug = get_pkg_version_module(packagename, fromlist=['debug'])[0] except (ImportError, AttributeError): current_debug = None # Only modify the debug flag if one of the build commands was explicitly # run (i.e. not as a sub-command of something else) dist = get_dummy_distribution() if any(cmd in dist.commands for cmd in ['build', 'build_ext']): debug = bool(get_distutils_build_option('debug')) else: debug = bool(current_debug) if current_debug is not None and current_debug != debug: build_ext_cmd = dist.get_command_class('build_ext') build_ext_cmd.force_rebuild = True return debug def add_exclude_packages(excludes): if _module_state['excludes_too_late']: raise RuntimeError( "add_package_excludes must be called before all other setup helper " "functions in order to properly handle excluded packages") _module_state['exclude_packages'].update(set(excludes)) def register_commands(package, version, release, srcdir='.'): if _module_state['registered_commands'] is not None: return _module_state['registered_commands'] if _module_state['have_sphinx']: try: from .commands.build_sphinx import (AstropyBuildSphinx, AstropyBuildDocs) except ImportError: AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx else: AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx _module_state['registered_commands'] = registered_commands = { 'test': generate_test_command(package), # Use distutils' sdist because it respects package_data. # setuptools/distributes sdist requires duplication of information in # MANIFEST.in 'sdist': DistutilsSdist, # The exact form of the build_ext command depends on whether or not # we're building a release version 'build_ext': generate_build_ext_command(package, release), # We have a custom build_py to generate the default configuration file 'build_py': AstropyBuildPy, # Since install can (in some circumstances) be run without # first building, we also need to override install and # install_lib. See #2223 'install': AstropyInstall, 'install_lib': AstropyInstallLib, 'register': AstropyRegister, 'build_sphinx': AstropyBuildSphinx, 'build_docs': AstropyBuildDocs } # Need to override the __name__ here so that the commandline options are # presented as being related to the "build" command, for example; normally # this wouldn't be necessary since commands also have a command_name # attribute, but there is a bug in distutils' help display code that it # uses __name__ instead of command_name. Yay distutils! for name, cls in registered_commands.items(): cls.__name__ = name # Add a few custom options; more of these can be added by specific packages # later for option in [ ('use-system-libraries', "Use system libraries whenever possible", True)]: add_command_option('build', *option) add_command_option('install', *option) add_command_hooks(registered_commands, srcdir=srcdir) return registered_commands def add_command_hooks(commands, srcdir='.'): """ Look through setup_package.py modules for functions with names like ``pre__hook`` and ``post__hook`` where ```` is the name of a ``setup.py`` command (e.g. build_ext). If either hook is present this adds a wrapped version of that command to the passed in ``commands`` `dict`. ``commands`` may be pre-populated with other custom distutils command classes that should be wrapped if there are hooks for them (e.g. `AstropyBuildPy`). 
""" hook_re = re.compile(r'^(pre|post)_(.+)_hook$') # Distutils commands have a method of the same name, but it is not a # *classmethod* (which probably didn't exist when distutils was first # written) def get_command_name(cmdcls): if hasattr(cmdcls, 'command_name'): return cmdcls.command_name else: return cmdcls.__name__ packages = filter_packages(find_packages(srcdir)) dist = get_dummy_distribution() hooks = collections.defaultdict(dict) for setuppkg in iter_setup_packages(srcdir, packages): for name, obj in vars(setuppkg).items(): match = hook_re.match(name) if not match: continue hook_type = match.group(1) cmd_name = match.group(2) if hook_type not in hooks[cmd_name]: hooks[cmd_name][hook_type] = [] hooks[cmd_name][hook_type].append((setuppkg.__name__, obj)) for cmd_name, cmd_hooks in hooks.items(): commands[cmd_name] = generate_hooked_command( cmd_name, dist.get_command_class(cmd_name), cmd_hooks) def generate_hooked_command(cmd_name, cmd_cls, hooks): """ Returns a generated subclass of ``cmd_cls`` that runs the pre- and post-command hooks for that command before and after the ``cmd_cls.run`` method. """ def run(self, orig_run=cmd_cls.run): self.run_command_hooks('pre_hooks') orig_run(self) self.run_command_hooks('post_hooks') return type(cmd_name, (cmd_cls, object), {'run': run, 'run_command_hooks': run_command_hooks, 'pre_hooks': hooks.get('pre', []), 'post_hooks': hooks.get('post', [])}) def run_command_hooks(cmd_obj, hook_kind): """Run hooks registered for that command and phase. *cmd_obj* is a finalized command object; *hook_kind* is either 'pre_hook' or 'post_hook'. """ hooks = getattr(cmd_obj, hook_kind, None) if not hooks: return for modname, hook in hooks: if isinstance(hook, str): try: hook_obj = resolve_name(hook) except ImportError as exc: raise DistutilsModuleError( 'cannot find hook {0}: {1}'.format(hook, exc)) else: hook_obj = hook if not callable(hook_obj): raise DistutilsOptionError('hook {0!r} is not callable' % hook) log.info('running {0} from {1} for {2} command'.format( hook_kind.rstrip('s'), modname, cmd_obj.get_command_name())) try: hook_obj(cmd_obj) except Exception: log.error('{0} command hook {1} raised an exception: %s\n'.format( hook_obj.__name__, cmd_obj.get_command_name())) log.error(traceback.format_exc()) sys.exit(1) def generate_test_command(package_name): """ Creates a custom 'test' command for the given package which sets the command's ``package_name`` class attribute to the name of the package being tested. """ return type(package_name.title() + 'Test', (AstropyTest,), {'package_name': package_name}) def update_package_files(srcdir, extensions, package_data, packagenames, package_dirs): """ This function is deprecated and maintained for backward compatibility with affiliated packages. Affiliated packages should update their setup.py to use `get_package_info` instead. """ info = get_package_info(srcdir) extensions.extend(info['ext_modules']) package_data.update(info['package_data']) packagenames = list(set(packagenames + info['packages'])) package_dirs.update(info['package_dir']) def get_package_info(srcdir='.', exclude=()): """ Collates all of the information for building all subpackages and returns a dictionary of keyword arguments that can be passed directly to `distutils.setup`. The purpose of this function is to allow subpackages to update the arguments to the package's ``setup()`` function in its setup.py script, rather than having to specify all extensions/package data directly in the ``setup.py``. 
See Astropy's own ``setup.py`` for example usage and the Astropy development docs for more details. This function obtains that information by iterating through all packages in ``srcdir`` and locating a ``setup_package.py`` module. This module can contain the following functions: ``get_extensions()``, ``get_package_data()``, ``get_build_options()``, ``get_external_libraries()``, and ``requires_2to3()``. Each of those functions take no arguments. - ``get_extensions`` returns a list of `distutils.extension.Extension` objects. - ``get_package_data()`` returns a dict formatted as required by the ``package_data`` argument to ``setup()``. - ``get_build_options()`` returns a list of tuples describing the extra build options to add. - ``get_external_libraries()`` returns a list of libraries that can optionally be built using external dependencies. - ``get_entry_points()`` returns a dict formatted as required by the ``entry_points`` argument to ``setup()``. - ``requires_2to3()`` should return `True` when the source code requires `2to3` processing to run on Python 3.x. If ``requires_2to3()`` is missing, it is assumed to return `True`. """ ext_modules = [] packages = [] package_data = {} package_dir = {} skip_2to3 = [] if exclude: warnings.warn( "Use of the exclude parameter is no longer supported since it does " "not work as expected. Use add_exclude_packages instead. Note that " "it must be called prior to any other calls from setup helpers.", AstropyDeprecationWarning) # Use the find_packages tool to locate all packages and modules packages = filter_packages(find_packages(srcdir, exclude=exclude)) # Update package_dir if the package lies in a subdirectory if srcdir != '.': package_dir[''] = srcdir # For each of the setup_package.py modules, extract any # information that is needed to install them. The build options # are extracted first, so that their values will be available in # subsequent calls to `get_extensions`, etc. for setuppkg in iter_setup_packages(srcdir, packages): if hasattr(setuppkg, 'get_build_options'): options = setuppkg.get_build_options() for option in options: add_command_option('build', *option) if hasattr(setuppkg, 'get_external_libraries'): libraries = setuppkg.get_external_libraries() for library in libraries: add_external_library(library) if hasattr(setuppkg, 'requires_2to3'): requires_2to3 = setuppkg.requires_2to3() else: requires_2to3 = True if not requires_2to3: skip_2to3.append( os.path.dirname(setuppkg.__file__)) for setuppkg in iter_setup_packages(srcdir, packages): # get_extensions must include any Cython extensions by their .pyx # filename. if hasattr(setuppkg, 'get_extensions'): ext_modules.extend(setuppkg.get_extensions()) if hasattr(setuppkg, 'get_package_data'): package_data.update(setuppkg.get_package_data()) # Locate any .pyx files not already specified, and add their extensions in. # The default include dirs include numpy to facilitate numerical work. ext_modules.extend(get_cython_extensions(srcdir, packages, ext_modules, ['numpy'])) # Now remove extensions that have the special name 'skip_cython', as they # exist Only to indicate that the cython extensions shouldn't be built for i, ext in reversed(list(enumerate(ext_modules))): if ext.name == 'skip_cython': del ext_modules[i] # On Microsoft compilers, we need to pass the '/MANIFEST' # commandline argument. This was the default on MSVC 9.0, but is # now required on MSVC 10.0, but it doesn't seem to hurt to add # it unconditionally. 
if get_compiler_option() == 'msvc': for ext in ext_modules: ext.extra_link_args.append('/MANIFEST') return { 'ext_modules': ext_modules, 'packages': packages, 'package_dir': package_dir, 'package_data': package_data, 'skip_2to3': skip_2to3 } def iter_setup_packages(srcdir, packages): """ A generator that finds and imports all of the ``setup_package.py`` modules in the source packages. Returns ------- modgen : generator A generator that yields (modname, mod), where `mod` is the module and `modname` is the module name for the ``setup_package.py`` modules. """ for packagename in packages: package_parts = packagename.split('.') package_path = os.path.join(srcdir, *package_parts) setup_package = os.path.relpath( os.path.join(package_path, 'setup_package.py')) if os.path.isfile(setup_package): module = import_file(setup_package, name=packagename + '.setup_package') yield module def iter_pyx_files(package_dir, package_name): """ A generator that yields Cython source files (ending in '.pyx') in the source packages. Returns ------- pyxgen : generator A generator that yields (extmod, fullfn) where `extmod` is the full name of the module that the .pyx file would live in based on the source directory structure, and `fullfn` is the path to the .pyx file. """ for dirpath, dirnames, filenames in walk_skip_hidden(package_dir): for fn in filenames: if fn.endswith('.pyx'): fullfn = os.path.relpath(os.path.join(dirpath, fn)) # Package must match file name extmod = '.'.join([package_name, fn[:-4]]) yield (extmod, fullfn) break # Don't recurse into subdirectories def get_cython_extensions(srcdir, packages, prevextensions=tuple(), extincludedirs=None): """ Looks for Cython files and generates Extensions if needed. Parameters ---------- srcdir : str Path to the root of the source directory to search. prevextensions : list of `~distutils.core.Extension` objects The extensions that are already defined. Any .pyx files already here will be ignored. extincludedirs : list of str or None Directories to include as the `include_dirs` argument to the generated `~distutils.core.Extension` objects. Returns ------- exts : list of `~distutils.core.Extension` objects The new extensions that are needed to compile all .pyx files (does not include any already in `prevextensions`). """ # Vanilla setuptools and old versions of distribute include Cython files # as .c files in the sources, not .pyx, so we cannot simply look for # existing .pyx sources in the previous sources, but we should also check # for .c files with the same remaining filename. So we look for .pyx and # .c files, and we strip the extension. prevsourcepaths = [] ext_modules = [] for ext in prevextensions: for s in ext.sources: if s.endswith(('.pyx', '.c', '.cpp')): sourcepath = os.path.realpath(os.path.splitext(s)[0]) prevsourcepaths.append(sourcepath) for package_name in packages: package_parts = package_name.split('.') package_path = os.path.join(srcdir, *package_parts) for extmod, pyxfn in iter_pyx_files(package_path, package_name): sourcepath = os.path.realpath(os.path.splitext(pyxfn)[0]) if sourcepath not in prevsourcepaths: ext_modules.append(Extension(extmod, [pyxfn], include_dirs=extincludedirs)) return ext_modules class DistutilsExtensionArgs(collections.defaultdict): """ A special dictionary whose default values are the empty list. This is useful for building up a set of arguments for `distutils.Extension` without worrying whether the entry is already present. 
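A small usage sketch (library, path, and module names are placeholders)::

    from distutils.core import Extension

    args = DistutilsExtensionArgs()
    args['libraries'].append('m')      # key is created on first access
    args.update({'include_dirs': ['/usr/local/include']})
    ext = Extension('mypkg._ext', ['mypkg/_ext.c'], **args)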
""" def __init__(self, *args, **kwargs): def default_factory(): return [] super(DistutilsExtensionArgs, self).__init__( default_factory, *args, **kwargs) def update(self, other): for key, val in other.items(): self[key].extend(val) def pkg_config(packages, default_libraries, executable='pkg-config'): """ Uses pkg-config to update a set of distutils Extension arguments to include the flags necessary to link against the given packages. If the pkg-config lookup fails, default_libraries is applied to libraries. Parameters ---------- packages : list of str A list of pkg-config packages to look up. default_libraries : list of str A list of library names to use if the pkg-config lookup fails. Returns ------- config : dict A dictionary containing keyword arguments to `distutils.Extension`. These entries include: - ``include_dirs``: A list of include directories - ``library_dirs``: A list of library directories - ``libraries``: A list of libraries - ``define_macros``: A list of macro defines - ``undef_macros``: A list of macros to undefine - ``extra_compile_args``: A list of extra arguments to pass to the compiler """ flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries', '-D': 'define_macros', '-U': 'undef_macros'} command = "{0} --libs --cflags {1}".format(executable, ' '.join(packages)), result = DistutilsExtensionArgs() try: pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) output = pipe.communicate()[0].strip() except subprocess.CalledProcessError as e: lines = [ ("{0} failed. This may cause the build to fail below." .format(executable)), " command: {0}".format(e.cmd), " returncode: {0}".format(e.returncode), " output: {0}".format(e.output) ] log.warn('\n'.join(lines)) result['libraries'].extend(default_libraries) else: if pipe.returncode != 0: lines = [ "pkg-config could not lookup up package(s) {0}.".format( ", ".join(packages)), "This may cause the build to fail below." ] log.warn('\n'.join(lines)) result['libraries'].extend(default_libraries) else: for token in output.split(): # It's not clear what encoding the output of # pkg-config will come to us in. It will probably be # some combination of pure ASCII (for the compiler # flags) and the filesystem encoding (for any argument # that includes directories or filenames), but this is # just conjecture, as the pkg-config documentation # doesn't seem to address it. arg = token[:2].decode('ascii') value = token[2:].decode(sys.getfilesystemencoding()) if arg in flag_map: if arg == '-D': value = tuple(value.split('=', 1)) result[flag_map[arg]].append(value) else: result['extra_compile_args'].append(value) return result def add_external_library(library): """ Add a build option for selecting the internal or system copy of a library. Parameters ---------- library : str The name of the library. If the library is `foo`, the build option will be called `--use-system-foo`. """ for command in ['build', 'build_ext', 'install']: add_command_option(command, str('use-system-' + library), 'Use the system {0} library'.format(library), is_bool=True) def use_system_library(library): """ Returns `True` if the build configuration indicates that the given library should use the system copy of the library rather than the internal one. For the given library `foo`, this will be `True` if `--use-system-foo` or `--use-system-libraries` was provided at the commandline or in `setup.cfg`. 
Parameters ---------- library : str The name of the library Returns ------- use_system : bool `True` if the build should use the system copy of the library. """ return ( get_distutils_build_or_install_option('use_system_{0}'.format(library)) or get_distutils_build_or_install_option('use_system_libraries')) @extends_doc(_find_packages) def find_packages(where='.', exclude=(), invalidate_cache=False): """ This version of ``find_packages`` caches previous results to speed up subsequent calls. Use ``invalide_cache=True`` to ignore cached results from previous ``find_packages`` calls, and repeat the package search. """ if exclude: warnings.warn( "Use of the exclude parameter is no longer supported since it does " "not work as expected. Use add_exclude_packages instead. Note that " "it must be called prior to any other calls from setup helpers.", AstropyDeprecationWarning) # Calling add_exclude_packages after this point will have no effect _module_state['excludes_too_late'] = True if not invalidate_cache and _module_state['package_cache'] is not None: return _module_state['package_cache'] packages = _find_packages( where=where, exclude=list(_module_state['exclude_packages'])) _module_state['package_cache'] = packages return packages def filter_packages(packagenames): """ Removes some packages from the package list that shouldn't be installed on the current version of Python. """ if PY3: exclude = '_py2' else: exclude = '_py3' return [x for x in packagenames if not x.endswith(exclude)] class FakeBuildSphinx(Command): """ A dummy build_sphinx command that is called if Sphinx is not installed and displays a relevant error message """ # user options inherited from sphinx.setup_command.BuildDoc user_options = [ ('fresh-env', 'E', ''), ('all-files', 'a', ''), ('source-dir=', 's', ''), ('build-dir=', None, ''), ('config-dir=', 'c', ''), ('builder=', 'b', ''), ('project=', None, ''), ('version=', None, ''), ('release=', None, ''), ('today=', None, ''), ('link-index', 'i', '')] # user options appended in astropy.setup_helpers.AstropyBuildSphinx user_options.append(('warnings-returncode', 'w', '')) user_options.append(('clean-docs', 'l', '')) user_options.append(('no-intersphinx', 'n', '')) user_options.append(('open-docs-in-browser', 'o', '')) def initialize_options(self): try: raise RuntimeError("Sphinx and its dependencies must be installed " "for build_docs.") except: log.error('error: Sphinx and its dependencies must be installed ' 'for build_docs.') sys.exit(1) asdf-1.3.3/astropy_helpers/astropy_helpers/version_helpers.py0000644000175000017500000002313213246003560024210 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Utilities for generating the version string for Astropy (or an affiliated package) and the version.py module, which contains version info for the package. Within the generated astropy.version module, the `major`, `minor`, and `bugfix` variables hold the respective parts of the version number (bugfix is '0' if absent). The `release` variable is True if this is a release, and False if this is a development version of astropy. For the actual version string, use:: from astropy.version import version or:: from astropy import __version__ """ from __future__ import division import datetime import imp import os import pkgutil import sys import time from distutils import log import pkg_resources from . 
import git_helpers from .distutils_helpers import is_distutils_display_option from .utils import invalidate_caches PY3 = sys.version_info[0] == 3 def _version_split(version): """ Split a version string into major, minor, and bugfix numbers. If any of those numbers are missing the default is zero. Any pre/post release modifiers are ignored. Examples ======== >>> _version_split('1.2.3') (1, 2, 3) >>> _version_split('1.2') (1, 2, 0) >>> _version_split('1.2rc1') (1, 2, 0) >>> _version_split('1') (1, 0, 0) >>> _version_split('') (0, 0, 0) """ parsed_version = pkg_resources.parse_version(version) if hasattr(parsed_version, 'base_version'): # New version parsing for setuptools >= 8.0 if parsed_version.base_version: parts = [int(part) for part in parsed_version.base_version.split('.')] else: parts = [] else: parts = [] for part in parsed_version: if part.startswith('*'): # Ignore any .dev, a, b, rc, etc. break parts.append(int(part)) if len(parts) < 3: parts += [0] * (3 - len(parts)) # In principle a version could have more parts (like 1.2.3.4) but we only # support .. return tuple(parts[:3]) # This is used by setup.py to create a new version.py - see that file for # details. Note that the imports have to be absolute, since this is also used # by affiliated packages. _FROZEN_VERSION_PY_TEMPLATE = """ # Autogenerated by {packagetitle}'s setup.py on {timestamp!s} from __future__ import unicode_literals import datetime {header} major = {major} minor = {minor} bugfix = {bugfix} release = {rel} timestamp = {timestamp!r} debug = {debug} try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" """[1:] _FROZEN_VERSION_PY_WITH_GIT_HEADER = """ {git_helpers} _packagename = "{packagename}" _last_generated_version = "{verstr}" _last_githash = "{githash}" # Determine where the source code for this module # lives. If __file__ is not a filesystem path then # it is assumed not to live in a git repo at all. 
if _get_repo_path(__file__, levels=len(_packagename.split('.'))): version = update_git_devstr(_last_generated_version, path=__file__) githash = get_git_devstr(sha=True, show_warning=False, path=__file__) or _last_githash else: # The file does not appear to live in a git repo so don't bother # invoking git version = _last_generated_version githash = _last_githash """[1:] _FROZEN_VERSION_PY_STATIC_HEADER = """ version = "{verstr}" githash = "{githash}" """[1:] def _get_version_py_str(packagename, version, githash, release, debug, uses_git=True): epoch = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) timestamp = datetime.datetime.utcfromtimestamp(epoch) major, minor, bugfix = _version_split(version) if packagename.lower() == 'astropy': packagetitle = 'Astropy' else: packagetitle = 'Astropy-affiliated package ' + packagename header = '' if uses_git: header = _generate_git_header(packagename, version, githash) elif not githash: # _generate_git_header will already generate a new git has for us, but # for creating a new version.py for a release (even if uses_git=False) # we still need to get the githash to include in the version.py # See https://github.com/astropy/astropy-helpers/issues/141 githash = git_helpers.get_git_devstr(sha=True, show_warning=True) if not header: # If _generate_git_header fails it returns an empty string header = _FROZEN_VERSION_PY_STATIC_HEADER.format(verstr=version, githash=githash) return _FROZEN_VERSION_PY_TEMPLATE.format(packagetitle=packagetitle, timestamp=timestamp, header=header, major=major, minor=minor, bugfix=bugfix, rel=release, debug=debug) def _generate_git_header(packagename, version, githash): """ Generates a header to the version.py module that includes utilities for probing the git repository for updates (to the current git hash, etc.) These utilities should only be available in development versions, and not in release builds. If this fails for any reason an empty string is returned. """ loader = pkgutil.get_loader(git_helpers) source = loader.get_source(git_helpers.__name__) or '' source_lines = source.splitlines() if not source_lines: log.warn('Cannot get source code for astropy_helpers.git_helpers; ' 'git support disabled.') return '' idx = 0 for idx, line in enumerate(source_lines): if line.startswith('# BEGIN'): break git_helpers_py = '\n'.join(source_lines[idx + 1:]) if PY3: verstr = version else: # In Python 2 don't pass in a unicode string; otherwise verstr will # be represented with u'' syntax which breaks on Python 3.x with x # < 3. 
This is only an issue when developing on multiple Python # versions at once verstr = version.encode('utf8') new_githash = git_helpers.get_git_devstr(sha=True, show_warning=False) if new_githash: githash = new_githash return _FROZEN_VERSION_PY_WITH_GIT_HEADER.format( git_helpers=git_helpers_py, packagename=packagename, verstr=verstr, githash=githash) def generate_version_py(packagename, version, release=None, debug=None, uses_git=True, srcdir='.'): """Regenerate the version.py module if necessary.""" try: version_module = get_pkg_version_module(packagename) try: last_generated_version = version_module._last_generated_version except AttributeError: last_generated_version = version_module.version try: last_githash = version_module._last_githash except AttributeError: last_githash = version_module.githash current_release = version_module.release current_debug = version_module.debug except ImportError: version_module = None last_generated_version = None last_githash = None current_release = None current_debug = None if release is None: # Keep whatever the current value is, if it exists release = bool(current_release) if debug is None: # Likewise, keep whatever the current value is, if it exists debug = bool(current_debug) package_srcdir = os.path.join(srcdir, *packagename.split('.')) version_py = os.path.join(package_srcdir, 'version.py') if (last_generated_version != version or current_release != release or current_debug != debug): if '-q' not in sys.argv and '--quiet' not in sys.argv: log.set_threshold(log.INFO) if is_distutils_display_option(): # Always silence unnecessary log messages when display options are # being used log.set_threshold(log.WARN) log.info('Freezing version number to {0}'.format(version_py)) with open(version_py, 'w') as f: # This overwrites the actual version.py f.write(_get_version_py_str(packagename, version, last_githash, release, debug, uses_git=uses_git)) invalidate_caches() if version_module: imp.reload(version_module) def get_pkg_version_module(packagename, fromlist=None): """Returns the package's .version module generated by `astropy_helpers.version_helpers.generate_version_py`. Raises an ImportError if the version module is not found. If ``fromlist`` is an iterable, return a tuple of the members of the version module corresponding to the member names given in ``fromlist``. Raises an `AttributeError` if any of these module members are not found. """ if not fromlist: # Due to a historical quirk of Python's import implementation, # __import__ will not return submodules of a package if 'fromlist' is # empty. # TODO: For Python 3.1 and up it may be preferable to use importlib # instead of the __import__ builtin return __import__(packagename + '.version', fromlist=['']) else: mod = __import__(packagename + '.version', fromlist=fromlist) return tuple(getattr(mod, member) for member in fromlist) asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/0000755000175000017500000000000013246031665021746 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/local/0000755000175000017500000000000013246031665023040 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/local/python3_local_links.inv0000644000175000017500000000122213243564211027524 0ustar dandan00000000000000# Sphinx inventory version 2 # Project: Python # Version: 3.5 # The remainder of this file should be compressed using zlib. x0{b$.!YTUa*!Qq{h\;ٯgɁlv VA#jolGN dk~#k40Zv]'`Z*H? 
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/local/python2_local_links.inv0000644000175000017500000000106213246003560027523 0ustar dandan00000000000000# Sphinx inventory version 2
# Project: Python
# Version: 2.7 and 3.5
# The remainder of this file should be compressed using zlib.
{% if not embedded %}{% endif %} {% endblock %} {% block header %}
{{ theme_logotext1 }}{{ theme_logotext2 }}{{ theme_logotext3 }}
  • Index
  • Modules
  • {% block sidebarsearch %} {% include "searchbox.html" %} {% endblock %}
{% endblock %}
{% block relbar1 %} {% endblock %}
{# Silence the bottom relbar. #}
{% block relbar2 %}{% endblock %}
{%- block footer %}
{%- if edit_on_github %} {{ edit_on_github_page_message }} {%- endif %}
{%- if show_source and has_source and sourcename %} {{ _('Page Source') }} {%- endif %}
Back to Top
{%- if show_copyright %}
  {%- if hasdoc('copyright') %}
    {% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
  {%- else %}
    {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
  {%- endif %}
{%- endif %}
{%- if show_sphinx %}
  {% trans sphinx_version=sphinx_version|e %}Created using Sphinx {{ sphinx_version }}.{% endtrans %}
{%- endif %}
{%- if last_updated %}
  {% trans last_updated=last_updated|e %}Last built {{ last_updated }}.{% endtrans %}
{%- endif %}
{%- endblock %} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png
[binary PNG image data omitted]
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.js
/*
 * sidebar.js
 * ~~~~~~~~~~
 *
 * This script makes the Sphinx sidebar collapsible.
 *
 * .sphinxsidebar contains .sphinxsidebarwrapper.  This script adds
 * in .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton
 * used to collapse and expand the sidebar.
 *
 * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden
 * and the width of the sidebar and the margin-left of the document
 * are decreased. When the sidebar is expanded the opposite happens.
 * This script saves a per-browser/per-session cookie used to
 * remember the position of the sidebar among the pages.
 * Once the browser is closed the cookie is deleted and the position
 * reset to the default (expanded).
 *
 * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */
$(function() {
  // global elements used by the functions.
// the 'sidebarbutton' element is defined as global after its // creation, in the add_sidebar_button function var bodywrapper = $('.bodywrapper'); var sidebar = $('.sphinxsidebar'); var sidebarwrapper = $('.sphinxsidebarwrapper'); // for some reason, the document has no sidebar; do not run into errors if (!sidebar.length) return; // original margin-left of the bodywrapper and width of the sidebar // with the sidebar expanded var bw_margin_expanded = bodywrapper.css('margin-left'); var ssb_width_expanded = sidebar.width(); // margin-left of the bodywrapper and width of the sidebar // with the sidebar collapsed var bw_margin_collapsed = 12; var ssb_width_collapsed = 12; // custom colors var dark_color = '#404040'; var light_color = '#505050'; function sidebar_is_collapsed() { return sidebarwrapper.is(':not(:visible)'); } function toggle_sidebar() { if (sidebar_is_collapsed()) expand_sidebar(); else collapse_sidebar(); } function collapse_sidebar() { sidebarwrapper.hide(); sidebar.css('width', ssb_width_collapsed); bodywrapper.css('margin-left', bw_margin_collapsed); sidebarbutton.css({ 'margin-left': '-1px', 'height': bodywrapper.height(), 'border-radius': '3px' }); sidebarbutton.find('span').text('»'); sidebarbutton.attr('title', _('Expand sidebar')); document.cookie = 'sidebar=collapsed'; } function expand_sidebar() { bodywrapper.css('margin-left', bw_margin_expanded); sidebar.css('width', ssb_width_expanded); sidebarwrapper.show(); sidebarbutton.css({ 'margin-left': ssb_width_expanded - 12, 'height': bodywrapper.height(), 'border-radius': '0px 3px 3px 0px' }); sidebarbutton.find('span').text('«'); sidebarbutton.attr('title', _('Collapse sidebar')); document.cookie = 'sidebar=expanded'; } function add_sidebar_button() { sidebarwrapper.css({ 'float': 'left', 'margin-right': '0', 'width': ssb_width_expanded - 18 }); // create the button sidebar.append('
«
'); var sidebarbutton = $('#sidebarbutton'); // find the height of the viewport to center the '<<' in the page var viewport_height; if (window.innerHeight) viewport_height = window.innerHeight; else viewport_height = $(window).height(); var sidebar_offset = sidebar.offset().top; var sidebar_height = Math.max(bodywrapper.height(), sidebar.height()); sidebarbutton.find('span').css({ 'font-family': '"Lucida Grande",Arial,sans-serif', 'display': 'block', 'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10, 'width': 12, 'position': 'fixed', 'text-align': 'center' }); sidebarbutton.click(toggle_sidebar); sidebarbutton.attr('title', _('Collapse sidebar')); sidebarbutton.css({ 'color': '#FFFFFF', 'background-color': light_color, 'border': '1px solid ' + light_color, 'border-radius': '0px 3px 3px 0px', 'font-size': '1.2em', 'cursor': 'pointer', 'height': sidebar_height, 'padding-top': '1px', 'margin': '-1px', 'margin-left': ssb_width_expanded - 12 }); sidebarbutton.hover( function () { $(this).css('background-color', dark_color); }, function () { $(this).css('background-color', light_color); } ); } function set_position_from_cookie() { if (!document.cookie) return; var items = document.cookie.split(';'); for(var k=0; k>>] button on the top-right corner of code samples to hide * the >>> and ... prompts and the output and thus make the code * copyable. */ var div = $('.highlight-python .highlight,' + '.highlight-python3 .highlight,' + '.highlight-default .highlight') var pre = div.find('pre'); // get the styles from the current theme pre.parent().parent().css('position', 'relative'); var hide_text = 'Hide the prompts and output'; var show_text = 'Show the prompts and output'; var border_width = pre.css('border-top-width'); var border_style = pre.css('border-top-style'); var border_color = pre.css('border-top-color'); var button_styles = { 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0', 'border-color': border_color, 'border-style': border_style, 'border-width': border_width, 'color': border_color, 'text-size': '75%', 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em', 'border-radius': '0 3px 0 0' } // create and add the button to all the code blocks that contain >>> div.each(function(index) { var jthis = $(this); if (jthis.find('.gp').length > 0) { var button = $('>>>'); button.css(button_styles) button.attr('title', hide_text); button.data('hidden', 'false'); jthis.prepend(button); } // tracebacks (.gt) contain bare text elements that need to be // wrapped in a span to work with .nextUntil() (see later) jthis.find('pre:has(.gt)').contents().filter(function() { return ((this.nodeType == 3) && (this.data.trim().length > 0)); }).wrap(''); }); // define the behavior of the button when it's clicked $('.copybutton').click(function(e){ e.preventDefault(); var button = $(this); if (button.data('hidden') === 'false') { // hide the code output button.parent().find('.go, .gp, .gt').hide(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden'); button.css('text-decoration', 'line-through'); button.attr('title', show_text); button.data('hidden', 'true'); } else { // show the code output button.parent().find('.go, .gp, .gt').show(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible'); button.css('text-decoration', 'none'); button.attr('title', hide_text); button.data('hidden', 'false'); } }); }); ././@LongLink0000000000000000000000000000014600000000000011216 Lustar 
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout.svg
[SVG image data omitted]
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico
[binary ICO image data omitted]
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.svg
[SVG image data omitted]
asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout_20.png
[binary PNG image data omitted]
{%- endif %} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html

Table of Contents

{{ toctree(maxdepth=-1, titles_only=true) }} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html0000644000175000017500000000004213243564211031424 0ustar dandan00000000000000

Page Contents

{{ toc }} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/0000755000175000017500000000000013246031665022546 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/changelog_links.py0000644000175000017500000000554313243564211026251 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This sphinx extension makes the issue numbers in the changelog into links to GitHub issues. """ from __future__ import print_function import re from docutils.nodes import Text, reference BLOCK_PATTERN = re.compile('\[#.+\]', flags=re.DOTALL) ISSUE_PATTERN = re.compile('#[0-9]+') def process_changelog_links(app, doctree, docname): for rex in app.changelog_links_rexes: if rex.match(docname): break else: # if the doc doesn't match any of the changelog regexes, don't process return app.info('[changelog_links] Adding changelog links to "{0}"'.format(docname)) for item in doctree.traverse(): if not isinstance(item, Text): continue # We build a new list of items to replace the current item. If # a link is found, we need to use a 'reference' item. children = [] # First cycle through blocks of issues (delimited by []) then # iterate inside each one to find the individual issues. prev_block_end = 0 for block in BLOCK_PATTERN.finditer(item): block_start, block_end = block.start(), block.end() children.append(Text(item[prev_block_end:block_start])) block = item[block_start:block_end] prev_end = 0 for m in ISSUE_PATTERN.finditer(block): start, end = m.start(), m.end() children.append(Text(block[prev_end:start])) issue_number = block[start:end] refuri = app.config.github_issues_url + issue_number[1:] children.append(reference(text=issue_number, name=issue_number, refuri=refuri)) prev_end = end prev_block_end = block_end # If no issues were found, this adds the whole item, # otherwise it adds the remaining text. children.append(Text(block[prev_end:block_end])) # If no blocks were found, this adds the whole item, otherwise # it adds the remaining text. children.append(Text(item[prev_block_end:])) # Replace item by the new list of items we have generated, # which may contain links. 
item.parent.replace(item, children) def setup_patterns_rexes(app): app.changelog_links_rexes = [re.compile(pat) for pat in app.config.changelog_links_docpattern] def setup(app): app.connect('doctree-resolved', process_changelog_links) app.connect('builder-inited', setup_patterns_rexes) app.add_config_value('github_issues_url', None, True) app.add_config_value('changelog_links_docpattern', ['.*changelog.*', 'whatsnew/.*'], True) return {'parallel_read_safe': True, 'parallel_write_safe': True} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/tocdepthfix.py0000644000175000017500000000137013243564211025435 0ustar dandan00000000000000from sphinx import addnodes def fix_toc_entries(app, doctree): # Get the docname; I don't know why this isn't just passed in to the # callback # This seems a bit unreliable as it's undocumented, but it's not "private" # either: docname = app.builder.env.temp_data['docname'] if app.builder.env.metadata[docname].get('tocdepth', 0) != 0: # We need to reprocess any TOC nodes in the doctree and make sure all # the files listed in any TOCs are noted for treenode in doctree.traverse(addnodes.toctree): app.builder.env.note_toctree(docname, treenode) def setup(app): app.connect('doctree-read', fix_toc_entries) return {'parallel_read_safe': True, 'parallel_write_safe': True} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/edit_on_github.py0000644000175000017500000001346413243564211026106 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This extension makes it easy to edit documentation on github. It adds links associated with each docstring that go to the corresponding view source page on Github. From there, the user can push the "Edit" button, edit the docstring, and submit a pull request. It has the following configuration options (to be set in the project's ``conf.py``): * ``edit_on_github_project`` The name of the github project, in the form "username/projectname". * ``edit_on_github_branch`` The name of the branch to edit. If this is a released version, this should be a git tag referring to that version. For a dev version, it often makes sense for it to be "master". It may also be a git hash. * ``edit_on_github_source_root`` The location within the source tree of the root of the Python package. Defaults to "lib". * ``edit_on_github_doc_root`` The location within the source tree of the root of the documentation source. Defaults to "doc", but it may make sense to set it to "doc/source" if the project uses a separate source directory. * ``edit_on_github_docstring_message`` The phrase displayed in the links to edit a docstring. Defaults to "[edit on github]". * ``edit_on_github_page_message`` The phrase displayed in the links to edit a RST page. Defaults to "[edit this page on github]". * ``edit_on_github_help_message`` The phrase displayed as a tooltip on the edit links. Defaults to "Push the Edit button on the next page" * ``edit_on_github_skip_regex`` When the path to the .rst file matches this regular expression, no "edit this page on github" link will be added. Defaults to ``"_.*"``. """ import inspect import os import re import sys from docutils import nodes from sphinx import addnodes def import_object(modname, name): """ Import the object given by *modname* and *name* and return it. If not found, or the import fails, returns None. 
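    For illustration only (``os.path`` and ``join`` are just example names,
    not taken from the real documentation)::

        join_func = import_object('os.path', 'join')
        # join_func is now os.path.join; it would be None if the import failed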
""" try: __import__(modname) mod = sys.modules[modname] obj = mod for part in name.split('.'): obj = getattr(obj, part) return obj except: return None def get_url_base(app): return 'http://github.com/%s/tree/%s/' % ( app.config.edit_on_github_project, app.config.edit_on_github_branch) def doctree_read(app, doctree): # Get the configuration parameters if app.config.edit_on_github_project == 'REQUIRED': raise ValueError( "The edit_on_github_project configuration variable must be " "provided in the conf.py") source_root = app.config.edit_on_github_source_root url = get_url_base(app) docstring_message = app.config.edit_on_github_docstring_message # Handle the docstring-editing links for objnode in doctree.traverse(addnodes.desc): if objnode.get('domain') != 'py': continue names = set() for signode in objnode: if not isinstance(signode, addnodes.desc_signature): continue modname = signode.get('module') if not modname: continue fullname = signode.get('fullname') if fullname in names: # only one link per name, please continue names.add(fullname) obj = import_object(modname, fullname) anchor = None if obj is not None: try: lines, lineno = inspect.getsourcelines(obj) except: pass else: anchor = '#L%d' % lineno if anchor: real_modname = inspect.getmodule(obj).__name__ path = '%s%s%s.py%s' % ( url, source_root, real_modname.replace('.', '/'), anchor) onlynode = addnodes.only(expr='html') onlynode += nodes.reference( reftitle=app.config.edit_on_github_help_message, refuri=path) onlynode[0] += nodes.inline( '', '', nodes.raw('', ' ', format='html'), nodes.Text(docstring_message), classes=['edit-on-github', 'viewcode-link']) signode += onlynode def html_page_context(app, pagename, templatename, context, doctree): if (templatename == 'page.html' and not re.match(app.config.edit_on_github_skip_regex, pagename)): doc_root = app.config.edit_on_github_doc_root if doc_root != '' and not doc_root.endswith('/'): doc_root += '/' doc_path = os.path.relpath(doctree.get('source'), app.builder.srcdir) url = get_url_base(app) page_message = app.config.edit_on_github_page_message context['edit_on_github'] = url + doc_root + doc_path context['edit_on_github_page_message'] = page_message def setup(app): app.add_config_value('edit_on_github_project', 'REQUIRED', True) app.add_config_value('edit_on_github_branch', 'master', True) app.add_config_value('edit_on_github_source_root', 'lib', True) app.add_config_value('edit_on_github_doc_root', 'doc', True) app.add_config_value('edit_on_github_docstring_message', '[edit on github]', True) app.add_config_value('edit_on_github_page_message', 'Edit This Page on Github', True) app.add_config_value('edit_on_github_help_message', 'Push the Edit button on the next page', True) app.add_config_value('edit_on_github_skip_regex', '_.*', True) app.connect('doctree-read', doctree_read) app.connect('html-page-context', html_page_context) return {'parallel_read_safe': True, 'parallel_write_safe': True} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/doctest.py0000644000175000017500000000364113243564211024564 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This is a set of three directives that allow us to insert metadata about doctests into the .rst files so the testing framework knows which tests to skip. This is quite different from the doctest extension in Sphinx itself, which actually does something. For astropy, all of the testing is centrally managed from py.test and Sphinx is not used for running tests. 
""" import re from docutils.nodes import literal_block from docutils.parsers.rst import Directive class DoctestSkipDirective(Directive): has_content = True def run(self): # Check if there is any valid argument, and skip it. Currently only # 'win32' is supported in astropy.tests.pytest_plugins. if re.match('win32', self.content[0]): self.content = self.content[2:] code = '\n'.join(self.content) return [literal_block(code, code)] class DoctestOmitDirective(Directive): has_content = True def run(self): # Simply do not add any content when this directive is encountered return [] class DoctestRequiresDirective(DoctestSkipDirective): # This is silly, but we really support an unbounded number of # optional arguments optional_arguments = 64 def setup(app): app.add_directive('doctest-requires', DoctestRequiresDirective) app.add_directive('doctest-skip', DoctestSkipDirective) app.add_directive('doctest-skip-all', DoctestSkipDirective) app.add_directive('doctest', DoctestSkipDirective) # Code blocks that use this directive will not appear in the generated # documentation. This is intended to hide boilerplate code that is only # useful for testing documentation using doctest, but does not actually # belong in the documentation itself. app.add_directive('testsetup', DoctestOmitDirective) return {'parallel_read_safe': True, 'parallel_write_safe': True} asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/tests/0000755000175000017500000000000013246031665023710 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/tests/__init__.py0000644000175000017500000000000013243564211026002 0ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/ext/__init__.py0000644000175000017500000000010213243564211024643 0ustar dandan00000000000000from __future__ import division, absolute_import, print_function asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/conf.py0000644000175000017500000002721713246003560023247 0ustar dandan00000000000000# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst # # Astropy shared Sphinx settings. These settings are shared between # astropy itself and affiliated packages. # # Note that not all possible configuration values are present in this file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import sys import warnings from os import path import sphinx from distutils.version import LooseVersion # -- General configuration ---------------------------------------------------- # The version check in Sphinx itself can only compare the major and # minor parts of the version number, not the micro. To do a more # specific version check, call check_sphinx_version("x.y.z.") from # your project's conf.py needs_sphinx = '1.3' on_rtd = os.environ.get('READTHEDOCS', None) == 'True' def check_sphinx_version(expected_version): sphinx_version = LooseVersion(sphinx.__version__) expected_version = LooseVersion(expected_version) if sphinx_version < expected_version: raise RuntimeError( "At least Sphinx version {0} is required to build this " "documentation. Found {1}.".format( expected_version, sphinx_version)) # Configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { 'python': ('https://docs.python.org/3/', (None, 'http://data.astropy.org/intersphinx/python3.inv')), 'pythonloc': ('http://docs.python.org/', path.abspath(path.join(path.dirname(__file__), 'local/python3_local_links.inv'))), 'numpy': ('https://docs.scipy.org/doc/numpy/', (None, 'http://data.astropy.org/intersphinx/numpy.inv')), 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', (None, 'http://data.astropy.org/intersphinx/scipy.inv')), 'matplotlib': ('http://matplotlib.org/', (None, 'http://data.astropy.org/intersphinx/matplotlib.inv')), 'astropy': ('http://docs.astropy.org/en/stable/', None), 'h5py': ('http://docs.h5py.org/en/latest/', None)} if sys.version_info[0] == 2: intersphinx_mapping['python'] = ( 'https://docs.python.org/2/', (None, 'http://data.astropy.org/intersphinx/python2.inv')) intersphinx_mapping['pythonloc'] = ( 'http://docs.python.org/', path.abspath(path.join(path.dirname(__file__), 'local/python2_local_links.inv'))) # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # Add any paths that contain templates here, relative to this directory. # templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # The reST default role (used for this markup: `text`) to use for all # documents. Set to the "smart" one. default_role = 'obj' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # This is added to the end of RST files - a good place to put substitutions to # be used globally. rst_epilog = """ .. _Astropy: http://astropy.org """ # A list of warning types to suppress arbitrary warning messages. We mean to # override directives in astropy_helpers.sphinx.ext.autodoc_enhancements, # thus need to ignore those warning. This can be removed once the patch gets # released in upstream Sphinx (https://github.com/sphinx-doc/sphinx/pull/1843). # Suppress the warnings requires Sphinx v1.4.2 suppress_warnings = ['app.add_directive', ] # -- Project information ------------------------------------------------------ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. #pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Settings for extensions and extension options ---------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
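# As a rough sketch (not part of this file's configuration), an affiliated
# package's own conf.py usually imports this shared configuration wholesale
# and then appends its project-specific extensions; 'mypackage.sphinxext'
# below is a made-up name:
#
#     from astropy_helpers.sphinx.conf import *
#     extensions += ['mypackage.sphinxext']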
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.viewcode', 'astropy_helpers.extern.numpydoc', 'astropy_helpers.extern.automodapi.automodapi', 'astropy_helpers.extern.automodapi.smart_resolver', 'astropy_helpers.sphinx.ext.tocdepthfix', 'astropy_helpers.sphinx.ext.doctest', 'astropy_helpers.sphinx.ext.changelog_links'] if not on_rtd and LooseVersion(sphinx.__version__) < LooseVersion('1.4'): extensions.append('sphinx.ext.pngmath') else: extensions.append('sphinx.ext.mathjax') try: import matplotlib.sphinxext.plot_directive extensions += [matplotlib.sphinxext.plot_directive.__name__] # AttributeError is checked here in case matplotlib is installed but # Sphinx isn't. Note that this module is imported by the config file # generator, even if we're not building the docs. except (ImportError, AttributeError): warnings.warn( "matplotlib's plot_directive could not be imported. " + "Inline plots will not be included in the output") # Don't show summaries of the members in each class along with the # class' docstring numpydoc_show_class_members = False autosummary_generate = True automodapi_toctreedirnm = 'api' # Class documentation should contain *both* the class docstring and # the __init__ docstring autoclass_content = "both" # Render inheritance diagrams in SVG graphviz_output_format = "svg" graphviz_dot_args = [ '-Nfontsize=10', '-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif', '-Efontsize=10', '-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif', '-Gfontsize=10', '-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif' ] # -- Options for HTML output ------------------------------------------------- # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [path.abspath(path.join(path.dirname(__file__), 'themes'))] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'bootstrap-astropy' # Custom sidebar templates, maps document names to template names. html_sidebars = { '**': ['localtoc.html'], 'search': [], 'genindex': [], 'py-modindex': [], } # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # included in the bootstrap-astropy theme html_favicon = path.join(html_theme_path[0], html_theme, 'static', 'astropy_logo.ico') # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%d %b %Y' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. 
#html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # -- Options for LaTeX output ------------------------------------------------ # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. latex_toplevel_sectioning = 'part' # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False latex_elements = {} # Additional stuff for the LaTeX preamble. latex_elements['preamble'] = r""" % Use a more modern-looking monospace font \usepackage{inconsolata} % The enumitem package provides unlimited nesting of lists and enums. % Sphinx may use this in the future, in which case this can be removed. % See https://bitbucket.org/birkenfeld/sphinx/issue/777/latex-output-too-deeply-nested \usepackage{enumitem} \setlistdepth{15} % In the parameters section, place a newline after the Parameters % header. (This is stolen directly from Numpy's conf.py, since it % affects Numpy-style docstrings). \usepackage{expdlist} \let\latexdescription=\description \def\description{\latexdescription{}{} \breaklabel} % Support the superscript Unicode numbers used by the "unicode" units % formatter \DeclareUnicodeCharacter{2070}{\ensuremath{^0}} \DeclareUnicodeCharacter{00B9}{\ensuremath{^1}} \DeclareUnicodeCharacter{00B2}{\ensuremath{^2}} \DeclareUnicodeCharacter{00B3}{\ensuremath{^3}} \DeclareUnicodeCharacter{2074}{\ensuremath{^4}} \DeclareUnicodeCharacter{2075}{\ensuremath{^5}} \DeclareUnicodeCharacter{2076}{\ensuremath{^6}} \DeclareUnicodeCharacter{2077}{\ensuremath{^7}} \DeclareUnicodeCharacter{2078}{\ensuremath{^8}} \DeclareUnicodeCharacter{2079}{\ensuremath{^9}} \DeclareUnicodeCharacter{207B}{\ensuremath{^-}} \DeclareUnicodeCharacter{00B0}{\ensuremath{^{\circ}}} \DeclareUnicodeCharacter{2032}{\ensuremath{^{\prime}}} \DeclareUnicodeCharacter{2033}{\ensuremath{^{\prime\prime}}} % Make the "warning" and "notes" sections use a sans-serif font to % make them stand out more. \renewenvironment{notice}[2]{ \def\py@noticetype{#1} \csname py@noticestart@#1\endcsname \textsf{\textbf{#2}} }{\csname py@noticeend@\py@noticetype\endcsname} """ # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # -- Options for the linkcheck builder ---------------------------------------- # A timeout value, in seconds, for the linkcheck builder linkcheck_timeout = 60 asdf-1.3.3/astropy_helpers/astropy_helpers/sphinx/__init__.py0000644000175000017500000000066513243564211024061 0ustar dandan00000000000000""" This package contains utilities and extensions for the Astropy sphinx documentation. 
In particular, the `astropy.sphinx.conf` should be imported by the sphinx ``conf.py`` file for affiliated packages that wish to make use of the Astropy documentation format. Note that some sphinx extensions which are bundled as-is (numpydoc and sphinx-automodapi) are included in astropy_helpers.extern rather than astropy_helpers.sphinx.ext. """ asdf-1.3.3/astropy_helpers/astropy_helpers/test_helpers.py0000644000175000017500000000100313246003560023473 0ustar dandan00000000000000from __future__ import (absolute_import, division, print_function, unicode_literals) import warnings from .commands.test import AstropyTest # noqa # Leaving this module here for now, but really it needn't exist # (and it's doubtful that any code depends on it anymore) warnings.warn('The astropy_helpers.test_helpers module is deprecated as ' 'of version 1.1.0; the AstropyTest command can be found in ' 'astropy_helpers.commands.test.', DeprecationWarning) asdf-1.3.3/astropy_helpers/astropy_helpers/compat/0000755000175000017500000000000013246031665021720 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/compat/__init__.py0000644000175000017500000000056013243564211024025 0ustar dandan00000000000000def _fix_user_options(options): """ This is for Python 2.x and 3.x compatibility. distutils expects Command options to all be byte strings on Python 2 and Unicode strings on Python 3. """ def to_str_or_none(x): if x is None: return None return str(x) return [tuple(to_str_or_none(x) for x in y) for y in options] asdf-1.3.3/astropy_helpers/astropy_helpers/conftest.py0000644000175000017500000000315713246003560022633 0ustar dandan00000000000000# This file contains settings for pytest that are specific to astropy-helpers. # Since we run many of the tests in sub-processes, we need to collect coverage # data inside each subprocess and then combine it into a single .coverage file. # To do this we set up a list which run_setup appends coverage objects to. # This is not intended to be used by packages other than astropy-helpers. import os from collections import defaultdict try: from coverage import CoverageData except ImportError: HAS_COVERAGE = False else: HAS_COVERAGE = True if HAS_COVERAGE: SUBPROCESS_COVERAGE = [] def pytest_configure(config): if HAS_COVERAGE: SUBPROCESS_COVERAGE[:] = [] def pytest_unconfigure(config): if HAS_COVERAGE: # We create an empty coverage data object combined_cdata = CoverageData() lines = defaultdict(list) for cdata in SUBPROCESS_COVERAGE: # For each CoverageData object, we go through all the files and # change the filename from one which might be a temporary path # to the local filename. We then only keep files that actually # exist. 
for filename in cdata.measured_files(): try: pos = filename.rindex('astropy_helpers') except ValueError: continue short_filename = filename[pos:] if os.path.exists(short_filename): lines[os.path.abspath(short_filename)].extend(cdata.lines(filename)) combined_cdata.add_lines(lines) combined_cdata.write_file('.coverage.subprocess') asdf-1.3.3/astropy_helpers/astropy_helpers/tests/0000755000175000017500000000000013246031665021577 5ustar dandan00000000000000asdf-1.3.3/astropy_helpers/astropy_helpers/tests/coveragerc0000644000175000017500000000131013246003560023626 0ustar dandan00000000000000[run] source = astropy_helpers omit = astropy_helpers/commands/_test_compat.py astropy_helpers/compat/* astropy_helpers/*/setup_package.py astropy_helpers/extern/* astropy_helpers/extern/*/* astropy_helpers/tests/* astropy_helpers/conftest.py */test_pkg/* [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about packages we have installed except ImportError # Don't complain if tests don't hit assertions raise AssertionError raise NotImplementedError # Don't complain about script hooks def main\(.*\): # Ignore branches that don't pertain to this version of Python pragma: py{ignore_python_version} asdf-1.3.3/astropy_helpers/astropy_helpers/tests/test_git_helpers.py0000644000175000017500000002013213246003560025504 0ustar dandan00000000000000import glob import imp import os import pkgutil import re import sys import tarfile import pytest from warnings import catch_warnings from . import reset_setup_helpers, reset_distutils_log # noqa from . import run_cmd, run_setup, cleanup_import from astropy_helpers.git_helpers import get_git_devstr PY3 = sys.version_info[0] == 3 if PY3: _text_type = str else: _text_type = unicode # noqa _DEV_VERSION_RE = re.compile(r'\d+\.\d+(?:\.\d+)?\.dev(\d+)') ASTROPY_HELPERS_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) TEST_VERSION_SETUP_PY = """\ #!/usr/bin/env python import sys from setuptools import setup NAME = 'apyhtest_eva' VERSION = {version!r} RELEASE = 'dev' not in VERSION sys.path.insert(0, r'{astropy_helpers_path}') from astropy_helpers.git_helpers import get_git_devstr from astropy_helpers.version_helpers import generate_version_py if not RELEASE: VERSION += get_git_devstr(False) generate_version_py(NAME, VERSION, RELEASE, False, uses_git=not RELEASE) setup(name=NAME, version=VERSION, packages=['apyhtest_eva']) """ TEST_VERSION_INIT = """\ try: from .version import version as __version__ from .version import githash as __githash__ except ImportError: __version__ = __githash__ = '' """ @pytest.fixture def version_test_package(tmpdir, request): def make_test_package(version='42.42.dev'): test_package = tmpdir.mkdir('test_package') test_package.join('setup.py').write( TEST_VERSION_SETUP_PY.format(version=version, astropy_helpers_path=ASTROPY_HELPERS_PATH)) test_package.mkdir('apyhtest_eva').join('__init__.py').write(TEST_VERSION_INIT) with test_package.as_cwd(): run_cmd('git', ['init']) run_cmd('git', ['add', '--all']) run_cmd('git', ['commit', '-m', 'test package']) if '' in sys.path: sys.path.remove('') sys.path.insert(0, '') def finalize(): cleanup_import('apyhtest_eva') request.addfinalizer(finalize) return test_package return make_test_package def test_update_git_devstr(version_test_package, capsys): """Tests that the commit number in the package's version string updates after git commits even without re-running setup.py. 
""" # We have to call version_test_package to actually create the package test_pkg = version_test_package() with test_pkg.as_cwd(): run_setup('setup.py', ['--version']) stdout, stderr = capsys.readouterr() version = stdout.strip() m = _DEV_VERSION_RE.match(version) assert m, ( "Stdout did not match the version string pattern:" "\n\n{0}\n\nStderr:\n\n{1}".format(stdout, stderr)) revcount = int(m.group(1)) import apyhtest_eva assert apyhtest_eva.__version__ == version # Make a silly git commit with open('.test', 'w'): pass run_cmd('git', ['add', '.test']) run_cmd('git', ['commit', '-m', 'test']) import apyhtest_eva.version imp.reload(apyhtest_eva.version) # Previously this checked packagename.__version__, but in order for that to # be updated we also have to re-import _astropy_init which could be tricky. # Checking directly that the packagename.version module was updated is # sufficient: m = _DEV_VERSION_RE.match(apyhtest_eva.version.version) assert m assert int(m.group(1)) == revcount + 1 # This doesn't test astropy_helpers.get_helpers.update_git_devstr directly # since a copy of that function is made in packagename.version (so that it # can work without astropy_helpers installed). In order to get test # coverage on the actual astropy_helpers copy of that function just call it # directly and compare to the value in packagename from astropy_helpers.git_helpers import update_git_devstr newversion = update_git_devstr(version, path=str(test_pkg)) assert newversion == apyhtest_eva.version.version def test_version_update_in_other_repos(version_test_package, tmpdir): """ Regression test for https://github.com/astropy/astropy-helpers/issues/114 and for https://github.com/astropy/astropy-helpers/issues/107 """ test_pkg = version_test_package() with test_pkg.as_cwd(): run_setup('setup.py', ['build']) # Add the path to the test package to sys.path for now sys.path.insert(0, str(test_pkg)) try: import apyhtest_eva m = _DEV_VERSION_RE.match(apyhtest_eva.__version__) assert m correct_revcount = int(m.group(1)) with tmpdir.as_cwd(): testrepo = tmpdir.mkdir('testrepo') testrepo.chdir() # Create an empty git repo run_cmd('git', ['init']) import apyhtest_eva.version imp.reload(apyhtest_eva.version) m = _DEV_VERSION_RE.match(apyhtest_eva.version.version) assert m assert int(m.group(1)) == correct_revcount correct_revcount = int(m.group(1)) # Add several commits--more than the revcount for the apyhtest_eva package for idx in range(correct_revcount + 5): test_filename = '.test' + str(idx) testrepo.ensure(test_filename) run_cmd('git', ['add', test_filename]) run_cmd('git', ['commit', '-m', 'A message']) import apyhtest_eva.version imp.reload(apyhtest_eva.version) m = _DEV_VERSION_RE.match(apyhtest_eva.version.version) assert m assert int(m.group(1)) == correct_revcount correct_revcount = int(m.group(1)) finally: sys.path.remove(str(test_pkg)) @pytest.mark.parametrize('version', ['1.0.dev', '1.0']) def test_installed_git_version(version_test_package, version, tmpdir, capsys): """ Test for https://github.com/astropy/astropy-helpers/issues/87 Ensures that packages installed with astropy_helpers have a correct copy of the git hash of the installed commit. 
""" # To test this, it should suffice to build a source dist, unpack it # somewhere outside the git repository, and then do a build and import # from the build directory--no need to "install" as such test_pkg = version_test_package(version) with test_pkg.as_cwd(): run_setup('setup.py', ['build']) try: import apyhtest_eva githash = apyhtest_eva.__githash__ assert githash and isinstance(githash, _text_type) # Ensure that it does in fact look like a git hash and not some # other arbitrary string assert re.match(r'[0-9a-f]{40}', githash) finally: cleanup_import('apyhtest_eva') run_setup('setup.py', ['sdist', '--dist-dir=dist', '--formats=gztar']) tgzs = glob.glob(os.path.join('dist', '*.tar.gz')) assert len(tgzs) == 1 tgz = test_pkg.join(tgzs[0]) build_dir = tmpdir.mkdir('build_dir') tf = tarfile.open(str(tgz), mode='r:gz') tf.extractall(str(build_dir)) with build_dir.as_cwd(): pkg_dir = glob.glob('apyhtest_eva-*')[0] os.chdir(pkg_dir) with catch_warnings(record=True) as w: run_setup('setup.py', ['build']) try: import apyhtest_eva loader = pkgutil.get_loader('apyhtest_eva') # Ensure we are importing the 'packagename' that was just unpacked # into the build_dir assert loader.get_filename().startswith(str(build_dir)) assert apyhtest_eva.__githash__ == githash finally: cleanup_import('apyhtest_eva') def test_get_git_devstr(tmpdir): dirpath = str(tmpdir) warn_msg = "No git repository present at" # Verify as much as possible, but avoid dealing with paths on windows if not sys.platform.startswith('win'): warn_msg += " '{}'".format(dirpath) with catch_warnings(record=True) as w: devstr = get_git_devstr(path=dirpath) assert devstr == '0' assert len(w) == 1 assert str(w[0].message).startswith(warn_msg) asdf-1.3.3/astropy_helpers/astropy_helpers/tests/test_setup_helpers.py0000644000175000017500000004355313246003560026075 0ustar dandan00000000000000import os import sys import stat import shutil import contextlib import pytest from textwrap import dedent from setuptools import Distribution from ..setup_helpers import get_package_info, register_commands from ..commands import build_ext from . import reset_setup_helpers, reset_distutils_log # noqa from . 
import run_setup, cleanup_import ASTROPY_HELPERS_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) # Determine whether we're in a PY2 environment without using six USING_PY2 = sys.version_info < (3,0,0) def _extension_test_package(tmpdir, request, extension_type='c'): """Creates a simple test package with an extension module.""" test_pkg = tmpdir.mkdir('test_pkg') test_pkg.mkdir('apyhtest_eva').ensure('__init__.py') # TODO: It might be later worth making this particular test package into a # reusable fixture for other build_ext tests if extension_type in ('c', 'both'): # A minimal C extension for testing test_pkg.join('apyhtest_eva', 'unit01.c').write(dedent("""\ #include #ifndef PY3K #if PY_MAJOR_VERSION >= 3 #define PY3K 1 #else #define PY3K 0 #endif #endif #if PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "unit01", NULL, -1, NULL }; PyMODINIT_FUNC PyInit_unit01(void) { return PyModule_Create(&moduledef); } #else PyMODINIT_FUNC initunit01(void) { Py_InitModule3("unit01", NULL, NULL); } #endif """)) if extension_type in ('pyx', 'both'): # A minimal Cython extension for testing test_pkg.join('apyhtest_eva', 'unit02.pyx').write(dedent("""\ print("Hello cruel angel.") """)) if extension_type == 'c': extensions = ['unit01.c'] elif extension_type == 'pyx': extensions = ['unit02.pyx'] elif extension_type == 'both': extensions = ['unit01.c', 'unit02.pyx'] extensions_list = [ "Extension('apyhtest_eva.{0}', [join('apyhtest_eva', '{1}')])".format( os.path.splitext(extension)[0], extension) for extension in extensions] test_pkg.join('apyhtest_eva', 'setup_package.py').write(dedent("""\ from setuptools import Extension from os.path import join def get_extensions(): return [{0}] """.format(', '.join(extensions_list)))) test_pkg.join('setup.py').write(dedent("""\ import sys from os.path import join from setuptools import setup sys.path.insert(0, r'{astropy_helpers_path}') from astropy_helpers.setup_helpers import register_commands from astropy_helpers.setup_helpers import get_package_info from astropy_helpers.version_helpers import generate_version_py if '--no-cython' in sys.argv: from astropy_helpers.commands import build_ext build_ext.should_build_with_cython = lambda *args: False sys.argv.remove('--no-cython') NAME = 'apyhtest_eva' VERSION = '0.1' RELEASE = True cmdclassd = register_commands(NAME, VERSION, RELEASE) generate_version_py(NAME, VERSION, RELEASE, False, False) package_info = get_package_info() setup( name=NAME, version=VERSION, cmdclass=cmdclassd, **package_info ) """.format(astropy_helpers_path=ASTROPY_HELPERS_PATH))) if '' in sys.path: sys.path.remove('') sys.path.insert(0, '') def finalize(): cleanup_import('apyhtest_eva') request.addfinalizer(finalize) return test_pkg @pytest.fixture def extension_test_package(tmpdir, request): return _extension_test_package(tmpdir, request, extension_type='both') @pytest.fixture def c_extension_test_package(tmpdir, request): return _extension_test_package(tmpdir, request, extension_type='c') @pytest.fixture def pyx_extension_test_package(tmpdir, request): return _extension_test_package(tmpdir, request, extension_type='pyx') def test_cython_autoextensions(tmpdir): """ Regression test for https://github.com/astropy/astropy-helpers/pull/19 Ensures that Cython extensions in sub-packages are discovered and built only once. 
""" # Make a simple test package test_pkg = tmpdir.mkdir('test_pkg') test_pkg.mkdir('yoda').mkdir('luke') test_pkg.ensure('yoda', '__init__.py') test_pkg.ensure('yoda', 'luke', '__init__.py') test_pkg.join('yoda', 'luke', 'dagobah.pyx').write( """def testfunc(): pass""") # Required, currently, for get_package_info to work register_commands('yoda', '0.0', False, srcdir=str(test_pkg)) package_info = get_package_info(str(test_pkg)) assert len(package_info['ext_modules']) == 1 assert package_info['ext_modules'][0].name == 'yoda.luke.dagobah' def test_compiler_module(capsys, c_extension_test_package): """ Test ensuring that the compiler module is built and installed for packages that have extension modules. """ test_pkg = c_extension_test_package install_temp = test_pkg.mkdir('install_temp') with test_pkg.as_cwd(): # This is one of the simplest ways to install just a package into a # test directory run_setup('setup.py', ['install', '--single-version-externally-managed', '--install-lib={0}'.format(install_temp), '--record={0}'.format(install_temp.join('record.txt'))]) stdout, stderr = capsys.readouterr() assert "No git repository present at" in stderr with install_temp.as_cwd(): import apyhtest_eva # Make sure we imported the apyhtest_eva package from the correct place dirname = os.path.abspath(os.path.dirname(apyhtest_eva.__file__)) assert dirname == str(install_temp.join('apyhtest_eva')) import apyhtest_eva._compiler import apyhtest_eva.version assert apyhtest_eva.version.compiler == apyhtest_eva._compiler.compiler assert apyhtest_eva.version.compiler != 'unknown' def test_no_cython_buildext(capsys, c_extension_test_package, monkeypatch): """ Regression test for https://github.com/astropy/astropy-helpers/pull/35 This tests the custom build_ext command installed by astropy_helpers when used with a project that has no Cython extensions (but does have one or more normal C extensions). """ test_pkg = c_extension_test_package with test_pkg.as_cwd(): run_setup('setup.py', ['build_ext', '--inplace', '--no-cython']) stdout, stderr = capsys.readouterr() assert "No git repository present at" in stderr sys.path.insert(0, str(test_pkg)) try: import apyhtest_eva.unit01 dirname = os.path.abspath(os.path.dirname(apyhtest_eva.unit01.__file__)) assert dirname == str(test_pkg.join('apyhtest_eva')) finally: sys.path.remove(str(test_pkg)) def test_missing_cython_c_files(capsys, pyx_extension_test_package, monkeypatch): """ Regression test for https://github.com/astropy/astropy-helpers/pull/181 Test failure mode when building a package that has Cython modules, but where Cython is not installed and the generated C files are missing. 
""" test_pkg = pyx_extension_test_package with test_pkg.as_cwd(): run_setup('setup.py', ['build_ext', '--inplace', '--no-cython']) stdout, stderr = capsys.readouterr() assert "No git repository present at" in stderr msg = ('Could not find C/C++ file ' '{0}.(c/cpp)'.format('apyhtest_eva/unit02'.replace('/', os.sep))) assert msg in stderr @pytest.mark.parametrize('mode', ['cli', 'cli-w', 'deprecated', 'cli-l', 'cli-error']) def test_build_docs(capsys, tmpdir, mode): """ Test for build_docs """ test_pkg = tmpdir.mkdir('test_pkg') test_pkg.mkdir('mypackage') test_pkg.join('mypackage').join('__init__.py').write(dedent("""\ def test_function(): pass class A(): pass class B(A): pass """)) test_pkg.mkdir('docs') docs = test_pkg.join('docs') autosummary = docs.mkdir('_templates').mkdir('autosummary') autosummary.join('base.rst').write('{% extends "autosummary_core/base.rst" %}') autosummary.join('class.rst').write('{% extends "autosummary_core/class.rst" %}') autosummary.join('module.rst').write('{% extends "autosummary_core/module.rst" %}') docs_dir = test_pkg.join('docs') docs_dir.join('conf.py').write(dedent("""\ import sys sys.path.append("../") import warnings with warnings.catch_warnings(): # ignore matplotlib warning warnings.simplefilter("ignore") from astropy_helpers.sphinx.conf import * exclude_patterns.append('_templates') """)) if mode == 'cli-error': docs_dir.join('conf.py').write(dedent(""" raise ValueError("TestException") """)) docs_dir.join('index.rst').write(dedent("""\ .. automodapi:: mypackage :no-inheritance-diagram: """)) test_pkg.join('setup.py').write(dedent("""\ import sys sys.path.insert(0, r'{astropy_helpers_path}') from os.path import join from setuptools import setup, Extension from astropy_helpers.setup_helpers import register_commands, get_package_info NAME = 'mypackage' VERSION = 0.1 RELEASE = True cmdclassd = register_commands(NAME, VERSION, RELEASE) setup( name=NAME, version=VERSION, cmdclass=cmdclassd, **get_package_info() ) """.format(astropy_helpers_path=ASTROPY_HELPERS_PATH))) with test_pkg.as_cwd(): if mode == 'cli': run_setup('setup.py', ['build_docs']) elif mode == 'cli-w': run_setup('setup.py', ['build_docs', '-w']) elif mode == 'cli-l': run_setup('setup.py', ['build_docs', '-l']) elif mode == 'deprecated': run_setup('setup.py', ['build_sphinx']) stdout, stderr = capsys.readouterr() assert 'AstropyDeprecationWarning' in stderr def test_command_hooks(tmpdir, capsys): """A basic test for pre- and post-command hooks.""" test_pkg = tmpdir.mkdir('test_pkg') test_pkg.mkdir('_welltall_') test_pkg.join('_welltall_', '__init__.py').ensure() # Create a setup_package module with a couple of command hooks in it test_pkg.join('_welltall_', 'setup_package.py').write(dedent("""\ def pre_build_hook(cmd_obj): print('Hello build!') def post_build_hook(cmd_obj): print('Goodbye build!') """)) # A simple setup.py for the test package--running register_commands should # discover and enable the command hooks test_pkg.join('setup.py').write(dedent("""\ import sys from os.path import join from setuptools import setup, Extension sys.path.insert(0, r'{astropy_helpers_path}') from astropy_helpers.setup_helpers import register_commands, get_package_info NAME = '_welltall_' VERSION = 0.1 RELEASE = True cmdclassd = register_commands(NAME, VERSION, RELEASE) setup( name=NAME, version=VERSION, cmdclass=cmdclassd ) """.format(astropy_helpers_path=ASTROPY_HELPERS_PATH))) with test_pkg.as_cwd(): try: run_setup('setup.py', ['build']) finally: cleanup_import('_welltall_') stdout, stderr = 
capsys.readouterr() want = dedent("""\ running build running pre_hook from _welltall_.setup_package for build command Hello build! running post_hook from _welltall_.setup_package for build command Goodbye build! """).strip() assert want in stdout.replace('\r\n', '\n').replace('\r', '\n') def test_adjust_compiler(monkeypatch, tmpdir): """ Regression test for https://github.com/astropy/astropy-helpers/issues/182 """ from distutils import ccompiler, sysconfig class MockLog(object): def __init__(self): self.messages = [] def warn(self, message): self.messages.append(message) good = tmpdir.join('gcc-good') good.write(dedent("""\ #!{python} import sys print('gcc 4.10') sys.exit(0) """.format(python=sys.executable))) good.chmod(stat.S_IRUSR | stat.S_IEXEC) # A "compiler" that reports itself to be a version of Apple's llvm-gcc # which is broken bad = tmpdir.join('gcc-bad') bad.write(dedent("""\ #!{python} import sys print('i686-apple-darwin-llvm-gcc-4.2') sys.exit(0) """.format(python=sys.executable))) bad.chmod(stat.S_IRUSR | stat.S_IEXEC) # A "compiler" that doesn't even know its identity (this reproduces the bug # in #182) ugly = tmpdir.join('gcc-ugly') ugly.write(dedent("""\ #!{python} import sys sys.exit(1) """.format(python=sys.executable))) ugly.chmod(stat.S_IRUSR | stat.S_IEXEC) # Scripts with shebang lines don't work implicitly in Windows when passed # to subprocess.Popen, so... if 'win' in sys.platform: good = ' '.join((sys.executable, str(good))) bad = ' '.join((sys.executable, str(bad))) ugly = ' '.join((sys.executable, str(ugly))) dist = Distribution({}) cmd_cls = build_ext.generate_build_ext_command('astropy', False) cmd = cmd_cls(dist) adjust_compiler = cmd._adjust_compiler @contextlib.contextmanager def test_setup(): log = MockLog() monkeypatch.setattr(build_ext, 'log', log) yield log monkeypatch.undo() @contextlib.contextmanager def compiler_setter_with_environ(compiler): monkeypatch.setenv('CC', compiler) with test_setup() as log: yield log monkeypatch.undo() @contextlib.contextmanager def compiler_setter_with_sysconfig(compiler): monkeypatch.setattr(ccompiler, 'get_default_compiler', lambda: 'unix') monkeypatch.setattr(sysconfig, 'get_config_var', lambda v: compiler) old_cc = os.environ.get('CC') if old_cc is not None: del os.environ['CC'] with test_setup() as log: yield log monkeypatch.undo() monkeypatch.undo() monkeypatch.undo() if old_cc is not None: os.environ['CC'] = old_cc compiler_setters = (compiler_setter_with_environ, compiler_setter_with_sysconfig) for compiler_setter in compiler_setters: with compiler_setter(str(good)): # Should have no side-effects adjust_compiler() with compiler_setter(str(ugly)): # Should just pass without complaint, since we can't determine # anything about the compiler anyways adjust_compiler() # In the following tests we check the log messages just to ensure that the # failures occur on the correct code paths for these cases with compiler_setter_with_environ(str(bad)) as log: with pytest.raises(SystemExit): adjust_compiler() assert len(log.messages) == 1 assert 'will fail to compile' in log.messages[0] with compiler_setter_with_sysconfig(str(bad)): adjust_compiler() assert 'CC' in os.environ and os.environ['CC'] == 'clang' with compiler_setter_with_environ('bogus') as log: with pytest.raises(SystemExit): # Missing compiler? adjust_compiler() assert len(log.messages) == 1 assert 'cannot be found or executed' in log.messages[0] with compiler_setter_with_sysconfig('bogus') as log: with pytest.raises(SystemExit): # Missing compiler? 
adjust_compiler() assert len(log.messages) == 1 assert 'The C compiler used to compile Python' in log.messages[0] def test_invalid_package_exclusion(tmpdir, capsys): module_name = 'foobar' setup_header = dedent("""\ import sys from os.path import join from setuptools import setup, Extension sys.path.insert(0, r'{astropy_helpers_path}') from astropy_helpers.setup_helpers import register_commands, \\ get_package_info, add_exclude_packages NAME = {module_name!r} VERSION = 0.1 RELEASE = True """.format(module_name=module_name, astropy_helpers_path=ASTROPY_HELPERS_PATH)) setup_footer = dedent("""\ setup( name=NAME, version=VERSION, cmdclass=cmdclassd, **package_info ) """) # Test error when using add_package_excludes out of order error_commands = dedent("""\ cmdclassd = register_commands(NAME, VERSION, RELEASE) package_info = get_package_info() add_exclude_packages(['tests*']) """) error_pkg = tmpdir.mkdir('error_pkg') error_pkg.join('setup.py').write( setup_header + error_commands + setup_footer) with error_pkg.as_cwd(): run_setup('setup.py', ['build']) stdout, stderr = capsys.readouterr() assert "RuntimeError" in stderr # Test warning when using deprecated exclude parameter warn_commands = dedent("""\ cmdclassd = register_commands(NAME, VERSION, RELEASE) package_info = get_package_info(exclude=['test*']) """) warn_pkg = tmpdir.mkdir('warn_pkg') warn_pkg.join('setup.py').write( setup_header + warn_commands + setup_footer) with warn_pkg.as_cwd(): run_setup('setup.py', ['build']) stdout, stderr = capsys.readouterr() assert 'AstropyDeprecationWarning' in stderr asdf-1.3.3/astropy_helpers/astropy_helpers/tests/test_utils.py0000644000175000017500000000135713243564211024351 0ustar dandan00000000000000import os from ..utils import find_data_files def test_find_data_files(tmpdir): data = tmpdir.mkdir('data') sub1 = data.mkdir('sub1') sub2 = data.mkdir('sub2') sub3 = sub1.mkdir('sub3') for directory in (data, sub1, sub2, sub3): filename = directory.join('data.dat').strpath with open(filename, 'w') as f: f.write('test') filenames = find_data_files(data.strpath, '**/*.dat') filenames = sorted(os.path.relpath(x, data.strpath) for x in filenames) assert filenames[0] == os.path.join('data.dat') assert filenames[1] == os.path.join('sub1', 'data.dat') assert filenames[2] == os.path.join('sub1', 'sub3', 'data.dat') assert filenames[3] == os.path.join('sub2', 'data.dat') asdf-1.3.3/astropy_helpers/astropy_helpers/tests/test_ah_bootstrap.py0000644000175000017500000003552413246003560025677 0ustar dandan00000000000000# -*- coding: utf-8 -*- import glob import os import json import textwrap from distutils.version import LooseVersion import setuptools import pytest from . import reset_setup_helpers, reset_distutils_log # noqa from . 
import run_cmd, run_setup, testpackage from ..utils import silence TEST_SETUP_PY = """\ #!/usr/bin/env python from __future__ import print_function import os import sys # This import is not the real run of ah_bootstrap for the purposes of the test, # so we need to preserve the command-line arguments otherwise these get eaten # up by this import args = sys.argv[:] import ah_bootstrap sys.argv = args {extra} # reset the name of the package installed by ah_boostrap to # _astropy_helpers_test_--this will prevent any confusion by pkg_resources with # any already installed packages named astropy_helpers # We also disable auto-upgrade by default ah_bootstrap.DIST_NAME = 'astropy-helpers-test' ah_bootstrap.PACKAGE_NAME = '_astropy_helpers_test_' ah_bootstrap.AUTO_UPGRADE = False ah_bootstrap.DOWNLOAD_IF_NEEDED = False try: ah_bootstrap.BOOTSTRAPPER = ah_bootstrap._Bootstrapper.main() ah_bootstrap.use_astropy_helpers({args}) finally: ah_bootstrap.DIST_NAME = 'astropy-helpers' ah_bootstrap.PACKAGE_NAME = 'astropy_helpers' ah_bootstrap.AUTO_UPGRADE = True ah_bootstrap.DOWNLOAD_IF_NEEDED = True # Kind of a hacky way to do this, but this assertion is specifically # for test_check_submodule_no_git # TODO: Rework the tests in this module so that it's easier to test specific # behaviors of ah_bootstrap for each test assert '--no-git' not in sys.argv import _astropy_helpers_test_ filename = os.path.abspath(_astropy_helpers_test_.__file__) filename = filename.replace('.pyc', '.py') # More consistent this way # We print out variables that are needed in tests below in JSON import json data = {{}} data['filename'] = filename data['ah_bootstrap.BOOTSTRAPPER.use_git'] = ah_bootstrap.BOOTSTRAPPER.use_git print(json.dumps(data)) """ AH_BOOTSTRAP_FILE = os.path.join(os.path.dirname(__file__), '..', '..', 'ah_bootstrap.py') with open(AH_BOOTSTRAP_FILE) as f: AH_BOOTSTRAP = f.read() # The behavior checked in some of the tests depends on the version of # setuptools try: # We need to use LooseVersion here instead of StrictVersion since developer # versions of setuptools ('35.0.2.post20170530') don't satisfy the # StrictVersion criteria even though they satisfy PEP440 SETUPTOOLS_VERSION = LooseVersion(setuptools.__version__).version except: # Broken setuptools? ¯\_(ツ)_/¯ SETUPTOOLS_VERSION = (0, 0, 0) def test_bootstrap_from_submodule(tmpdir, testpackage, capsys): """ Tests importing _astropy_helpers_test_ from a submodule in a git repository. This tests actually performing a fresh clone of the repository without the submodule initialized, and that importing astropy_helpers in that context works transparently after calling `ah_boostrap.use_astropy_helpers`. 
""" orig_repo = tmpdir.mkdir('orig') with orig_repo.as_cwd(): run_cmd('git', ['init']) orig_repo.join('ah_bootstrap.py').write(AH_BOOTSTRAP) run_cmd('git', ['add', 'ah_bootstrap.py']) # Write a test setup.py that uses ah_bootstrap; it also ensures that # any previous reference to astropy_helpers is first wiped from # sys.modules orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args='', extra='')) run_cmd('git', ['add', 'setup.py']) # Add our own clone of the astropy_helpers repo as a submodule named # astropy_helpers run_cmd('git', ['submodule', 'add', str(testpackage), '_astropy_helpers_test_']) run_cmd('git', ['commit', '-m', 'test repository']) os.chdir(str(tmpdir)) # Creates a clone of our test repo in the directory 'clone' run_cmd('git', ['clone', 'orig', 'clone']) os.chdir('clone') run_setup('setup.py', []) stdout, stderr = capsys.readouterr() path = json.loads(stdout.strip())['filename'] # Ensure that the astropy_helpers used by the setup.py is the one that # was imported from git submodule a = os.path.normcase(path) b = os.path.normcase(str(tmpdir.join('clone', '_astropy_helpers_test_', '_astropy_helpers_test_', '__init__.py'))) assert a == b def test_bootstrap_from_submodule_no_locale(tmpdir, testpackage, capsys, monkeypatch): """ Regression test for https://github.com/astropy/astropy/issues/2749 Runs test_bootstrap_from_submodule but with missing locale/language settings. """ for varname in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'): monkeypatch.delenv(varname, raising=False) test_bootstrap_from_submodule(tmpdir, testpackage, capsys) def test_bootstrap_from_submodule_bad_locale(tmpdir, testpackage, capsys, monkeypatch): """ Additional regression test for https://github.com/astropy/astropy/issues/2749 """ for varname in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'): monkeypatch.delenv(varname, raising=False) # Test also with bad LC_CTYPE a la http://bugs.python.org/issue18378 monkeypatch.setenv('LC_CTYPE', 'UTF-8') test_bootstrap_from_submodule(tmpdir, testpackage, capsys) UPDATE_ERROR_PATCH = """ class UpgradeError(Exception): pass def _do_upgrade(*args, **kwargs): raise UpgradeError() ah_bootstrap._Bootstrapper._do_upgrade = _do_upgrade """ def test_check_submodule_no_git(capsys, tmpdir, testpackage): """ Tests that when importing astropy_helpers from a submodule, it is still recognized as a submodule even when using the --no-git option. In particular this ensures that the auto-upgrade feature is not activated. 
""" orig_repo = tmpdir.mkdir('orig') with orig_repo.as_cwd(): orig_repo.join('ah_bootstrap.py').write(AH_BOOTSTRAP) run_cmd('git', ['init']) # Write a test setup.py that uses ah_bootstrap; it also ensures that # any previous reference to astropy_helpers is first wiped from # sys.modules args = 'auto_upgrade=True' orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args=args, extra=UPDATE_ERROR_PATCH)) run_cmd('git', ['add', 'setup.py']) # Add our own clone of the astropy_helpers repo as a submodule named # astropy_helpers run_cmd('git', ['submodule', 'add', str(testpackage), '_astropy_helpers_test_']) run_cmd('git', ['commit', '-m', 'test repository']) run_setup('setup.py', ['--no-git']) stdout, stderr = capsys.readouterr() use_git = bool(json.loads(stdout.strip())['ah_bootstrap.BOOTSTRAPPER.use_git']) if 'UpgradeError' in stderr: pytest.fail('Attempted to run auto-upgrade despite importing ' '_astropy_helpers_test_ from a git submodule') # Ensure that the no-git option was in fact set assert not use_git def test_bootstrap_from_directory(tmpdir, testpackage, capsys): """ Tests simply bundling a copy of the astropy_helpers source code in its entirety bundled directly in the source package and not in an archive. """ source = tmpdir.mkdir('source') testpackage.copy(source.join('_astropy_helpers_test_')) with source.as_cwd(): source.join('ah_bootstrap.py').write(AH_BOOTSTRAP) source.join('setup.py').write(TEST_SETUP_PY.format(args='', extra='')) run_setup('setup.py', []) stdout, stderr = capsys.readouterr() path = json.loads(stdout.strip())['filename'] # Ensure that the astropy_helpers used by the setup.py is the one that # was imported from git submodule a = os.path.normcase(path) b = os.path.normcase(str(source.join('_astropy_helpers_test_', '_astropy_helpers_test_', '__init__.py'))) assert a == b def test_bootstrap_from_archive(tmpdir, testpackage, capsys): """ Tests importing _astropy_helpers_test_ from a .tar.gz source archive shipped alongside the package that uses it. """ orig_repo = tmpdir.mkdir('orig') # Make a source distribution of the test package with silence(): run_setup(str(testpackage.join('setup.py')), ['sdist', '--dist-dir=dist', '--formats=gztar']) dist_dir = testpackage.join('dist') for dist_file in dist_dir.visit('*.tar.gz'): dist_file.copy(orig_repo) with orig_repo.as_cwd(): orig_repo.join('ah_bootstrap.py').write(AH_BOOTSTRAP) # Write a test setup.py that uses ah_bootstrap; it also ensures that # any previous reference to astropy_helpers is first wiped from # sys.modules args = 'path={0!r}'.format(os.path.basename(str(dist_file))) orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args=args, extra='')) run_setup('setup.py', []) stdout, stderr = capsys.readouterr() path = json.loads(stdout.strip())['filename'] # Installation from the .tar.gz should have resulted in a .egg # directory that the _astropy_helpers_test_ package was imported from eggs = _get_local_eggs() assert eggs egg = orig_repo.join(eggs[0]) assert os.path.isdir(str(egg)) a = os.path.normcase(path) b = os.path.normcase(str(egg.join('_astropy_helpers_test_', '__init__.py'))) assert a == b def test_download_if_needed(tmpdir, testpackage, capsys): """ Tests the case where astropy_helpers was not actually included in a package, or is otherwise missing, and we need to "download" it. This does not test actually downloading from the internet--this is normally done through setuptools' easy_install command which can also install from a source archive. 
From the point of view of ah_boostrap the two actions are equivalent, so we can just as easily simulate this by providing a setup.cfg giving the path to a source archive to "download" (as though it were a URL). """ source = tmpdir.mkdir('source') # Ensure ah_bootstrap is imported from the local directory import ah_bootstrap # noqa # Make a source distribution of the test package with silence(): run_setup(str(testpackage.join('setup.py')), ['sdist', '--dist-dir=dist', '--formats=gztar']) dist_dir = testpackage.join('dist') with source.as_cwd(): source.join('ah_bootstrap.py').write(AH_BOOTSTRAP) source.join('setup.py').write(TEST_SETUP_PY.format( args='download_if_needed=True', extra='')) source.join('setup.cfg').write(textwrap.dedent("""\ [easy_install] find_links = {find_links} """.format(find_links=str(dist_dir)))) run_setup('setup.py', []) stdout, stderr = capsys.readouterr() path = json.loads(stdout.strip())['filename'] # easy_install should have worked by 'installing' astropy_helpers as a # .egg in the current directory eggs = _get_local_eggs() assert eggs egg = source.join(eggs[0]) assert os.path.isdir(str(egg)) a = os.path.normcase(path) b = os.path.normcase(str(egg.join('_astropy_helpers_test_', '__init__.py'))) assert a == b EXTRA_PACKAGE_INDEX = """ from setuptools.package_index import PackageIndex class FakePackageIndex(PackageIndex): def __init__(self, *args, **kwargs): PackageIndex.__init__(self, *args, **kwargs) self.to_scan = {dists} def find_packages(self, requirement): # no-op pass ah_bootstrap.PackageIndex = FakePackageIndex """ def test_upgrade(tmpdir, capsys): # Run the testpackage fixture manually, since we use it multiple times in # this test to make different versions of _astropy_helpers_test_ orig_dir = testpackage(tmpdir.mkdir('orig')) # Make a test package that uses _astropy_helpers_test_ source = tmpdir.mkdir('source') dist_dir = source.mkdir('dists') orig_dir.copy(source.join('_astropy_helpers_test_')) with source.as_cwd(): source.join('ah_bootstrap.py').write(AH_BOOTSTRAP) setup_py = TEST_SETUP_PY.format(args='auto_upgrade=True', extra='') source.join('setup.py').write(setup_py) # This will be used to later to fake downloading the upgrade package source.join('setup.cfg').write(textwrap.dedent("""\ [easy_install] find_links = {find_links} """.format(find_links=str(dist_dir)))) # Make additional "upgrade" versions of the _astropy_helpers_test_ # package--one of them is version 0.2 and the other is version 0.1.1. The # auto-upgrade should ignore version 0.2 but use version 0.1.1. 
upgrade_dir_1 = testpackage(tmpdir.mkdir('upgrade_1'), version='0.2') upgrade_dir_2 = testpackage(tmpdir.mkdir('upgrade_2'), version='0.1.1') dists = [] # For each upgrade package go ahead and build a source distribution of it # and copy that source distribution to a dist directory we'll use later to # simulate a 'download' for upgrade_dir in [upgrade_dir_1, upgrade_dir_2]: with silence(): run_setup(str(upgrade_dir.join('setup.py')), ['sdist', '--dist-dir=dist', '--formats=gztar']) dists.append(str(upgrade_dir.join('dist'))) for dist_file in upgrade_dir.visit('*.tar.gz'): dist_file.copy(source.join('dists')) with source.as_cwd(): setup_py = TEST_SETUP_PY.format(args='auto_upgrade=True', extra=EXTRA_PACKAGE_INDEX.format(dists=dists)) source.join('setup.py').write(setup_py) # Now run the source setup.py; this test is similar to # test_download_if_needed, but we explicitly check that the correct # *version* of _astropy_helpers_test_ was used run_setup('setup.py', []) stdout, stderr = capsys.readouterr() path = json.loads(stdout.strip())['filename'] eggs = _get_local_eggs() assert eggs egg = source.join(eggs[0]) assert os.path.isdir(str(egg)) a = os.path.normcase(path) b = os.path.normcase(str(egg.join('_astropy_helpers_test_', '__init__.py'))) assert a == b assert 'astropy_helpers_test-0.1.1-' in str(egg) def _get_local_eggs(path='.'): """ Helper utility used by some tests to get the list of egg archive files in a local directory. """ if SETUPTOOLS_VERSION[0] >= 7: eggs = glob.glob(os.path.join(path, '.eggs', '*.egg')) else: eggs = glob.glob('*.egg') return eggs asdf-1.3.3/astropy_helpers/astropy_helpers/tests/test_openmp_helpers.py0000644000175000017500000000232013243564211026220 0ustar dandan00000000000000import os import sys from copy import deepcopy from distutils.core import Extension from ..openmp_helpers import add_openmp_flags_if_available from ..setup_helpers import _module_state, register_commands IS_TRAVIS_LINUX = os.environ.get('TRAVIS_OS_NAME', None) == 'linux' IS_APPVEYOR = os.environ.get('APPVEYOR', None) == 'True' PY3_LT_35 = sys.version_info[0] == 3 and sys.version_info[1] < 5 _state = None def setup_function(function): global state state = deepcopy(_module_state) def teardown_function(function): _module_state.clear() _module_state.update(state) def test_add_openmp_flags_if_available(): register_commands('openmp_testing', '0.0', False) using_openmp = add_openmp_flags_if_available(Extension('test', [])) # Make sure that on Travis (Linux) and AppVeyor OpenMP does get used (for # MacOS X usually it will not work but this will depend on the compiler). # Having this is useful because we'll find out if OpenMP no longer works # for any reason on platforms on which it does work at the time of writing. # OpenMP doesn't work on Python 3.x where x<5 on AppVeyor though. if IS_TRAVIS_LINUX or (IS_APPVEYOR and not PY3_LT_35): assert using_openmp asdf-1.3.3/astropy_helpers/astropy_helpers/tests/__init__.py0000644000175000017500000001170413246003560023704 0ustar dandan00000000000000import os import subprocess as sp import sys import pytest try: from coverage import CoverageData except ImportError: HAS_COVERAGE = False else: HAS_COVERAGE = True from ..conftest import SUBPROCESS_COVERAGE PACKAGE_DIR = os.path.dirname(__file__) def run_cmd(cmd, args, path=None, raise_error=True): """ Runs a shell command with the given argument list. Changes directory to ``path`` if given, otherwise runs the command in the current directory. 
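    A minimal usage sketch (the command, arguments, and repository path here
    are purely illustrative)::

        stdout, stderr, returncode = run_cmd('git', ['status'], path='/tmp/repo')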
Returns a 3-tuple of (stdout, stderr, exit code) If ``raise_error=True`` raise an exception on non-zero exit codes. """ if path is not None: # Transparently support py.path objects path = str(path) p = sp.Popen([cmd] + list(args), stdout=sp.PIPE, stderr=sp.PIPE, cwd=path) streams = tuple(s.decode('latin1').strip() for s in p.communicate()) return_code = p.returncode if raise_error and return_code != 0: raise RuntimeError( "The command `{0}` with args {1!r} exited with code {2}.\n" "Stdout:\n\n{3}\n\nStderr:\n\n{4}".format( cmd, list(args), return_code, streams[0], streams[1])) return streams + (return_code,) def run_setup(setup_script, args): # This used to call setuptools.sandbox's run_setup, but due to issues with # this and Cython (which caused segmentation faults), we now use subprocess. setup_script = os.path.abspath(setup_script) path = os.path.dirname(setup_script) setup_script = os.path.basename(setup_script) if HAS_COVERAGE: # In this case, we run the command using the coverage command and we # then collect the coverage data into a SUBPROCESS_COVERAGE list which # is set up at the start of the testing process and is then combined # into a single .coverage file at the end of the testing process. p = sp.Popen(['coverage', 'run', setup_script] + list(args), cwd=path, stdout=sp.PIPE, stderr=sp.PIPE) stdout, stderr = p.communicate() cdata = CoverageData() cdata.read_file(os.path.join(path, '.coverage')) SUBPROCESS_COVERAGE.append(cdata) else: # Otherwise we just run the tests with Python p = sp.Popen([sys.executable, setup_script] + list(args), cwd=path, stdout=sp.PIPE, stderr=sp.PIPE) stdout, stderr = p.communicate() sys.stdout.write(stdout.decode('utf-8')) sys.stderr.write(stderr.decode('utf-8')) @pytest.fixture(scope='function', autouse=True) def reset_setup_helpers(request): """ Saves and restores the global state of the astropy_helpers.setup_helpers module between tests. """ mod = __import__('astropy_helpers.setup_helpers', fromlist=['']) old_state = mod._module_state.copy() def finalizer(old_state=old_state): mod = sys.modules.get('astropy_helpers.setup_helpers') if mod is not None: mod._module_state.update(old_state) request.addfinalizer(finalizer) @pytest.fixture(scope='function', autouse=True) def reset_distutils_log(): """ This is a setup/teardown fixture that ensures the log-level of the distutils log is always set to a default of WARN, since different settings could affect tests that check the contents of stdout. """ from distutils import log log.set_threshold(log.WARN) TEST_PACKAGE_SETUP_PY = """\ #!/usr/bin/env python from setuptools import setup NAME = 'astropy-helpers-test' VERSION = {version!r} setup(name=NAME, version=VERSION, packages=['_astropy_helpers_test_'], zip_safe=False) """ @pytest.fixture def testpackage(tmpdir, version='0.1'): """ This fixture creates a simplified package called _astropy_helpers_test_ used primarily for testing ah_boostrap, but without using the astropy_helpers package directly and getting it confused with the astropy_helpers package already under test. 
""" source = tmpdir.mkdir('testpkg') with source.as_cwd(): source.mkdir('_astropy_helpers_test_') init = source.join('_astropy_helpers_test_', '__init__.py') init.write('__version__ = {0!r}'.format(version)) setup_py = TEST_PACKAGE_SETUP_PY.format(version=version) source.join('setup.py').write(setup_py) # Make the new test package into a git repo run_cmd('git', ['init']) run_cmd('git', ['add', '--all']) run_cmd('git', ['commit', '-m', 'test package']) return source def cleanup_import(package_name): """Remove all references to package_name from sys.modules""" for k in list(sys.modules): if not isinstance(k, str): # Some things will actually do this =_= continue elif k.startswith('astropy_helpers.tests'): # Don't delete imported test modules or else the tests will break, # badly continue if k == package_name or k.startswith(package_name + '.'): del sys.modules[k] asdf-1.3.3/astropy_helpers/astropy_helpers/__init__.py0000644000175000017500000000345413246003560022545 0ustar dandan00000000000000try: from .version import version as __version__ from .version import githash as __githash__ except ImportError: __version__ = '' __githash__ = '' # If we've made it as far as importing astropy_helpers, we don't need # ah_bootstrap in sys.modules anymore. Getting rid of it is actually necessary # if the package we're installing has a setup_requires of another package that # uses astropy_helpers (and possibly a different version at that) # See https://github.com/astropy/astropy/issues/3541 import sys if 'ah_bootstrap' in sys.modules: del sys.modules['ah_bootstrap'] # Note, this is repeated from ah_bootstrap.py, but is here too in case this # astropy-helpers was upgraded to from an older version that did not have this # check in its ah_bootstrap. # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass import os # Ensure that all module-level code in astropy or other packages know that # we're in setup mode: if ('__main__' in sys.modules and hasattr(sys.modules['__main__'], '__file__')): filename = os.path.basename(sys.modules['__main__'].__file__) if filename.rstrip('co') == 'setup.py': if sys.version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = True del filename asdf-1.3.3/astropy_helpers/astropy_helpers/git_helpers.py0000644000175000017500000001450113243564211023310 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Utilities for retrieving revision information from a project's git repository. 
""" # Do not remove the following comment; it is used by # astropy_helpers.version_helpers to determine the beginning of the code in # this module # BEGIN import locale import os import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return text def update_git_devstr(version, path=None): """ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. """ try: # Quick way to determine if we're in git or not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not devstr: # Probably not in git so just pass silently return version if 'dev' in version: # update to the current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr else: # otherwise it's already the true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None): """ Determines the number of revisions in this repository. Parameters ---------- sha : bool If True, the full SHA1 hash will be returned. Otherwise, the total count of commits in the repository will be used as a "revision number". show_warning : bool If True, issue a warning if git returns an error code, otherwise errors pass silently. path : str or None If a string, specifies the directory to look in to find the git repository. If `None`, the current working directory is used, and must be the root of the git repository. If given a filename it uses the directory containing that file. Returns ------- devversion : str Either a string with the revision number (if `sha` is False), the SHA1 hash of the current commit (if `sha` is True), or an empty string if git version info could not be identified. """ if path is None: path = os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just the hash of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as e: if show_warning: warnings.warn('Error running git: ' + str(e)) return (None, b'', b'') if p.returncode == 128: if show_warning: warnings.warn('No git repository present at {0!r}! Using ' 'default dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode == 129: if show_warning: warnings.warn('Your git looks old (does it support {0}?); ' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd) if not sha and returncode == 128: # git returns 128 if the command is not run from within a git # repository tree. In this case, a warning is produced above but we # return the default dev version of '0'. 
return '0' elif not sha and returncode == 129: # git returns 129 if a command option failed to parse; in # particular this could happen in git versions older than 1.7.2 # where the --count option is not supported # Also use --abbrev-commit and --abbrev=0 to display the minimum # number of characters needed per-commit (rather than the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall back on the old method of getting all revisions and counting # the lines if returncode == 0: return str(stdout.count(b'\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function is tested but it is only ever executed within a subprocess when # creating a fake package, so it doesn't get picked up by coverage metrics. def _get_repo_path(pathname, levels=None): # pragma: no cover """ Given a file or directory name, determine the root of the git repository this path is under. If given, this won't look any higher than ``levels`` (that is, if ``levels=0`` then the given path must be the root of the git repository and is returned if so. Returns `None` if the given path could not be determined to belong to a git repo. """ if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level = 0 while levels is None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir) return None asdf-1.3.3/astropy_helpers/CONTRIBUTING.md0000644000175000017500000000216513243564211017442 0ustar dandan00000000000000Contributing to astropy-helpers =============================== The guidelines for contributing to ``astropy-helpers`` are generally the same as the [contributing guidelines for the astropy core package](http://github.com/astropy/astropy/blob/master/CONTRIBUTING.md). Basically, report relevant issues in the ``astropy-helpers`` issue tracker, and we welcome pull requests that broadly follow the [Astropy coding guidelines](http://docs.astropy.org/en/latest/development/codeguide.html). The key subtlety lies in understanding the relationship between ``astropy`` and ``astropy-helpers``. This package contains the build, installation, and documentation tools used by astropy. It also includes support for the ``setup.py test`` command, though Astropy is still required for this to function (it does not currently include the full Astropy test runner). So issues or improvements to that functionality should be addressed in this package. Any other aspect of the [astropy core package](http://github.com/astropy/astropy) (or any other package that uses ``astropy-helpers``) should be addressed in the github repository for that package. asdf-1.3.3/astropy_helpers/ez_setup.py0000644000175000017500000003037113243564211017421 0ustar dandan00000000000000#!/usr/bin/env python """ Setuptools bootstrapping installer. Maintained at https://github.com/pypa/setuptools/tree/bootstrap. Run this script to install or upgrade setuptools. This method is DEPRECATED. Check https://github.com/pypa/setuptools/issues/581 for more details. 
""" import os import shutil import sys import tempfile import zipfile import optparse import subprocess import platform import textwrap import contextlib from distutils import log try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen try: from site import USER_SITE except ImportError: USER_SITE = None # 33.1.1 is the last version that supports setuptools self upgrade/installation. DEFAULT_VERSION = "33.1.1" DEFAULT_URL = "https://pypi.io/packages/source/s/setuptools/" DEFAULT_SAVE_DIR = os.curdir DEFAULT_DEPRECATION_MESSAGE = "ez_setup.py is deprecated and when using it setuptools will be pinned to {0} since it's the last version that supports setuptools self upgrade/installation, check https://github.com/pypa/setuptools/issues/581 for more info; use pip to install setuptools" MEANINGFUL_INVALID_ZIP_ERR_MSG = 'Maybe {0} is corrupted, delete it and try again.' log.warn(DEFAULT_DEPRECATION_MESSAGE.format(DEFAULT_VERSION)) def _python_cmd(*args): """ Execute a command. Return True if the command succeeded. """ args = (sys.executable,) + args return subprocess.call(args) == 0 def _install(archive_filename, install_args=()): """Install Setuptools.""" with archive_context(archive_filename): # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 def _build_egg(egg, archive_filename, to_dir): """Build Setuptools egg.""" with archive_context(archive_filename): # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') class ContextualZipFile(zipfile.ZipFile): """Supplement ZipFile class to support context manager for Python 2.6.""" def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __new__(cls, *args, **kwargs): """Construct a ZipFile or ContextualZipFile as appropriate.""" if hasattr(zipfile.ZipFile, '__exit__'): return zipfile.ZipFile(*args, **kwargs) return super(ContextualZipFile, cls).__new__(cls) @contextlib.contextmanager def archive_context(filename): """ Unzip filename to a temporary directory, set to the cwd. The unzipped target is cleaned up after. """ tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) try: with ContextualZipFile(filename) as archive: archive.extractall() except zipfile.BadZipfile as err: if not err.args: err.args = ('', ) err.args = err.args + ( MEANINGFUL_INVALID_ZIP_ERR_MSG.format(filename), ) raise # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) yield finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _do_download(version, download_base, to_dir, download_delay): """Download Setuptools.""" py_desig = 'py{sys.version_info[0]}.{sys.version_info[1]}'.format(sys=sys) tp = 'setuptools-{version}-{py_desig}.egg' egg = os.path.join(to_dir, tp.format(**locals())) if not os.path.exists(egg): archive = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, archive, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). 
if 'pkg_resources' in sys.modules: _unload_pkg_resources() import setuptools setuptools.bootstrap_install_from = egg def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, download_delay=15): """ Ensure that a setuptools version is installed. Return None. Raise SystemExit if the requested version or later cannot be installed. """ to_dir = os.path.abspath(to_dir) # prior to importing, capture the module state for # representative modules. rep_modules = 'pkg_resources', 'setuptools' imported = set(sys.modules).intersection(rep_modules) try: import pkg_resources pkg_resources.require("setuptools>=" + version) # a suitable version is already installed return except ImportError: # pkg_resources not available; setuptools is not installed; download pass except pkg_resources.DistributionNotFound: # no version of setuptools was found; allow download pass except pkg_resources.VersionConflict as VC_err: if imported: _conflict_bail(VC_err, version) # otherwise, unload pkg_resources to allow the downloaded version to # take precedence. del pkg_resources _unload_pkg_resources() return _do_download(version, download_base, to_dir, download_delay) def _conflict_bail(VC_err, version): """ Setuptools was imported prior to invocation, so it is unsafe to unload it. Bail out. """ conflict_tmpl = textwrap.dedent(""" The required version of setuptools (>={version}) is not available, and can't be installed while this script is running. Please install a more recent version first, using 'easy_install -U setuptools'. (Currently using {VC_err.args[0]!r}) """) msg = conflict_tmpl.format(**locals()) sys.stderr.write(msg) sys.exit(2) def _unload_pkg_resources(): sys.meta_path = [ importer for importer in sys.meta_path if importer.__class__.__module__ != 'pkg_resources.extern' ] del_modules = [ name for name in sys.modules if name.startswith('pkg_resources') ] for mod_name in del_modules: del sys.modules[mod_name] def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell. Powershell will validate trust. Raise an exception if the command cannot complete. 
""" target = os.path.abspath(target) ps_cmd = ( "[System.Net.WebRequest]::DefaultWebProxy.Credentials = " "[System.Net.CredentialCache]::DefaultCredentials; " '(new-object System.Net.WebClient).DownloadFile("%(url)s", "%(target)s")' % locals() ) cmd = [ 'powershell', '-Command', ps_cmd, ] _clean_check(cmd, target) def has_powershell(): """Determine if Powershell is available.""" if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--location', '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """Use Python to download the file, without connection authentication.""" src = urlopen(url) try: # Read all the data in one block. data = src.read() finally: src.close() # Write all the data in one block to avoid creating a partial file. with open(target, "wb") as dst: dst.write(data) download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = ( download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ) viable_downloaders = (dl for dl in downloaders if dl.viable()) return next(viable_downloaders, None) def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, delay=15, downloader_factory=get_best_downloader): """ Download setuptools from a specified location and return its filename. `version` should be a valid setuptools version number that is available as an sdist for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) zip_name = "setuptools-%s.zip" % version url = download_base + zip_name saveto = os.path.join(to_dir, zip_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package. Returns list of command line arguments. 
""" return ['--user'] if options.user_install else [] def _parse_args(): """Parse the command line for options.""" parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) parser.add_option( '--version', help="Specify which version to download", default=DEFAULT_VERSION, ) parser.add_option( '--to-dir', help="Directory to save (and re-use) package", default=DEFAULT_SAVE_DIR, ) options, args = parser.parse_args() # positional arguments are ignored return options def _download_args(options): """Return args for download_setuptools function from cmdline args.""" return dict( version=options.version, download_base=options.download_base, downloader_factory=options.downloader_factory, to_dir=options.to_dir, ) def main(): """Install or upgrade setuptools and EasyInstall.""" options = _parse_args() archive = download_setuptools(**_download_args(options)) return _install(archive, _build_install_args(options)) if __name__ == '__main__': sys.exit(main()) asdf-1.3.3/docs/0000755000175000017500000000000013246031665012717 5ustar dandan00000000000000asdf-1.3.3/docs/asdf/0000755000175000017500000000000013246031665013634 5ustar dandan00000000000000asdf-1.3.3/docs/asdf/examples.rst0000644000175000017500000003475013246003441016204 0ustar dandan00000000000000.. _examples: Examples ======== Hello World ----------- In it's simplest form, ASDF is a way of saving nested data structures to YAML. Here we save a dictionary with the key/value pair ``'hello': 'world'``. .. runcode:: from asdf import AsdfFile # Make the tree structure, and create a AsdfFile from it. tree = {'hello': 'world'} ff = AsdfFile(tree) ff.write_to("test.asdf") # You can also make the AsdfFile first, and modify its tree directly: ff = AsdfFile() ff.tree['hello'] = 'world' ff.write_to("test.asdf") .. asdf:: test.asdf Saving arrays ------------- Beyond the basic data types of dictionaries, lists, strings and numbers, the most important thing ASDF can save is arrays. It's as simple as putting a Numpy array somewhere in the tree. Here, we save an 8x8 array of random floating-point numbers. Note that the YAML part contains information about the structure (size and data type) of the array, but the actual array content is in a binary block. .. runcode:: from asdf import AsdfFile import numpy as np tree = {'my_array': np.random.rand(8, 8)} ff = AsdfFile(tree) ff.write_to("test.asdf") .. note:: In the file examples below, the first YAML part appears as it appears in the file. The ``BLOCK`` sections are stored as binary data in the file, but are presented in human-readable form on this page. .. asdf:: test.asdf Schema validation ----------------- In the current draft of the ASDF schema, there are very few elements defined at the top-level -- for the most part, the top-level can contain any elements. One of the few specified elements is ``data``: it must be an array, and is used to specify the "main" data content (for some definition of "main") so that tools that merely want to view or preview the ASDF file have a standard location to find the most interesting data. 
If you set this to anything but an array, ``asdf`` will complain:: >>> from asdf import AsdfFile >>> tree = {'data': 'Not an array'} >>> AsdfFile(tree) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... ValidationError: mismatched tags, wanted 'tag:stsci.edu:asdf/core/ndarray-1.0.0', got 'tag:yaml.org,2002:str' ... This validation happens only when a `AsdfFile` is instantiated, read or saved, so it's still possible to get the tree into an invalid intermediate state:: >>> from asdf import AsdfFile >>> ff = AsdfFile() >>> ff.tree['data'] = 'Not an array' >>> # The ASDF file is now invalid, but asdf will tell us when >>> # we write it out. >>> ff.write_to('test.asdf') # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... ValidationError: mismatched tags, wanted 'tag:stsci.edu:asdf/core/ndarray-1.0.0', got 'tag:yaml.org,2002:str' ... Sharing of data --------------- Arrays that are views on the same data automatically share the same data in the file. In this example an array and a subview on that same array are saved to the same file, resulting in only a single block of data being saved. .. runcode:: from asdf import AsdfFile import numpy as np my_array = np.random.rand(8, 8) subset = my_array[2:4,3:6] tree = { 'my_array': my_array, 'subset': subset } ff = AsdfFile(tree) ff.write_to("test.asdf") .. asdf:: test.asdf Saving inline arrays -------------------- For these sort of small arrays, you may not care about the efficiency of a binary representation and want to just save the content directly in the YAML tree. The `~asdf.AsdfFile.set_array_storage` method can be used to set the type of block of the associated data, either ``internal``, ``external`` or ``inline``. - ``internal``: The default. The array data will be stored in a binary block in the same ASDF file. - ``external``: Store the data in a binary block in a separate ASDF file. - ``inline``: Store the data as YAML inline in the tree. .. runcode:: from asdf import AsdfFile import numpy as np my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = AsdfFile(tree) ff.set_array_storage(my_array, 'inline') ff.write_to("test.asdf") .. asdf:: test.asdf Saving external arrays ---------------------- ASDF files may also be saved in "exploded form", in multiple files: - An ASDF file containing only the header and tree. - *n* ASDF files, each containing a single block. Exploded form is useful in the following scenarios: - Not all text editors may handle the hybrid text and binary nature of the ASDF file, and therefore either can't open a ASDF file or would break a ASDF file upon saving. In this scenario, a user may explode the ASDF file, edit the YAML portion as a pure YAML file, and implode the parts back together. - Over a network protocol, such as HTTP, a client may only need to access some of the blocks. While reading a subset of the file can be done using HTTP ``Range`` headers, it still requires one (small) request per block to "jump" through the file to determine the start location of each block. This can become time-consuming over a high-latency network if there are many blocks. Exploded form allows each block to be requested directly by a specific URI. - An ASDF writer may stream a table to disk, when the size of the table is not known at the outset. Using exploded form simplifies this, since a standalone file containing a single table can be iteratively appended to without worrying about any blocks that may follow it. 
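As a rough sketch of the "implode" step mentioned in the first scenario above,
an exploded file can be turned back into a single self-contained file by
re-saving it with internal storage. This assumes ``test.asdf`` was written in
exploded form as shown in the example below; the output file name is
arbitrary:

.. doctest-skip::

    from asdf import AsdfFile

    # Re-save an exploded file with all blocks stored internally again
    with AsdfFile.open('test.asdf') as ff:
        ff.write_to('imploded.asdf', all_array_storage='internal')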
To save a block in an external file, set its block type to ``'external'``. .. runcode:: from asdf import AsdfFile import numpy as np my_array = np.random.rand(8, 8) tree = {'my_array': my_array} ff = AsdfFile(tree) # On an individual block basis: ff.set_array_storage(my_array, 'external') ff.write_to("test.asdf") # Or for every block: ff.write_to("test.asdf", all_array_storage='external') .. asdf:: test.asdf .. asdf:: test0000.asdf Streaming array data -------------------- In certain scenarios, you may want to stream data to disk, rather than writing an entire array of data at once. For example, it may not be possible to fit the entire array in memory, or you may want to save data from a device as it comes in to prevent data loss. The ASDF standard allows exactly one streaming block per file where the size of the block isn't included in the block header, but instead is implicitly determined to include all of the remaining contents of the file. By definition, it must be the last block in the file. To use streaming, rather than including a Numpy array object in the tree, you include a `asdf.Stream` object which sets up the structure of the streamed data, but will not write out the actual content. The file handle's `write` method is then used to manually write out the binary data. .. runcode:: from asdf import AsdfFile, Stream import numpy as np tree = { # Each "row" of data will have 128 entries. 'my_stream': Stream([128], np.float64) } ff = AsdfFile(tree) with open('test.asdf', 'wb') as fd: ff.write_to(fd) # Write 100 rows of data, one row at a time. ``write`` # expects the raw binary bytes, not an array, so we use # ``tostring()``. for i in range(100): fd.write(np.array([i] * 128, np.float64).tostring()) .. asdf:: test.asdf A case where streaming may be useful is when converting large data sets from a different format into ASDF. In these cases it would be impractical to hold all of the data in memory as an intermediate step. Consider the following example that streams a large CSV file containing rows of integer data and converts it to numpy arrays stored in ASDF: .. doctest-skip:: import csv import numpy as np from asdf import AsdfFile, Stream tree = { # We happen to know in advance that each row in the CSV has 100 ints 'data': Stream([100], np.int64) } ff = AsdfFile(tree) # open the output file handle with open('new_file.asdf', 'wb') as fd: ff.write_to(fd) # open the CSV file to be converted with open('large_file.csv', 'r') as cfd: # read each line of the CSV file reader = csv.reader(cfd) for row in reader: # convert each row to a numpy array array = np.array([int(x) for x in row], np.int64) # write the array to the output file handle fd.write(array.tostring()) References ---------- ASDF files may reference items in the tree in other ASDF files. The syntax used in the file for this is called "JSON Pointer", but users of ``asdf`` can largely ignore that. First, we'll create a ASDF file with a couple of arrays in it: .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'a': np.arange(0, 10), 'b': np.arange(10, 20) } target = AsdfFile(tree) target.write_to('target.asdf') .. asdf:: target.asdf Then we will reference those arrays in a couple of different ways. First, we'll load the source file in Python and use the `make_reference` method to generate a reference to array ``a``. Second, we'll work at the lower level by manually writing a JSON Pointer to array ``b``, which doesn't require loading or having access to the target file. .. 
runcode:: ff = AsdfFile() with AsdfFile.open('target.asdf') as target: ff.tree['my_ref_a'] = target.make_reference(['a']) ff.tree['my_ref_b'] = {'$ref': 'target.asdf#b'} ff.write_to('source.asdf') .. asdf:: source.asdf Calling `~asdf.AsdfFile.find_references` will look up all of the references so they can be used as if they were local to the tree. It doesn't actually move any of the data, and keeps the references as references. .. runcode:: with AsdfFile.open('source.asdf') as ff: ff.find_references() assert ff.tree['my_ref_b'].shape == (10,) On the other hand, calling `~asdf.AsdfFile.resolve_references` places all of the referenced content directly in the tree, so when we write it out again, all of the external references are gone, with the literal content in its place. .. runcode:: with AsdfFile.open('source.asdf') as ff: ff.resolve_references() ff.write_to('resolved.asdf') .. asdf:: resolved.asdf A similar feature provided by YAML, anchors and aliases, also provides a way to support references within the same file. These are supported by asdf, however the JSON Pointer approach is generally favored because: - It is possible to reference elements in another file - Elements are referenced by location in the tree, not an identifier, therefore, everything can be referenced. Anchors and aliases are handled automatically by ``asdf`` when the data structure is recursive. For example here is a dictionary that is included twice in the same tree: .. runcode:: d = {'foo': 'bar'} d['baz'] = d tree = {'d': d} ff = AsdfFile(tree) ff.write_to('anchors.asdf') .. asdf:: anchors.asdf Compression ----------- Individual blocks in an ASDF file may be compressed. You can easily `zlib `__ or `bzip2 `__ compress all blocks: .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'a': np.random.rand(256, 256), 'b': np.random.rand(512, 512) } target = AsdfFile(tree) target.write_to('target.asdf', all_array_compression='zlib') target.write_to('target.asdf', all_array_compression='bzp2') .. asdf:: target.asdf Saving history entries ---------------------- ``asdf`` has a convenience method for notating the history of transformations that have been performed on a file. Given a `~asdf.AsdfFile` object, call `~asdf.AsdfFile.add_history_entry`, given a description of the change and optionally a description of the software (i.e. your software, not ``asdf``) that performed the operation. .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'a': np.random.rand(256, 256) } ff = AsdfFile(tree) ff.add_history_entry( u"Initial random numbers", {u'name': u'asdf examples', u'author': u'John Q. Public', u'homepage': u'http://github.com/spacetelescope/asdf', u'version': u'0.1'}) ff.write_to('example.asdf') .. asdf:: example.asdf Saving ASDF in FITS ------------------- Sometimes you may need to store the structured data supported by ASDF inside of a FITS file in order to be compatible with legacy tools that support only FITS. This can be achieved by including a special extension with the name ``ASDF`` to the FITS file, containing the YAML tree from an ASDF file. The array tags within the ASDF tree point directly to other binary extensions in the FITS file. First, make a FITS file in the usual way with astropy.io.fits. Here, we are building a FITS file from scratch, but it could also have been loaded from a file. This FITS file has two image extensions, SCI and DQ respectively. .. 
runcode:: from astropy.io import fits hdulist = fits.HDUList() hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='SCI')) hdulist.append(fits.ImageHDU(np.arange(512, dtype=np.float), name='DQ')) Next we make a tree structure out of the data in the FITS file. Importantly, we use the *same* arrays in the FITS HDUList and store them in the tree. By doing this, asdf will be smart enough to point to the data in the regular FITS extensions. .. runcode:: tree = { 'model': { 'sci': { 'data': hdulist['SCI'].data, }, 'dq': { 'data': hdulist['DQ'].data, } } } Now we take both the FITS HDUList and the ASDF tree and create a `~asdf.fits_embed.AsdfInFits` object. It behaves identically to the `~asdf.AsdfFile` object, but reads and writes this special ASDF-in-FITS format. .. runcode:: from asdf import fits_embed ff = fits_embed.AsdfInFits(hdulist, tree) ff.write_to('embedded_asdf.fits') .. runcode:: hidden with open('content.asdf', 'wb') as fd: fd.write(hdulist['ASDF'].data.tostring()) The special ASDF extension in the resulting FITS file looks like the following. Note that the data source of the arrays uses the ``fits:`` prefix to indicate that the data comes from a FITS extension. .. asdf:: content.asdf To load an ASDF-in-FITS file, first open it with ``astropy.io.fits``, and then pass that HDU list to `~asdf.fits_embed.AsdfInFits`: .. runcode:: with fits.open('embedded_asdf.fits') as hdulist: with fits_embed.AsdfInFits.open(hdulist) as asdf: science = asdf.tree['model']['sci'] asdf-1.3.3/docs/asdf/extensions.rst0000644000175000017500000002207513246003441016562 0ustar dandan00000000000000Writing ASDF extensions ======================= Extensions provide a way for ASDF to represent complex types that are not defined by the ASDF standard. Examples of types that require custom extensions include types from third-party libraries, user-defined types, and also complex types that are part of the Python standard library but are not handled in the ASDF standard. From ASDF's perspective, these are all considered 'custom' types. Supporting new types in asdf is easy. There are three pieces needed: 1. A YAML Schema file for each new type. 2. A tag class (inheriting from `asdf.CustomType`) corresponding to each new custom type. The class must override ``to_tree`` and ``from_tree`` from `asdf.CustomType` in order to define how ASDF serializes and deserializes the custom type. 3. A Python class to define an "extension" to ASDF, which is a set of related types. This class must implement the `asdf.AsdfExtension` abstract base class. In general, a third-party library that defines multiple custom types can group them all in the same extension. An Example ---------- As an example, we will write an extension for ASDF that allows us to represent Python's standard ``fractions.Fraction`` class for representing rational numbers. We will call our new ASDF type ``fraction``. First, the YAML Schema, defining the type as a pair of integers: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/1.0.0/fraction" title: An example custom type for handling fractions tag: "tag:nowhere.org:custom/1.0.0/fraction" type: array items: type: integer minItems: 2 maxItems: 2 ... Then, the Python implementation of the tag class and extension class. See the `asdf.CustomType` and `asdf.AsdfExtension` documentation for more information: .. 
code-block:: python import os import asdf from asdf import util import fractions class FractionType(asdf.CustomType): name = 'fraction' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [fractions.Fraction] @classmethod def to_tree(cls, node, ctx): return [node.numerator, node.denominator] @classmethod def from_tree(cls, tree, ctx): return fractions.Fraction(tree[0], tree[1]) class FractionExtension(object): @property def types(self): return [FractionType] @property def tag_mapping(self): return [('tag:nowhere.org:custom', 'http://nowhere.org/schemas/custom{tag_suffix}')] @property def url_mapping(self): return [('http://nowhere.org/schemas/custom/1.0.0/', util.filepath_to_url(os.path.dirname(__file__)) + '/{url_suffix}.yaml')] Note that the method ``to_tree`` of the tag class ``FractionType`` defines how the library converts ``fractions.Fraction`` into a tree that can be stored by ASDF. Conversely, the method ``from_tree`` defines how the library reads a serialized representation of the object and converts it back into a ``fractions.Fraction``. Explicit version support ------------------------ To some extent schemas and tag classes will be closely tied to the custom data types that they represent. This means that in some cases API changes or other changes to the representation of the underlying types will force us to modify our schemas and tag classes. ASDF's schema versioning allows us to handle changes in schemas over time. Let's consider an imaginary custom type called ``Person`` that we want to serialize in ASDF. The first version of ``Person`` was constructed using a first and last name: .. code-block:: python person = Person('James', 'Webb') print(person.first, person.last) Our version 1.0.0 YAML schema for ``Person`` might look like the following: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/1.0.0/person" title: An example custom type for representing a Person tag: "tag:nowhere.org:custom/1.0.0/person" type: array items: type: string minItems: 2 maxItems: 2 ... And our tag implementation would look something like this: .. code-block:: python import asdf from people import Person class PersonType(asdf.CustomType): name = 'person' organization = 'nowhere.org' version = (1, 0, 0) standard = 'custom' types = [Person] @classmethod def to_tree(cls, node, ctx): return [node.first, node.last] @classmethod def from_tree(cls, tree, ctx): return Person(tree[0], tree[1]) However, a newer version of ``Person`` now requires a middle name in the constructor as well: .. code-block:: python person = Person('James', 'Edwin', 'Webb') print(person.first, person.middle, person.last) James Edwin Webb So we update our YAML schema to version 1.1.0 in order to support newer versions of Person: .. code-block:: yaml %YAML 1.1 --- $schema: "http://stsci.edu/schemas/yaml-schema/draft-01" id: "http://nowhere.org/schemas/custom/1.1.0/person" title: An example custom type for representing a Person tag: "tag:nowhere.org:custom/1.1.0/person" type: array items: type: string minItems: 3 maxItems: 3 ... We need to update our tag class implementation as well. However, we need to be careful. We still want to be able to read version 1.0.0 of our schema and be able to convert it to the newer version of ``Person`` objects. To accomplish this, we will make use of the ``supported_versions`` attribute for our tag class. This will allow us to declare explicit support for the schema versions our tag class implements. 
Under the hood, ASDF creates multiple copies of our ``PersonType`` tag class,
each with a different ``version`` attribute corresponding to one of the
supported versions. This means that in our new tag class implementation, we
can condition our ``from_tree`` implementation on the value of ``cls.version``
to determine which schema version should be used when reading:

.. code-block:: python

    import asdf
    from people import Person

    class PersonType(asdf.CustomType):
        name = 'person'
        organization = 'nowhere.org'
        version = (1, 1, 0)
        supported_versions = [(1, 0, 0), (1, 1, 0)]
        standard = 'custom'
        types = [Person]

        @classmethod
        def to_tree(cls, node, ctx):
            return [node.first, node.middle, node.last]

        @classmethod
        def from_tree(cls, tree, ctx):
            # Handle the older version of the person schema
            if cls.version == (1, 0, 0):
                # Construct a Person object with an empty middle name field
                return Person(tree[0], '', tree[1])
            else:
                # The newer version of the schema stores the middle name too
                return Person(tree[0], tree[1], tree[2])

Note that the implementation of ``to_tree`` is not conditioned on
``cls.version`` since we do not need to convert new ``Person`` objects back to
the older version of the schema.

Adding custom validators
------------------------

A new type may also add new validation keywords to the schema language. This
can be used to impose type-specific restrictions on the values in an ASDF
file. This feature is used internally, for example, so that a schema can
specify the required datatype of an array.

To support custom validation keywords, set the ``validators`` member of a
``CustomType`` subclass to a dictionary whose keys are the validation keyword
names and whose values are validation functions. The validation functions
take the same form as the validation functions in the underlying
``jsonschema`` library, and are passed the following arguments:

- ``validator``: A `jsonschema.Validator` instance.

- ``value``: The value of the schema keyword.

- ``instance``: The instance to validate. This will be made up of basic
  datatypes as represented in the YAML file (list, dict, number, strings),
  and will not include any object types.

- ``schema``: The entire schema that applies to instance. Useful to get other
  related schema keywords.

The validation function should either return ``None`` if the instance is
valid, or ``yield`` one or more `asdf.ValidationError` objects if the instance
is invalid.

To continue the ``FractionType`` example from above, say we want to add a
validation keyword ``simplified`` that, when ``true``, asserts that the
corresponding fraction is in simplified form:

.. code-block:: python

    import fractions

    from asdf import ValidationError

    def validate_simplified(validator, simplified, instance, schema):
        if simplified:
            reduced = fractions.Fraction(instance[0], instance[1])
            if (reduced.numerator != instance[0] or
                    reduced.denominator != instance[1]):
                yield ValidationError("Fraction is not in simplified form.")

    FractionType.validators = {'simplified': validate_simplified}

asdf-1.3.3/docs/_templates/0000755000175000017500000000000013246031665015054 5ustar dandan00000000000000asdf-1.3.3/docs/_templates/autosummary/0000755000175000017500000000000013246031665017442 5ustar dandan00000000000000asdf-1.3.3/docs/_templates/autosummary/module.rst0000644000175000017500000000037413243547254021470 0ustar dandan00000000000000{% extends "autosummary_core/module.rst" %}
{# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core.
If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}asdf-1.3.3/docs/_templates/autosummary/class.rst0000644000175000017500000000037313243547254021307 0ustar dandan00000000000000{% extends "autosummary_core/class.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}asdf-1.3.3/docs/_templates/autosummary/base.rst0000644000175000017500000000037213243547254021113 0ustar dandan00000000000000{% extends "autosummary_core/base.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}asdf-1.3.3/docs/rtd-pip-requirements0000644000175000017500000000011213246003441016723 0ustar dandan00000000000000asdf numpy Cython astropy astropy-helpers jsonschema pyyaml sphinx>=1.4.2 asdf-1.3.3/docs/sphinxext/0000755000175000017500000000000013246031665014751 5ustar dandan00000000000000asdf-1.3.3/docs/sphinxext/example.py0000644000175000017500000001075413246003441016754 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function import atexit import io import os import shutil import tempfile import textwrap import codecs from docutils.parsers.rst import Directive from docutils import nodes from sphinx.util.nodes import set_source_info from asdf import AsdfFile from asdf.constants import ASDF_MAGIC, BLOCK_FLAG_STREAMED from asdf import versioning version_string = str(versioning.default_version) TMPDIR = tempfile.mkdtemp() def delete_tmpdir(): shutil.rmtree(TMPDIR) GLOBALS = {} LOCALS = {} FLAGS = { BLOCK_FLAG_STREAMED: "BLOCK_FLAG_STREAMED" } class RunCodeDirective(Directive): has_content = True optional_arguments = 1 def run(self): code = textwrap.dedent('\n'.join(self.content)) cwd = os.getcwd() os.chdir(TMPDIR) try: try: exec(code, GLOBALS, LOCALS) except: print(code) raise literal = nodes.literal_block(code, code) literal['language'] = 'python' set_source_info(self, literal) finally: os.chdir(cwd) if 'hidden' not in self.arguments: return [literal] else: return [] class AsdfDirective(Directive): required_arguments = 1 def run(self): filename = self.arguments[0] cwd = os.getcwd() os.chdir(TMPDIR) parts = [] try: ff = AsdfFile() code = AsdfFile._open_impl(ff, filename, _get_yaml_content=True) code = '{0} {1}\n'.format(ASDF_MAGIC, version_string) + code.strip().decode('utf-8') literal = nodes.literal_block(code, code) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) with AsdfFile.open(filename) as ff: for i, block in enumerate(ff.blocks.internal_blocks): data = codecs.encode(block.data.tostring(), 'hex') if len(data) > 40: data = data[:40] + '...'.encode() allocated = block._allocated size = block._size data_size = block._data_size flags = block._flags if flags & BLOCK_FLAG_STREAMED: allocated = size = data_size = 0 lines = [] lines.append('BLOCK {0}:'.format(i)) human_flags = [] for key, val in FLAGS.items(): if flags & key: human_flags.append(val) if len(human_flags): lines.append(' flags: {0}'.format(' | '.join(human_flags))) if block.input_compression: lines.append(' compression: {0}'.format(block.input_compression)) lines.append(' allocated_size: 
{0}'.format(allocated)) lines.append(' used_size: {0}'.format(size)) lines.append(' data_size: {0}'.format(data_size)) lines.append(' data: {0}'.format(data)) code = '\n'.join(lines) literal = nodes.literal_block(code, code) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) internal_blocks = list(ff.blocks.internal_blocks) if (len(internal_blocks) and internal_blocks[-1].array_storage != 'streamed'): buff = io.BytesIO() ff.blocks.write_block_index(buff, ff) block_index = buff.getvalue().decode('utf-8') literal = nodes.literal_block(block_index, block_index) literal['language'] = 'yaml' set_source_info(self, literal) parts.append(literal) finally: os.chdir(cwd) result = nodes.admonition() textnodes, messages = self.state.inline_text(filename, self.lineno) title = nodes.title(filename, '', *textnodes) result += title result += parts return [result] def setup(app): app.add_directive('runcode', RunCodeDirective) app.add_directive('asdf', AsdfDirective) atexit.register(delete_tmpdir) asdf-1.3.3/docs/sphinxext/__init__.py0000644000175000017500000000025413246003441017052 0ustar dandan00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # -*- coding: utf-8 -*- from __future__ import absolute_import, division, unicode_literals, print_function asdf-1.3.3/docs/conf.py0000644000175000017500000001441213243547254014223 0ustar dandan00000000000000# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst # # Astropy documentation build configuration file. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this file. # # All configuration values have a default. Some values are defined in # the global Astropy configuration which is loaded here before anything else. # See astropy.sphinx.conf for which values are set there. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('..')) # IMPORTANT: the above commented section was generated by sphinx-quickstart, but # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left # commented out with this explanation to make it clear why this should not be # done. If the sys.path entry above is added, when the astropy.sphinx.conf # import occurs, it will import the *source* version of astropy instead of the # version installed (if invoked as "make html" or directly with sphinx), or the # version in the build directory (if "python setup.py build_sphinx" is used). # Thus, any C-extensions that are needed to build the documentation will *not* # be accessible, and the documentation will not build correctly. import datetime import os import sys try: import astropy_helpers except ImportError: # Building from inside the docs/ directory? 
if os.path.basename(os.getcwd()) == 'docs': a_h_path = os.path.abspath(os.path.join('..', 'astropy_helpers')) if os.path.isdir(a_h_path): sys.path.insert(1, a_h_path) # Load all of the global Astropy configuration from astropy_helpers.sphinx.conf import * # Get configuration information from setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')]) setup_cfg = dict(conf.items('metadata')) setup_cfg['package_name'] = 'asdf' # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.2' # To perform a Sphinx version check that needs to be more specific than # major.minor, call `check_sphinx_version("x.y.z")` here. # check_sphinx_version("1.2.1") # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns.append('_templates') # This is added to the end of RST files - a good place to put substitutions to # be used globally. rst_epilog += """ """ # -- Project information ------------------------------------------------------ # This does not *have* to match the package name, but typically does project = setup_cfg['package_name'] author = setup_cfg['author'] copyright = '{0}, {1}'.format( datetime.datetime.now().year, setup_cfg['author']) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. __import__(setup_cfg['package_name']) package = sys.modules[setup_cfg['package_name']] # The short X.Y version. version = package.__version__.split('-', 1)[0] # The full version, including alpha/beta/rc tags. release = package.__version__ # -- Options for HTML output --------------------------------------------------- # A NOTE ON HTML THEMES # The global astropy configuration uses a custom theme, 'bootstrap-astropy', # which is installed along with astropy. A different theme can be used or # the options for this theme can be modified by overriding some of the # variables set in the global configuration. The variables set in the # global configuration are listed below, commented out. # Add any paths that contain custom themes here, relative to this directory. # To use a different custom theme, add the directory containing the theme. #html_theme_path = [] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. To override the custom theme, set this to the # name of a builtin theme or the name of a custom theme in html_theme_path. #html_theme = None # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = '' # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '' # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = '{0} v{1}'.format(project, release) # Output file base name for HTML help builder. htmlhelp_basename = project + 'doc' # -- Options for LaTeX output -------------------------------------------------- # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [('index', project + '.tex', project + u' Documentation', author, 'manual')] # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [('index', project.lower(), project + u' Documentation', [author], 1)] ## -- Options for the edit_on_github extension ---------------------------------------- if eval(setup_cfg.get('edit_on_github')): extensions += ['astropy_helpers.sphinx.ext.edit_on_github'] versionmod = __import__(setup_cfg['package_name'] + '.version') edit_on_github_project = setup_cfg['github_project'] if versionmod.version.release: edit_on_github_branch = "v" + versionmod.version.version else: edit_on_github_branch = "master" edit_on_github_source_root = "" edit_on_github_doc_root = "docs" sys.path.insert(0, os.path.join(os.path.dirname('__file__'), 'sphinxext')) extensions += ['example'] asdf-1.3.3/docs/make.bat0000644000175000017500000001064113243547254014331 0ustar dandan00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. 
goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end asdf-1.3.3/docs/Makefile0000644000175000017500000001116413243547254014365 0ustar dandan00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest #This is needed with git because git doesn't create a dir if it's empty $(shell [ -d "_static" ] || mkdir -p _static) help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR) -rm -rf api html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." asdf-1.3.3/docs/index.rst0000644000175000017500000000433113243547254014564 0ustar dandan00000000000000asdf Documentation ==================== ``asdf`` is a tool for reading and writing Advanced Scientific Data Format (ASDF) files. .. note:: This is the **A**\ dvanced **S**\ cientific **D**\ ata **F**\ ormat - if you are looking for the **A**\ daptable **S**\ eismic **D**\ ata **F**\ ormat, go here: http://seismic-data.org/ Installation ------------ ``asdf`` requires: - `python `__ 2.7, 3.3, 3.4 or 3.5. - `numpy `__ 1.6 or later - `jsonschema `__ 2.3.0 or later - `pyyaml `__ 3.10 or later - `six `__ 1.9.0 or later Support for units, time, transform, wcs, or running the tests also requires: - `astropy `__ 1.1 or later Getting Started --------------- The fundamental data model in ASDF is the ``tree``, which is a nested combination of basic data structures: dictionaries, lists, strings and numbers. In addition, ASDF understands how to handle other types, such as Numpy arrays. In the simplest example, you create a tree, and write it to a ASDF file. ``asdf`` handles saving the Numpy array as a binary block transparently: .. runcode:: from asdf import AsdfFile import numpy as np tree = { 'author': 'John Doe', 'my_array': np.random.rand(8, 8) } ff = AsdfFile(tree) ff.write_to("example.asdf") .. asdf:: example.asdf Other :ref:`examples` are provided below. .. toctree:: :maxdepth: 2 asdf/examples.rst asdf/extensions.rst Commandline tool ---------------- ``asdf`` includes a command-line tool, ``asdftool`` that performs a number of basic operations: - ``explode``: Convert a self-contained ASDF file into exploded form. - ``implode``: Convert an ASDF file in exploded form into a self-contained file. - ``to_yaml``: Inline all of the data in an ASDF file so that it is pure YAML. - ``defragment``: Remove unused blocks and extra space. Run ``asdftool --help`` for more information. See also -------- - The `Advanced Scientific Data Format (ASDF) standard `__ Reference/API ------------- .. automodapi:: asdf .. automodapi:: asdf.fits_embed asdf-1.3.3/ez_setup.py0000644000175000017500000002757313243547254014220 0ustar dandan00000000000000#!python """Bootstrap setuptools installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from ez_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. 
""" import os import shutil import sys import tempfile import tarfile import optparse import subprocess import platform from distutils import log try: from site import USER_SITE except ImportError: USER_SITE = None DEFAULT_VERSION = "1.4.2" DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" def _python_cmd(*args): args = (sys.executable,) + args return subprocess.call(args) == 0 def _check_call_py24(cmd, *args, **kwargs): res = subprocess.call(cmd, *args, **kwargs) class CalledProcessError(Exception): pass if not res == 0: msg = "Command '%s' return non-zero exit status %d" % (cmd, res) raise CalledProcessError(msg) vars(subprocess).setdefault('check_call', _check_call_py24) def _install(tarball, install_args=()): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _build_egg(egg, tarball, to_dir): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) finally: os.chdir(old_wd) shutil.rmtree(tmpdir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') def _do_download(version, download_base, to_dir, download_delay): egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' % (version, sys.version_info[0], sys.version_info[1])) if not os.path.exists(egg): tarball = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, tarball, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). if 'pkg_resources' in sys.modules: del sys.modules['pkg_resources'] import setuptools setuptools.bootstrap_install_from = egg def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15): # making sure we use the absolute path to_dir = os.path.abspath(to_dir) was_imported = 'pkg_resources' in sys.modules or \ 'setuptools' in sys.modules try: import pkg_resources except ImportError: return _do_download(version, download_base, to_dir, download_delay) try: pkg_resources.require("setuptools>=" + version) return except pkg_resources.VersionConflict: e = sys.exc_info()[1] if was_imported: sys.stderr.write( "The required version of setuptools (>=%s) is not available,\n" "and can't be installed while this script is running. Please\n" "install a more recent version first, using\n" "'easy_install -U setuptools'." 
"\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell (which will validate trust). Raise an exception if the command cannot complete. """ target = os.path.abspath(target) cmd = [ 'powershell', '-Command', "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), ] _clean_check(cmd, target) def has_powershell(): if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """ Use Python to download the file, even though it cannot authenticate the connection. """ try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen src = dst = None try: src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = src.read() dst = open(target, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = [ download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ] for dl in downloaders: if dl.viable(): return dl def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. 
""" # making sure we use the absolute path to_dir = os.path.abspath(to_dir) tgz_name = "setuptools-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). """ import copy import operator from tarfile import ExtractError directories = [] if members is None: members = self for tarinfo in members: if tarinfo.isdir(): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) tarinfo.mode = 448 # decimal for oct 0700 self.extract(tarinfo, path) # Reverse sort directories. if sys.version_info < (2, 4): def sorter(dir1, dir2): return cmp(dir1.name, dir2.name) directories.sort(sorter) directories.reverse() else: directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: dirpath = os.path.join(path, tarinfo.name) try: self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError: e = sys.exc_info()[1] if self.errorlevel > 1: raise else: self._dbg(1, "tarfile: %s" % e) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package """ install_args = [] if options.user_install: if sys.version_info < (2, 6): log.warn("--user requires Python 2.6 or later") raise SystemExit(1) install_args.append('--user') return install_args def _parse_args(): """ Parse the command line for options """ parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package (requires Python 2.6 or later)') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) options, args = parser.parse_args() # positional arguments are ignored return options def main(version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" options = _parse_args() tarball = download_setuptools(download_base=options.download_base, downloader_factory=options.downloader_factory) return _install(tarball, _build_install_args(options)) if __name__ == '__main__': sys.exit(main())