ccdproc-1.3.0.post1/0000775000175000017500000000000013207623133015655 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/ccdproc/0000775000175000017500000000000013207623133017272 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/ccdproc/ccddata.py0000664000175000017500000010017013207605210021222 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """This module implements the base CCDData class.""" from __future__ import (absolute_import, division, print_function, unicode_literals) import copy import numbers import weakref from collections import OrderedDict import numpy as np from astropy.nddata import (NDDataArray, StdDevUncertainty, NDUncertainty, MissingDataAssociationException) from astropy.io import fits, registry from astropy import units as u from astropy import log from astropy.utils import minversion from astropy.wcs import WCS _ASTROPY_LT_1_2 = not minversion("astropy", "1.2") _ASTROPY_LT_1_3 = not minversion("astropy", "1.3") _ASTROPY_GT_2_0 = minversion("astropy", "2.0") # FIXME: Remove the content of the following "if" as soon as astropy 1.1 isn't # supported anymore. This is just a temporary workaround to fix the memory leak # described in https://github.com/astropy/astropy/issues/4825 if _ASTROPY_LT_1_2: class ParentNDDataDescriptor(object): def __get__(self, obj, objtype=None): message = "uncertainty is not associated with an NDData object." try: if obj._parent_nddata is None: raise MissingDataAssociationException(message) else: # The NDData is saved as weak reference so we must call it # to get the object the reference points to. 
if isinstance(obj._parent_nddata, weakref.ref): return obj._parent_nddata() log.info("parent_nddata should be a weakref to an " "NDData object.") return obj._parent_nddata except AttributeError: raise MissingDataAssociationException(message) def __set__(self, obj, value): if value is not None and not isinstance(value, weakref.ref): # Save a weak reference on the uncertainty that points to this # instance of NDData. Direct references should NOT be used: # https://github.com/astropy/astropy/pull/4799#discussion_r61236832 value = weakref.ref(value) obj._parent_nddata = value # Use the descriptor as parent_nddata property. This only affects # instances created after importing this module. StdDevUncertainty.parent_nddata = ParentNDDataDescriptor() __all__ = ['CCDData', 'fits_ccddata_reader', 'fits_ccddata_writer'] # Global value which can turn on/off the unit requirements when creating a # CCDData. Should be used with care because several functions actually break # if the unit is None! _config_ccd_requires_unit = True if not _ASTROPY_LT_1_2: from astropy.utils.decorators import sharedmethod def _arithmetic(op): """Decorator factory which temporarly disables the need for a unit when creating a new CCDData instance. The final result must have a unit. Parameters ---------- op : function The function to apply. Supported are: - ``np.add`` - ``np.subtract`` - ``np.multiply`` - ``np.true_divide`` Notes ----- Should only be used on CCDData ``add``, ``subtract``, ``divide`` or ``multiply`` because only these methods from NDArithmeticMixin are overwritten. """ def decorator(func): def inner(self, operand, operand2=None, **kwargs): global _config_ccd_requires_unit _config_ccd_requires_unit = False result = self._prepare_then_do_arithmetic(op, operand, operand2, **kwargs) # Wrap it again as CCDData so it checks the final unit. _config_ccd_requires_unit = True return result.__class__(result) inner.__doc__ = ("See `astropy.nddata.NDArithmeticMixin.{}`." 
"".format(func.__name__)) return sharedmethod(inner) return decorator class CCDData(NDDataArray): """A class describing basic CCD data. The CCDData class is based on the NDData object and includes a data array, uncertainty frame, mask frame, meta data, units, and WCS information for a single CCD image. Parameters ----------- data : `~ccdproc.CCDData`-like or `numpy.ndarray`-like The actual data contained in this `~ccdproc.CCDData` object. Note that the data will always be saved by *reference*, so you should make a copy of the ``data`` before passing it in if that's the desired behavior. uncertainty : `~astropy.nddata.StdDevUncertainty`, `numpy.ndarray` or \ None, optional Uncertainties on the data. Default is ``None``. mask : `numpy.ndarray` or None, optional Mask for the data, given as a boolean Numpy array with a shape matching that of the data. The values must be `False` where the data is *valid* and `True` when it is not (like Numpy masked arrays). If ``data`` is a numpy masked array, providing ``mask`` here will causes the mask from the masked array to be ignored. Default is ``None``. flags : `numpy.ndarray` or `~astropy.nddata.FlagCollection` or None, \ optional Flags giving information about each pixel. These can be specified either as a Numpy array of any type with a shape matching that of the data, or as a `~astropy.nddata.FlagCollection` instance which has a shape matching that of the data. Default is ``None``. wcs : `~astropy.wcs.WCS` or None, optional WCS-object containing the world coordinate system for the data. Default is ``None``. meta : dict-like object or None, optional Metadata for this object. "Metadata" here means all information that is included with this object but not part of any other attribute of this particular object, e.g. creation date, unique identifier, simulation parameters, exposure time, telescope name, etc. unit : `~astropy.units.Unit` or str, optional The units of the data. Default is ``None``. .. 
warning:: If the unit is ``None`` or not otherwise specified it will raise a ``ValueError`` Raises ------ ValueError If the ``uncertainty`` or ``mask`` inputs cannot be broadcast (e.g., match shape) onto ``data``. Methods ------- read(\\*args, \\**kwargs) ``Classmethod`` to create an CCDData instance based on a ``FITS`` file. This method uses :func:`fits_ccddata_reader` with the provided parameters. write(\\*args, \\**kwargs) Writes the contents of the CCDData instance into a new ``FITS`` file. This method uses :func:`fits_ccddata_writer` with the provided parameters. Notes ----- `~ccdproc.CCDData` objects can be easily converted to a regular Numpy array using `numpy.asarray`. For example:: >>> from ccdproc import CCDData >>> import numpy as np >>> x = CCDData([1,2,3], unit='adu') >>> np.asarray(x) array([1, 2, 3]) This is useful, for example, when plotting a 2D image using matplotlib. >>> from ccdproc import CCDData >>> from matplotlib import pyplot as plt # doctest: +SKIP >>> x = CCDData([[1,2,3], [4,5,6]], unit='adu') >>> plt.imshow(x) # doctest: +SKIP """ def __init__(self, *args, **kwd): if 'meta' not in kwd: kwd['meta'] = kwd.pop('header', None) if 'header' in kwd: raise ValueError("can't have both header and meta.") super(CCDData, self).__init__(*args, **kwd) # Check if a unit is set. This can be temporarly disabled by the # _CCDDataUnit contextmanager. 
if _config_ccd_requires_unit and self.unit is None: raise ValueError("a unit for CCDData must be specified.") @property def data(self): return self._data @data.setter def data(self, value): self._data = value @property def wcs(self): return self._wcs @wcs.setter def wcs(self, value): self._wcs = value @property def unit(self): return self._unit @unit.setter def unit(self, value): self._unit = u.Unit(value) @property def header(self): return self._meta @header.setter def header(self, value): self.meta = value @property def meta(self): return self._meta @meta.setter def meta(self, value): if value is None: self._meta = OrderedDict() else: if hasattr(value, 'keys'): self._meta = value else: raise TypeError( 'the meta attribute of CCDData must be dict-like.') @property def uncertainty(self): return self._uncertainty @uncertainty.setter def uncertainty(self, value): if value is not None: if isinstance(value, NDUncertainty): if getattr(value, '_parent_nddata', None) is not None: value = value.__class__(value, copy=False) self._uncertainty = value elif isinstance(value, np.ndarray): if value.shape != self.shape: raise ValueError("uncertainty must have same shape as " "data.") self._uncertainty = StdDevUncertainty(value) log.info("array provided for uncertainty; assuming it is a " "StdDevUncertainty.") else: raise TypeError("uncertainty must be an instance of a " "NDUncertainty object or a numpy array.") self._uncertainty.parent_nddata = self else: self._uncertainty = value def to_hdu(self, hdu_mask='MASK', hdu_uncertainty='UNCERT', hdu_flags=None, wcs_relax=True): """Creates an HDUList object from a CCDData object. Parameters ---------- hdu_mask, hdu_uncertainty, hdu_flags : str or None, optional If it is a string append this attribute to the HDUList as `~astropy.io.fits.ImageHDU` with the string as extension name. Flags are not supported at this time. If ``None`` this attribute is not appended. 
Default is ``'MASK'`` for mask, ``'UNCERT'`` for uncertainty and ``None`` for flags. wcs_relax : bool Value of the ``relax`` parameter to use in converting the WCS to a FITS header using `~astropy.wcs.WCS.to_header`. The common ``CTYPE`` ``RA---TAN-SIP`` and ``DEC--TAN-SIP`` requires ``relax=True`` for the ``-SIP`` part of the ``CTYPE`` to be preserved. Raises ------- ValueError - If ``self.mask`` is set but not a `numpy.ndarray`. - If ``self.uncertainty`` is set but not a `~astropy.nddata.StdDevUncertainty`. - If ``self.uncertainty`` is set but has another unit then ``self.data``. NotImplementedError Saving flags is not supported. Returns ------- hdulist : `~astropy.io.fits.HDUList` """ if isinstance(self.header, fits.Header): # Copy here so that we can modify the HDU header by adding WCS # information without changing the header of the CCDData object. header = self.header.copy() else: # Because _insert_in_metadata_fits_safe is written as a method # we need to create a dummy CCDData instance to hold the FITS # header we are constructing. This probably indicates that # _insert_in_metadata_fits_safe should be rewritten in a more # sensible way... dummy_ccd = CCDData([1], meta=fits.Header(), unit="adu") for k, v in self.header.items(): dummy_ccd._insert_in_metadata_fits_safe(k, v) header = dummy_ccd.header if self.unit is not u.dimensionless_unscaled: header['bunit'] = self.unit.to_string() if self.wcs: # Simply extending the FITS header with the WCS can lead to # duplicates of the WCS keywords; iterating over the WCS # header should be safer. # # Turns out if I had read the io.fits.Header.extend docs more # carefully, I would have realized that the keywords exist to # avoid duplicates and preserve, as much as possible, the # structure of the commentary cards. # # Note that until astropy/astropy#3967 is closed, the extend # will fail if there are comment cards in the WCS header but # not header. 
wcs_header = self.wcs.to_header(relax=wcs_relax) header.extend(wcs_header, useblanks=False, update=True) hdus = [fits.PrimaryHDU(self.data, header)] if hdu_mask and self.mask is not None: # Always assuming that the mask is a np.ndarray (check that it has # a 'shape'). if not hasattr(self.mask, 'shape'): raise ValueError('only a numpy.ndarray mask can be saved.') # Convert boolean mask to uint since io.fits cannot handle bool. hduMask = fits.ImageHDU(self.mask.astype(np.uint8), name=hdu_mask) hdus.append(hduMask) if hdu_uncertainty and self.uncertainty is not None: # We need to save some kind of information which uncertainty was # used so that loading the HDUList can infer the uncertainty type. # No idea how this can be done so only allow StdDevUncertainty. if self.uncertainty.__class__.__name__ != 'StdDevUncertainty': raise ValueError('only StdDevUncertainty can be saved.') # Assuming uncertainty is an StdDevUncertainty save just the array # this might be problematic if the Uncertainty has a unit differing # from the data so abort for different units. This is important for # astropy > 1.2 if (hasattr(self.uncertainty, 'unit') and self.uncertainty.unit is not None and self.uncertainty.unit != self.unit): raise ValueError('saving uncertainties with a unit differing' 'from the data unit is not supported.') hduUncert = fits.ImageHDU(self.uncertainty.array, name=hdu_uncertainty) hdus.append(hduUncert) if hdu_flags and self.flags: raise NotImplementedError('adding the flags to a HDU is not ' 'supported at this time.') hdulist = fits.HDUList(hdus) return hdulist def copy(self): """ Return a copy of the CCDData object. """ try: return self.__class__(self, copy=True) except TypeError: new = self.__class__(copy.deepcopy(self)) return new def _ccddata_arithmetic(self, other, operation, scale_uncertainty=False): """ Perform the common parts of arithmetic operations on CCDData objects. This should only be called when ``other`` is a Quantity or a number. 
""" # THE "1 *" IS NECESSARY to get the right result, at least in # astropy-0.4dev. Using the np.multiply, etc, methods with a Unit # and a Quantity is currently broken, but it works with two Quantity # arguments. if isinstance(other, u.Quantity): if (operation.__name__ in ['add', 'subtract'] and self.unit != other.unit): # For addition and subtraction we need to convert the unit # to the same unit otherwise operating on the values alone will # give wrong results (#291) other_value = other.to(self.unit).value else: other_value = other.value elif isinstance(other, numbers.Number): other_value = other else: raise TypeError("cannot do arithmetic with type '{0}' " "and 'CCDData'".format(type(other))) result_unit = operation(1 * self.unit, other).unit result_data = operation(self.data, other_value) if self.uncertainty: result_uncertainty = self.uncertainty.array if scale_uncertainty: result_uncertainty = operation(result_uncertainty, other_value) result_uncertainty = StdDevUncertainty(result_uncertainty) else: result_uncertainty = None new_mask = copy.deepcopy(self.mask) new_meta = copy.deepcopy(self.meta) new_wcs = copy.deepcopy(self.wcs) result = CCDData(result_data, unit=result_unit, mask=new_mask, uncertainty=result_uncertainty, meta=new_meta, wcs=new_wcs) return result def multiply(self, other, compare_wcs='first_found'): if isinstance(other, CCDData): if compare_wcs is None or compare_wcs == 'first_found': tmp_wcs_1, tmp_wcs_2 = self.wcs, other.wcs self.wcs, other.wcs = None, None # Determine the WCS of the result if compare_wcs is None: result_wcs = None else: result_wcs = tmp_wcs_1 if tmp_wcs_1 else tmp_wcs_2 result = super(CCDData, self).multiply(other) result.wcs = result_wcs self.wcs, other.wcs = tmp_wcs_1, tmp_wcs_2 return result else: if hasattr(self, '_arithmetics_wcs'): return super(CCDData, self).multiply( other, compare_wcs=compare_wcs) else: raise ImportError("wcs_compare functionality requires " "astropy 1.2 or greater.") return 
self._ccddata_arithmetic(other, np.multiply, scale_uncertainty=True) def divide(self, other, compare_wcs='first_found'): if isinstance(other, CCDData): if compare_wcs is None or compare_wcs == 'first_found': tmp_wcs_1, tmp_wcs_2 = self.wcs, other.wcs self.wcs, other.wcs = None, None # Determine the WCS of the result if compare_wcs is None: result_wcs = None else: result_wcs = tmp_wcs_1 if tmp_wcs_1 else tmp_wcs_2 result = super(CCDData, self).divide(other) result.wcs = result_wcs self.wcs, other.wcs = tmp_wcs_1, tmp_wcs_2 return result else: if hasattr(self, '_arithmetics_wcs'): return super(CCDData, self).divide( other, compare_wcs=compare_wcs) else: raise ImportError("wcs_compare functionality requires " "astropy 1.2 or greater.") return self._ccddata_arithmetic(other, np.divide, scale_uncertainty=True) def add(self, other, compare_wcs='first_found'): if isinstance(other, CCDData): if compare_wcs is None or compare_wcs == 'first_found': tmp_wcs_1, tmp_wcs_2 = self.wcs, other.wcs self.wcs, other.wcs = None, None # Determine the WCS of the result if compare_wcs is None: result_wcs = None else: result_wcs = tmp_wcs_1 if tmp_wcs_1 else tmp_wcs_2 result = super(CCDData, self).add(other) result.wcs = result_wcs self.wcs, other.wcs = tmp_wcs_1, tmp_wcs_2 return result else: if hasattr(self, '_arithmetics_wcs'): return super(CCDData, self).add( other, compare_wcs=compare_wcs) else: raise ImportError("wcs_compare functionality requires " "astropy 1.2 or greater.") return self._ccddata_arithmetic(other, np.add, scale_uncertainty=False) def subtract(self, other, compare_wcs='first_found'): if isinstance(other, CCDData): if compare_wcs is None or compare_wcs == 'first_found': tmp_wcs_1, tmp_wcs_2 = self.wcs, other.wcs self.wcs, other.wcs = None, None # Determine the WCS of the result if compare_wcs is None: result_wcs = None else: result_wcs = tmp_wcs_1 if tmp_wcs_1 else tmp_wcs_2 result = super(CCDData, self).subtract(other) result.wcs = result_wcs self.wcs, other.wcs = 
tmp_wcs_1, tmp_wcs_2 return result else: if hasattr(self, '_arithmetics_wcs'): return super(CCDData, self).subtract( other, compare_wcs=compare_wcs) else: raise ImportError("wcs_compare functionality requires " "astropy 1.2 or greater.") return self._ccddata_arithmetic(other, np.subtract, scale_uncertainty=False) # Use NDDataArithmetic methods if astropy version is 1.2 or greater if not _ASTROPY_LT_1_2: del add, subtract, divide, multiply, _ccddata_arithmetic add = _arithmetic(np.add)(NDDataArray.add) subtract = _arithmetic(np.subtract)(NDDataArray.subtract) multiply = _arithmetic(np.multiply)(NDDataArray.multiply) divide = _arithmetic(np.true_divide)(NDDataArray.divide) def _insert_in_metadata_fits_safe(self, key, value): """ Insert key/value pair into metadata in a way that FITS can serialize. Parameters ---------- key : str Key to be inserted in dictionary. value : str or None Value to be inserted. Notes ----- This addresses a shortcoming of the FITS standard. There are length restrictions on both the ``key`` (8 characters) and ``value`` (72 characters) in the FITS standard. There is a convention for handline long keywords and a convention for handling long values, but the two conventions cannot be used at the same time. Autologging in `ccdproc` frequently creates keywords/values with this combination. The workaround is to use a shortened name for the keyword. """ from .core import _short_names if key in _short_names and isinstance(self.meta, fits.Header): # This keyword was (hopefully) added by autologging but the # combination of it and its value not FITS-compliant in two # ways: the keyword name may be more than 8 characters and # the value may be too long. FITS cannot handle both of # those problems at once, so this fixes one of those # problems... # Shorten, sort of... 
short_name = _short_names[key] self.meta['HIERARCH {0}'.format(key.upper())] = ( short_name, "Shortened name for ccdproc command") self.meta[short_name] = value else: self.meta[key] = value # This needs to be importable by the tests... _KEEP_THESE_KEYWORDS_IN_HEADER = [ 'JD-OBS', 'MJD-OBS', 'DATE-OBS' ] def _generate_wcs_and_update_header(hdr): """ Generate a WCS object from a header and remove the WCS-specific keywords from the header. Parameters ---------- hdr : astropy.io.fits.header or other dict-like Returns ------- new_header, wcs """ # Try constructing a WCS object. try: wcs = WCS(hdr) except Exception as exc: # Normally WCS only raises Warnings and doesn't fail but in rare # cases (malformed header) it could fail... log.info('An exception happened while extracting WCS informations from ' 'the Header.\n{}: {}'.format(type(exc).__name__, str(exc))) return hdr, None # Test for success by checking to see if the wcs ctype has a non-empty # value, return None for wcs if ctype is empty. if not wcs.wcs.ctype[0]: return hdr, None new_hdr = hdr.copy() # If the keywords below are in the header they are also added to WCS. # It seems like they should *not* be removed from the header, though. wcs_header = wcs.to_header(relax=True) for k in wcs_header: if k not in _KEEP_THESE_KEYWORDS_IN_HEADER: try: new_hdr.remove(k) except KeyError: pass return new_hdr, wcs def fits_ccddata_reader(filename, hdu=0, unit=None, hdu_uncertainty='UNCERT', hdu_mask='MASK', hdu_flags=None, **kwd): """ Generate a CCDData object from a FITS file. Parameters ---------- filename : str Name of fits file. hdu : int, optional FITS extension from which CCDData should be initialized. If zero and and no data in the primary extension, it will search for the first extension with data. The header will be added to the primary header. Default is ``0``. unit : `~astropy.units.Unit`, optional Units of the image data. 
If this argument is provided and there is a unit for the image in the FITS header (the keyword ``BUNIT`` is used as the unit, if present), this argument is used for the unit. Default is ``None``. hdu_uncertainty : str or None, optional FITS extension from which the uncertainty should be initialized. If the extension does not exist the uncertainty of the CCDData is ``None``. Default is ``'UNCERT'``. hdu_mask : str or None, optional FITS extension from which the mask should be initialized. If the extension does not exist the mask of the CCDData is ``None``. Default is ``'MASK'``. hdu_flags : str or None, optional Currently not implemented. Default is ``None``. kwd : Any additional keyword parameters are passed through to the FITS reader in :mod:`astropy.io.fits`; see Notes for additional discussion. Notes ----- FITS files that contained scaled data (e.g. unsigned integer images) will be scaled and the keywords used to manage scaled data in :mod:`astropy.io.fits` are disabled. """ unsupport_open_keywords = { 'do_not_scale_image_data': ('Image data must be scaled to perform ' 'ccdproc operations.'), 'scale_back': 'Scale information is not preserved.' } for key, msg in unsupport_open_keywords.items(): if key in kwd: prefix = 'unsupported keyword: {0}.'.format(key) raise TypeError(' '.join([prefix, msg])) with fits.open(filename, **kwd) as hdus: hdr = hdus[hdu].header if hdu_uncertainty is not None and hdu_uncertainty in hdus: uncertainty = StdDevUncertainty(hdus[hdu_uncertainty].data) else: uncertainty = None if hdu_mask is not None and hdu_mask in hdus: # Mask is saved as uint but we want it to be boolean. mask = hdus[hdu_mask].data.astype(np.bool_) else: mask = None if hdu_flags is not None and hdu_flags in hdus: raise NotImplementedError('loading flags is currently not ' 'supported.') # search for the first instance with data if # the primary header is empty. 
if hdu == 0 and hdus[hdu].data is None: for i in range(len(hdus)): if hdus.fileinfo(i)['datSpan'] > 0: hdu = i comb_hdr = hdus[hdu].header.copy() comb_hdr.extend(hdr, unique=True) hdr = comb_hdr log.info("first HDU with data is extension " "{0}.".format(hdu)) break if 'bunit' in hdr: fits_unit_string = hdr['bunit'] # patch to handle FITS files using ADU for the unit instead of the # standard version of 'adu' if fits_unit_string.strip().lower() == 'adu': fits_unit_string = fits_unit_string.lower() else: fits_unit_string = None if unit is not None and fits_unit_string: log.info("using the unit {0} passed to the FITS reader instead of " "the unit {1} in the FITS file.".format(unit, fits_unit_string)) use_unit = unit or fits_unit_string hdr, wcs = _generate_wcs_and_update_header(hdr) ccd_data = CCDData(hdus[hdu].data, meta=hdr, unit=use_unit, mask=mask, uncertainty=uncertainty, wcs=wcs) return ccd_data def fits_ccddata_writer(ccd_data, filename, hdu_mask='MASK', hdu_uncertainty='UNCERT', hdu_flags=None, **kwd): """ Write CCDData object to FITS file. Parameters ---------- filename : str Name of file. hdu_mask, hdu_uncertainty, hdu_flags : str or None, optional If it is a string append this attribute to the HDUList as `~astropy.io.fits.ImageHDU` with the string as extension name. Flags are not supported at this time. If ``None`` this attribute is not appended. Default is ``'MASK'`` for mask, ``'UNCERT'`` for uncertainty and ``None`` for flags. kwd : All additional keywords are passed to :py:mod:`astropy.io.fits` Raises ------- ValueError - If ``self.mask`` is set but not a `numpy.ndarray`. - If ``self.uncertainty`` is set but not a `~astropy.nddata.StdDevUncertainty`. - If ``self.uncertainty`` is set but has another unit then ``self.data``. NotImplementedError Saving flags is not supported. 
""" hdu = ccd_data.to_hdu(hdu_mask=hdu_mask, hdu_uncertainty=hdu_uncertainty, hdu_flags=hdu_flags) hdu.writeto(filename, **kwd) # This should be be a tuple to ensure it isn't inadvertently changed # elsewhere. _recognized_fits_file_extensions = ('fit', 'fits', 'fts') if _ASTROPY_LT_1_3: def is_fits(origin, filepath, fileobj, *args, **kwargs): """ Wrapper around astropy.io.fits.connect.is_fits that handles the extra extension. Can be removed if fts is added to astropy.io as a recognized FITS extension. """ if ((filepath is not None) and filepath.lower().endswith(('.fts', '.fts.gz'))): return True else: return fits.connect.is_fits(origin, filepath, fileobj, *args, **kwargs) else: is_fits = fits.connect.is_fits if _ASTROPY_LT_1_3: registry.register_reader('fits', CCDData, fits_ccddata_reader) registry.register_writer('fits', CCDData, fits_ccddata_writer) registry.register_identifier('fits', CCDData, is_fits) else: with registry.delay_doc_updates(CCDData): registry.register_reader('fits', CCDData, fits_ccddata_reader) registry.register_writer('fits', CCDData, fits_ccddata_writer) registry.register_identifier('fits', CCDData, is_fits) try: CCDData.read.__doc__ = fits_ccddata_reader.__doc__ except AttributeError: CCDData.read.__func__.__doc__ = fits_ccddata_reader.__doc__ try: CCDData.write.__doc__ = fits_ccddata_writer.__doc__ except AttributeError: CCDData.write.__func__.__doc__ = fits_ccddata_writer.__doc__ # CCDData moved to astropy core so we just import them from there (overwriting) # the classes defined here. 
if _ASTROPY_GT_2_0: from astropy.nddata import fits_ccddata_reader, fits_ccddata_writer, CCDData ccdproc-1.3.0.post1/ccdproc/image_collection.py0000664000175000017500000010524613207605210023145 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (print_function, division, absolute_import, unicode_literals) from collections import OrderedDict import fnmatch from os import listdir, path import logging import numpy as np import numpy.ma as ma from astropy.table import Table, MaskedColumn import astropy.io.fits as fits from astropy.extern import six from astropy.utils import minversion import warnings from astropy.utils.exceptions import AstropyUserWarning from .ccddata import fits_ccddata_reader, _recognized_fits_file_extensions logger = logging.getLogger(__name__) __all__ = ['ImageFileCollection'] __doctest_skip__ = ['*'] _ASTROPY_LT_1_3 = not minversion("astropy", "1.3") class ImageFileCollection(object): """ Representation of a collection of image files. The class offers a table summarizing values of keywords in the FITS headers of the files in the collection and offers convenient methods for iterating over the files in the collection. The generator methods use simple filtering syntax and can automate storage of any FITS files modified in the loop using the generator. Parameters ---------- location : str or None, optional Path to directory containing FITS files. Default is ``None``. keywords : list of str, '*' or None, optional Keywords that should be used as column headings in the summary table. If the value is or includes '*' then all keywords that appear in any of the FITS headers of the files in the collection become table columns. Default value is '*' unless ``info_file`` is specified. Default is ``None``. info_file : str or None, optional Path to file that contains a table of information about FITS files. 
In this case the keywords are set to the names of the columns of the ``info_file`` unless ``keywords`` is explicitly set to a different list. Default is ``None``. .. deprecated:: 1.3 filenames: str, list of str, or None, optional List of the names of FITS files which will be added to the collection. The filenames are assumed to be in ``location``. Default is ``None``. glob_include: str or None, optional Unix-style filename pattern to select filenames to include in the file collection. Can be used in conjunction with ``glob_exclude`` to easily select subsets of files in the target directory. Default is ``None``. glob_exclude: str or None, optional Unix-style filename pattern to select filenames to exclude from the file collection. Can be used in conjunction with ``glob_include`` to easily select subsets of files in the target directory. Default is ``None``. ext: str or int, optional The extension from which the header and data will be read in all files. Default is ``0``. Raises ------ ValueError Raised if keywords are set to a combination of '*' and any other value. 
""" def __init__(self, location=None, keywords=None, info_file=None, filenames=None, glob_include=None, glob_exclude=None, ext=0): if info_file is not None: warnings.warn("The 'info_file' argument is deprecated and will be " "removed in a future version", DeprecationWarning) # Include or exclude files from the collection based on glob pattern # matching - has to go above call to _get_files() if glob_exclude is not None: glob_exclude = str(glob_exclude) # some minimal validation self._glob_exclude = glob_exclude if glob_include is not None: glob_include = str(glob_include) self._glob_include = glob_include self._location = location self._filenames = filenames self._files = [] self._info_file = info_file if location: self._files = self._get_files() if self._files == []: warnings.warn("no FITS files in the collection.", AstropyUserWarning) self._summary = {} if keywords is None: if info_file is not None: # Default to empty list so that keywords will be populated # from table columns names. keywords = [] else: # Otherwise use all keywords. keywords = '*' if info_file is not None: try: info_path = path.join(self.location, info_file) except (AttributeError, TypeError): info_path = info_file try: self._summary = Table.read(info_path, format='ascii', delimiter=',') self._summary = Table(self._summary, masked=True) except IOError: if location: logger.warning('unable to open table file %s, will try ' 'initializing from location instead.', info_path) else: raise # Used internally to keep track of whether the user asked for all # keywords or a specific list. The keywords setter takes care of # actually setting the correct value, this just ensure that there # is always *some* value. 
        self._all_keywords = False

        self._ext = ext

        # Setting the keywords property triggers the (possibly expensive)
        # construction of the summary table, so only do it when asked to.
        if keywords:
            self.keywords = keywords

    def __repr__(self):
        # Build a constructor-style representation, showing only the
        # arguments that differ from their defaults.
        if self.location is None:
            location = ""
        else:
            location = "location={!r}".format(self.location)

        if self._all_keywords:
            kw = ""
        else:
            # The first summary column is always 'file'; skip it so the
            # repr shows only the user-requested keywords.
            kw = "keywords={!r}".format(self.keywords[1:])

        if self._info_file is None:
            infofile = ''
        else:
            infofile = "info_file={!r}".format(self._info_file)

        if self.glob_exclude is None:
            glob_exclude = ''
        else:
            glob_exclude = "glob_exclude={!r}".format(self.glob_exclude)

        if self.glob_include is None:
            glob_include = ''
        else:
            glob_include = "glob_include={!r}".format(self.glob_include)

        # NOTE(review): ext may be a string extension name; comparing to 0
        # only suppresses the default integer extension — confirm intended.
        if self.ext == 0:
            ext = ""
        else:
            ext = "ext={}".format(self.ext)

        if self._filenames is None:
            filenames = ""
        else:
            filenames = "filenames={}".format(self._filenames)

        params = [location, kw, infofile, filenames,
                  glob_include, glob_exclude, ext]
        params = ', '.join([p for p in params if p])

        str_repr = "{self.__class__.__name__}({params})".format(
            self=self, params=params)

        return str_repr

    @property
    def summary(self):
        """
        `~astropy.table.Table` of values of FITS keywords for files in the
        collection.

        Each keyword is a column heading. In addition, there is a column
        called ``file`` that contains the name of the FITS file. The
        directory is not included as part of that name.

        The first column is always named ``file``.

        The order of the remaining columns depends on how the summary was
        constructed.

        If a wildcard, ``*`` was used then the order is the order in which
        the keywords appear in the FITS files from which the summary is
        constructed.

        If an explicit list of keywords was supplied in setting up the
        collection then the order of the columns is the order of the
        keywords.
        """
        return self._summary

    @property
    def summary_info(self):
        """
        `~astropy.table.Table` of values of FITS keywords for files in the
        collection.

        Each keyword is a column heading. In addition, there is a column
        called 'file' that contains the name of the FITS file. The directory
        is not included as part of that name.

        .. deprecated:: 0.4
            Use the ``summary`` attribute instead.
        """
        warnings.warn('"summary_info" is deprecated and will be removed in '
                      'a future version. Use the "summary" attribute instead.',
                      AstropyUserWarning)
        return self._summary

    @property
    def location(self):
        """
        str, Path name to directory containing FITS files.
        """
        return self._location

    @property
    def keywords(self):
        """
        list of str, Keywords currently in the summary table.

        Setting the keywords causes the summary table to be regenerated
        unless the new keywords are a subset of the old.

        .. versionchanged:: 1.3
            Added ``deleter`` for ``keywords`` property.
        """
        if self.summary:
            return self.summary.keys()
        else:
            return []

    @keywords.setter
    def keywords(self, keywords):
        # since keywords are drawn from self.summary, setting
        # summary sets the keywords.
        if keywords is None:
            self._summary = []
            return

        if keywords == '*':
            self._all_keywords = True
        else:
            self._all_keywords = False

        logging.debug('keywords in setter before pruning: %s.', keywords)

        # remove duplicates and force a copy so we can sort the items later
        # by their given position.
        new_keys_set = set(keywords)
        new_keys_lst = list(new_keys_set)
        # 'file' is always present in the summary, so it must be counted
        # when deciding whether the new keys are a subset of the old.
        new_keys_set.add('file')

        logging.debug('keywords after pruning %s.', new_keys_lst)

        current_set = set(self.keywords)
        if new_keys_set.issubset(current_set):
            # The new keywords are already columns of the summary table;
            # just drop the columns that are no longer wanted instead of
            # re-reading every FITS header.
            logging.debug('table columns before trimming: %s.',
                          ' '.join(current_set))
            cut_keys = current_set.difference(new_keys_set)
            logging.debug('will try removing columns: %s.',
                          ' '.join(cut_keys))
            for key in cut_keys:
                self._summary.remove_column(key)
            logging.debug('after removal column names are: %s.',
                          ' '.join(self.keywords))
        else:
            logging.debug('should be building new table...')
            # Reorder the keywords to match the initial ordering.
            new_keys_lst.sort(key=keywords.index)
            self._summary = self._fits_summary(new_keys_lst)

    @keywords.deleter
    def keywords(self):
        # since keywords are drawn from self._summary, setting
        # _summary = [] deletes the keywords.
        self._summary = []

    @property
    def files(self):
        """
        list of str, Unfiltered list of FITS files in location.
        """
        return self._files

    @property
    def glob_include(self):
        """
        str or None, Unix-style filename pattern to select filenames to
        include in the file collection.
        """
        return self._glob_include

    @property
    def glob_exclude(self):
        """
        str or None, Unix-style filename pattern to select filenames to
        exclude in the file collection.
        """
        return self._glob_exclude

    @property
    def ext(self):
        """
        str or int, The extension from which the header and data will
        be read in all files.
        """
        return self._ext

    def values(self, keyword, unique=False):
        """
        List of values for a keyword.

        Parameters
        ----------
        keyword : str
            Keyword (i.e. table column) for which values are desired.

        unique : bool, optional
            If True, return only the unique values for the keyword.
            Default is ``False``.

        Returns
        -------
        list
            Values as a list.

        Raises
        ------
        ValueError
            If ``keyword`` is not a column of the current summary table.
        """
        if keyword not in self.keywords:
            raise ValueError(
                'keyword %s is not in the current summary' % keyword)
        if unique:
            # set() drops duplicates; note this does not preserve the
            # original row order.
            return list(set(self.summary[keyword]))
        else:
            return list(self.summary[keyword])

    def files_filtered(self, **kwd):
        """Determine files whose keywords have listed values.

        Parameters
        ----------
        include_path : bool, keyword-only
            If the keyword ``include_path=True`` is set, the returned list
            contains not just the filename, but the full path to each file.
            Default is ``False``.

        **kwd :
            ``**kwd`` is dict of keywords and values the files must have.
            The value '*' represents any value.  A missing keyword is
            indicated by value ''.

        Returns
        -------
        filenames : list
            The files that satisfy the keyword-value restrictions specified
            by the ``**kwd``.

        Examples
        --------
        Some examples for filtering::

            >>> keys = ['imagetyp','filter']
            >>> collection = ImageFileCollection('test/data', keywords=keys)
            >>> collection.files_filtered(imagetyp='LIGHT', filter='R')
            >>> collection.files_filtered(imagetyp='*', filter='')

        In case you want to filter with keyword names that cannot be used
        as keyword argument name, you have to unpack them using a dictionary.
        For example if a keyword name contains a space or a ``-``::

            >>> add_filters = {'exp-time': 20, 'ESO TPL ID': 1050}
            >>> collection.files_filtered(imagetyp='LIGHT', **add_filters)

        Notes
        -----
        Value comparison is case *insensitive* for strings.
        """
        # force a copy by explicitly converting to a list; otherwise the
        # name below would just alias the mask that the filtering mutates.
        current_file_mask = list(self.summary['file'].mask)

        # 'include_path' is popped so it is not treated as a FITS keyword.
        include_path = kwd.pop('include_path', False)

        # Filtering works by masking the 'file' column in place; restore
        # the saved mask afterwards so the collection state is unchanged.
        self._find_keywords_by_values(**kwd)
        filtered_files = self.summary['file'].compressed()
        self.summary['file'].mask = current_file_mask
        if include_path:
            filtered_files = [path.join(self._location, f)
                              for f in filtered_files]
        return filtered_files

    def refresh(self):
        """
        Refresh the collection by re-reading headers.
        """
        keywords = '*' if self._all_keywords else self.keywords
        # Re-load list of files
        self._files = self._get_files()
        self._summary = self._fits_summary(header_keywords=keywords)

    def sort(self, keys):
        """Sort the list of files to determine the order of iteration.

        Sort the table of files according to one or more keys. This does not
        create a new object, instead is sorts in place.

        Parameters
        ----------
        keys : str, list of str
            The key(s) to order the table by.
        """
        if len(self._summary) > 0:
            self._summary.sort(keys)
            # Keep the cached file list consistent with the new table order.
            self._files = list(self.summary['file'])

    def _get_files(self):
        """ Helper method which checks whether ``files`` should be set
        to a subset of file names or to all file names in a directory.

        Returns
        -------
        files : list or str
            List of file names which will be added to the collection.
""" files = [] if self._filenames: if isinstance(self._filenames, six.string_types): files.append(self._filenames) else: files = self._filenames else: files = self._fits_files_in_directory() if self.glob_include is not None: files = fnmatch.filter(files, self.glob_include) if self.glob_exclude is not None: files = [file for file in files if not fnmatch.fnmatch(file, self.glob_exclude)] return files def _dict_from_fits_header(self, file_name, input_summary=None, missing_marker=None): """ Construct an ordered dictionary whose keys are the header keywords and values are a list of the values from this file and the input dictionary. If the input dictionary is ordered then that order is preserved. Parameters ---------- file_name : str Name of FITS file. input_summary : dict or None, optional Existing dictionary to which new values should be appended. Default is ``None``. missing_marker : any type, optional Fill value for missing header-keywords. Default is ``None``. Returns ------- file_table : `~astropy.table.Table` """ def _add_val_to_dict(key, value, tbl_dict, n_previous, missing_marker): try: tbl_dict[key].append(value) except KeyError: tbl_dict[key] = [missing_marker] * n_previous tbl_dict[key].append(value) if input_summary is None: summary = OrderedDict() n_previous = 0 else: summary = input_summary n_previous = len(summary['file']) h = fits.getheader(file_name, self.ext) assert 'file' not in h # Try opening header before this so that file name is only added if # file is valid FITS try: summary['file'].append(path.basename(file_name)) except KeyError: summary['file'] = [path.basename(file_name)] missing_in_this_file = [k for k in summary if (k not in h and k != 'file')] multi_entry_keys = {'comment': [], 'history': []} alreadyencountered = set() for k, v in six.iteritems(h): if k == '': continue k = k.lower() if k in ['comment', 'history']: multi_entry_keys[k].append(str(v)) # Accumulate these in a separate dictionary until the # end to avoid adding multiple 
entries to summary. continue elif k in alreadyencountered: # The "normal" multi-entries HISTORY, COMMENT and BLANK are # already processed so any further duplication is probably # a mistake. It would lead to problems in ImageFileCollection # to add it as well, so simply ignore those. warnings.warn( 'Header from file "{f}" contains multiple entries for ' '"{k}", the pair "{k}={v}" will be ignored.' ''.format(k=k, v=v, f=file_name), UserWarning) continue else: # Add the key to the already encountered keys so we don't add # it more than once. alreadyencountered.add(k) _add_val_to_dict(k, v, summary, n_previous, missing_marker) for k, v in six.iteritems(multi_entry_keys): if v: joined = ','.join(v) _add_val_to_dict(k, joined, summary, n_previous, missing_marker) for missing in missing_in_this_file: summary[missing].append(missing_marker) return summary def _set_column_name_case_to_match_keywords(self, header_keys, summary_table): for k in header_keys: k_lower = k.lower() if k_lower != k: try: summary_table.rename_column(k_lower, k) except KeyError: pass def _fits_summary(self, header_keywords): """ Generate a summary table of keywords from FITS headers. Parameters ---------- header_keywords : list of str or '*' Keywords whose value should be extracted from FITS headers or '*' to extract all. """ if not self.files: return None # Make sure we have a list...for example, in python 3, dict.keys() # is not a list. original_keywords = list(header_keywords) # Get rid of any duplicate keywords, also forces a copy. 
        header_keys = set(original_keywords)
        header_keys.add('file')

        file_name_column = MaskedColumn(name='file', data=self.files)

        if not header_keys or (header_keys == {'file'}):
            # No keywords requested: the summary is just the file list.
            summary_table = Table(masked=True)
            summary_table.add_column(file_name_column)
            return summary_table

        summary_dict = None
        missing_marker = None

        for file_name in file_name_column:
            file_path = path.join(self.location, file_name)
            try:
                # Note: summary_dict is an OrderedDict, so should preserve
                # the order of the keywords in the FITS header.
                summary_dict = self._dict_from_fits_header(
                    file_path, input_summary=summary_dict,
                    missing_marker=missing_marker)
            except IOError as e:
                # Unreadable / invalid FITS files are skipped, not fatal.
                logger.warning('unable to get FITS header for file %s: %s.',
                               file_path, e)
                continue

        summary_table = Table(summary_dict, masked=True)

        # Mask every cell that still holds the missing marker (identity
        # check, since the marker may be None).
        for column in summary_table.colnames:
            summary_table[column].mask = [
                v is missing_marker for v in summary_table[column]]

        self._set_column_name_case_to_match_keywords(header_keys,
                                                     summary_table)

        # Requested keywords absent from every file still get a (fully
        # masked) column so that downstream filtering works uniformly.
        missing_columns = header_keys - set(summary_table.colnames)
        missing_columns -= {'*'}

        length = len(summary_table)
        for column in missing_columns:
            all_masked = MaskedColumn(name=column, data=np.zeros(length),
                                      mask=np.ones(length))
            summary_table.add_column(all_masked)

        if '*' not in header_keys:
            # Rearrange table columns to match order of keywords.
            # File always comes first.
            header_keys -= {'file'}
            original_order = ['file'] + sorted(header_keys,
                                               key=original_keywords.index)
            summary_table = summary_table[original_order]

        if not summary_table.masked:
            summary_table = Table(summary_table, masked=True)

        return summary_table

    def _find_keywords_by_values(self, **kwd):
        """
        Find files whose keywords have given values.

        `**kwd` is list of keywords and values the files must have.

        The value '*' represents any value.
        A missing keyword is indicated by value ''

        Example::

            >>> keys = ['imagetyp','filter']
            >>> collection = ImageFileCollection('test/data', keywords=keys)
            >>> collection.files_filtered(imagetyp='LIGHT', filter='R')
            >>> collection.files_filtered(imagetyp='*', filter='')

        NOTE: Value comparison is case *insensitive* for strings.
        """
        keywords = kwd.keys()
        values = kwd.values()

        if set(keywords).issubset(self.keywords):
            # we already have the information in memory
            use_info = self.summary
        else:
            # we need to load information about these keywords.
            use_info = self._fits_summary(header_keywords=keywords)

        # Start with everything matching and AND in each keyword test.
        matches = np.ones(len(use_info), dtype=bool)
        for key, value in zip(keywords, values):
            logger.debug('key %s, value %s', key, value)
            logger.debug('value in table %s', use_info[key])
            value_missing = use_info[key].mask
            logger.debug('value missing: %s', value_missing)
            value_not_missing = np.logical_not(value_missing)
            if value == '*':
                # '*' matches any present (unmasked) value.
                have_this_value = value_not_missing
            elif value is not None:
                if isinstance(value, six.string_types):
                    # need to loop explicitly over array rather than using
                    # where to correctly do string comparison.
                    have_this_value = np.zeros(len(use_info), dtype=bool)
                    for idx, file_key_value in enumerate(use_info[key]):
                        if value_not_missing[idx]:
                            try:
                                value_matches = (
                                    file_key_value.lower() == value.lower())
                            except AttributeError:
                                # In case we're dealing with an object column
                                # there could be values other than strings in
                                # it so it could fail with an AttributeError.
                                value_matches = False
                        else:
                            value_matches = False

                        have_this_value[idx] = (value_not_missing[idx] &
                                                value_matches)
                else:
                    # Non-string values can be compared vectorized.
                    # NOTE(review): have_this_value aliases value_not_missing
                    # here, so the in-place assignment below also mutates
                    # value_not_missing. The net result (tmp where present,
                    # False where masked) is still correct because
                    # value_not_missing is not used again this iteration —
                    # confirm before refactoring.
                    have_this_value = value_not_missing
                    tmp = (use_info[key][value_not_missing] == value)
                    have_this_value[value_not_missing] = tmp
                    have_this_value &= value_not_missing
            else:
                # this case--when value==None--is asking for the files which
                # are missing a value for this keyword
                have_this_value = value_missing

            matches &= have_this_value

        # the numpy convention is that the mask is True for values to
        # be omitted, hence use ~matches.
        logger.debug('Matches: %s', matches)
        self.summary['file'].mask = ma.nomask
        self.summary['file'].mask[~matches] = True

    def _fits_files_in_directory(self, extensions=None,
                                 compressed=True):
        """
        Get names of FITS files in directory, based on filename extension.

        Parameters
        ----------
        extensions : list of str or None, optional
            List of filename extensions that are FITS files. Default is
            ``['fit', 'fits', 'fts']``.
            Default is ``None``.

        compressed : bool, optional
            If ``True``, compressed files should be included in the list
            (e.g. `.fits.gz`).
            Default is ``True``.

        Returns
        -------
        list
            *Names* of the files (with extension), not the full pathname.
        """
        full_extensions = extensions or list(_recognized_fits_file_extensions)
        if compressed:
            with_gz = [extension + '.gz' for extension in full_extensions]
            full_extensions.extend(with_gz)

        all_files = listdir(self.location)
        files = []
        for extension in full_extensions:
            files.extend(fnmatch.filter(all_files, '*' + extension))

        files.sort()
        return files

    def _generator(self, return_type,
                   save_with_name="", save_location='',
                   clobber=False,
                   overwrite=False,
                   do_not_scale_image_data=True,
                   return_fname=False,
                   ccd_kwargs=None,
                   **kwd):
        """
        Generator that yields each {name} in the collection.

        If any of the parameters ``save_with_name``, ``save_location`` or
        ``overwrite`` evaluates to ``True`` the generator will write a copy of
        each FITS file it is iterating over. In other words, if
        ``save_with_name`` and/or ``save_location`` is a string with non-zero
        length, and/or ``overwrite`` is ``True``, a copy of each FITS file
        will be made.

        Parameters
        ----------
        save_with_name : str, optional
            string added to end of file name (before extension) if
            FITS file should be saved after iteration. Unless
            ``save_location`` is set, files will be saved to location of
            the source files ``self.location``.
            Default is ``''``.

        save_location : str, optional
            Directory in which to save FITS files; implies that FITS
            files will be saved. Note this provides an easy way to copy a
            directory of files--loop over the {name} with
            ``save_location`` set.
            Default is ``''``.

        overwrite : bool, optional
            If ``True``, overwrite input FITS files.
            Default is ``False``.

        clobber : bool, optional
            Alias for ``overwrite``.
            Default is ``False``.

        do_not_scale_image_data : bool, optional
            If ``True``, prevents fits from scaling images. Default is
            ``{default_scaling}``.
            Default is ``True``.

        return_fname : bool, optional
            If True, return the tuple (header, file_name) instead of just
            header. The file name returned is the name of the file only,
            not the full path to the file.
            Default is ``False``.

        ccd_kwargs : dict, optional
            Dict with parameters for `~astropy.nddata.fits_ccddata_reader`.
            For instance, the key ``'unit'`` can be used to specify the unit
            of the data. If ``'unit'`` is not given then ``'adu'`` is used as
            the default unit.
            See `~astropy.nddata.fits_ccddata_reader` for a complete list of
            parameters that can be passed through ``ccd_kwargs``.

        **kwd :
            Any additional keywords are used to filter the items returned;
            see `files_filtered` examples for details.

        Returns
        -------
        `{return_type}`
            If ``return_fname`` is ``False``, yield the next {name} in the
            collection.

        (`{return_type}`, str)
            If ``return_fname`` is ``True``, yield a tuple of
            ({name}, ``file name``) for the next item in the collection.
""" # store mask so we can reset at end--must COPY, otherwise # current_mask just points to the mask of summary if not self.summary: return current_mask = {} for col in self.summary.columns: current_mask[col] = self.summary[col].mask if kwd: self._find_keywords_by_values(**kwd) ccd_kwargs = ccd_kwargs or {} for full_path in self._paths(): add_kwargs = {'do_not_scale_image_data': do_not_scale_image_data} # We need to open the file here, get the appropriate values and then # close it again before it "yields" otherwise it's not garantueed # that the generator actually advances and closes the file again. # For example if one uses "next" on the generator manually the # file handle could "leak". if return_type == 'header': return_thing = fits.getheader(full_path, self.ext) elif return_type == 'data': return_thing = fits.getdata(full_path, self.ext, **add_kwargs) elif return_type == 'ccd': return_thing = fits_ccddata_reader( full_path, hdu=self.ext, **ccd_kwargs) elif return_type == 'hdu': with fits.open(full_path, **add_kwargs) as hdulist: ext_index = hdulist.index_of(self.ext) # Need to copy the HDU to prevent lazy loading problems # and "IO operations on closed file" errors return_thing = hdulist[ext_index].copy() else: raise ValueError('no generator for {}'.format(return_type)) file_name = path.basename(full_path) if return_fname: yield return_thing, file_name else: yield return_thing if save_location: destination_dir = save_location else: destination_dir = path.dirname(full_path) basename = path.basename(full_path) if save_with_name: base, ext = path.splitext(basename) basename = base + save_with_name + ext new_path = path.join(destination_dir, basename) # I really should have called the option overwrite from # the beginning. The hack below ensures old code works, # at least... 
if clobber or overwrite: if _ASTROPY_LT_1_3: nuke_existing = {'clobber': True} else: nuke_existing = {'overwrite': True} else: nuke_existing = {} if return_type == 'ccd': pass elif (new_path != full_path) or nuke_existing: with fits.open(full_path, **add_kwargs) as hdulist: ext_index = hdulist.index_of(self.ext) if return_type == 'hdu': hdulist[ext_index] = return_thing elif return_type == 'data': hdulist[ext_index].data = return_thing elif return_type == 'header': hdulist[ext_index].header = return_thing try: hdulist.writeto(new_path, **nuke_existing) except IOError: logger.error('error writing file %s', new_path) raise # reset mask for col in self.summary.columns: self.summary[col].mask = current_mask[col] def _paths(self): """ Full path to each file. """ unmasked_files = self.summary['file'].compressed() return [path.join(self.location, file_) for file_ in unmasked_files] def headers(self, do_not_scale_image_data=True, **kwd): return self._generator('header', do_not_scale_image_data=do_not_scale_image_data, **kwd) headers.__doc__ = _generator.__doc__.format( name='header', default_scaling='True', return_type='astropy.io.fits.Header') def hdus(self, do_not_scale_image_data=False, **kwd): return self._generator('hdu', do_not_scale_image_data=do_not_scale_image_data, **kwd) hdus.__doc__ = _generator.__doc__.format( name='HDUList', default_scaling='False', return_type='astropy.io.fits.HDUList') def data(self, do_not_scale_image_data=False, **kwd): return self._generator('data', do_not_scale_image_data=do_not_scale_image_data, **kwd) data.__doc__ = _generator.__doc__.format( name='image', default_scaling='False', return_type='numpy.ndarray') def ccds(self, ccd_kwargs=None, **kwd): if kwd.get('clobber') or kwd.get('overwrite'): raise NotImplementedError( "overwrite=True (or clobber=True) is not supported for CCDs.") return self._generator('ccd', ccd_kwargs=ccd_kwargs, **kwd) ccds.__doc__ = _generator.__doc__.format( name='CCDData', default_scaling='True', 
return_type='astropy.nddata.CCDData') ccdproc-1.3.0.post1/ccdproc/__init__.py0000664000175000017500000000136213207605210021401 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The ccdproc package is a collection of code that will be helpful in basic CCD processing. These steps will allow reduction of basic CCD data as either a stand-alone processing or as part of a pipeline. """ # Affiliated packages may add whatever they like to this file, but # should keep this content at the top. # ---------------------------------------------------------------------------- from ._astropy_init import * # ---------------------------------------------------------------------------- # set up namespace, unless we are in setup... if not _ASTROPY_SETUP_: from .core import * from .ccddata import * from .combiner import * from .image_collection import * ccdproc-1.3.0.post1/ccdproc/conftest.py0000664000175000017500000000200013207605210021455 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # this contains imports plugins that configure py.test for astropy tests. # by importing them here in conftest.py they are discoverable by py.test # no matter how it is invoked within the source tree. import os from astropy.tests.pytest_plugins import * from .tests.pytest_fixtures import * # This is to figure out ccdproc version, rather than using Astropy's try: from .version import version except ImportError: version = 'dev' packagename = os.path.basename(os.path.dirname(__file__)) TESTED_VERSIONS[packagename] = version # Uncomment the following line to treat all DeprecationWarnings as # exceptions # enable_deprecations_as_exceptions() # Add astropy to test header information and remove unused packages. 
try: PYTEST_HEADER_MODULES['Astropy'] = 'astropy' PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy' PYTEST_HEADER_MODULES['reproject'] = 'reproject' del PYTEST_HEADER_MODULES['h5py'] except KeyError: pass ccdproc-1.3.0.post1/ccdproc/core.py0000664000175000017500000020413013207617404020600 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """This module implements the base CCDPROC functions""" from __future__ import (absolute_import, division, print_function, unicode_literals) import numbers import numpy as np import math from astropy.extern import six from astropy.units.quantity import Quantity from astropy import units as u from astropy.modeling import fitting from astropy import stats from astropy.nddata import utils as nddata_utils from astropy.nddata import StdDevUncertainty from astropy.wcs.utils import proj_plane_pixel_area from astropy.utils import deprecated import astropy # To get the version. from scipy import ndimage from .ccddata import CCDData from .utils.slices import slice_from_string from .log_meta import log_to_metadata from .extern.bitfield import bitfield_to_boolean_mask as _bitfield_to_boolean_mask __all__ = ['background_deviation_box', 'background_deviation_filter', 'ccd_process', 'cosmicray_median', 'cosmicray_lacosmic', 'create_deviation', 'flat_correct', 'gain_correct', 'rebin', 'sigma_func', 'subtract_bias', 'subtract_dark', 'subtract_overscan', 'transform_image', 'trim_image', 'wcs_project', 'Keyword', 'median_filter', 'ccdmask', 'bitfield_to_boolean_mask'] # The dictionary below is used to translate actual function names to names # that are FITS compliant, i.e. 8 characters or less. 
# Map long processing-function names to FITS-compliant (<= 8 character)
# short names used when logging to image metadata.
_short_names = {
    'background_deviation_box': 'bakdevbx',
    'background_deviation_filter': 'bakdfilt',
    'ccd_process': 'ccdproc',
    'cosmicray_median': 'crmedian',
    'create_deviation': 'creatvar',
    'flat_correct': 'flatcor',
    'gain_correct': 'gaincor',
    'subtract_bias': 'subbias',
    'subtract_dark': 'subdark',
    'subtract_overscan': 'suboscan',
    'trim_image': 'trimim',
    'transform_image': 'tranim',
    'wcs_project': 'wcsproj'
}


@log_to_metadata
def ccd_process(ccd, oscan=None, trim=None, error=False, master_bias=None,
                dark_frame=None, master_flat=None, bad_pixel_mask=None,
                gain=None, readnoise=None, oscan_median=True, oscan_model=None,
                min_value=None, dark_exposure=None, data_exposure=None,
                exposure_key=None, exposure_unit=None,
                dark_scale=False, gain_corrected=True):
    """Perform basic processing on ccd data.

    The following steps can be included:

    * overscan correction (:func:`subtract_overscan`)
    * trimming of the image (:func:`trim_image`)
    * create deviation frame (:func:`create_deviation`)
    * gain correction (:func:`gain_correct`)
    * add a mask to the data
    * subtraction of master bias (:func:`subtract_bias`)
    * subtraction of a dark frame (:func:`subtract_dark`)
    * correction of flat field (:func:`flat_correct`)

    The task returns a processed `~astropy.nddata.CCDData` object.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Frame to be reduced.

    oscan : `~astropy.nddata.CCDData`, str or None, optional
        For no overscan correction, set to None. Otherwise provide a region
        of ccd from which the overscan is extracted, using the FITS
        conventions for index order and index start, or a
        slice from ccd that contains the overscan.
        Default is ``None``.

    trim : str or None, optional
        For no trim correction, set to None. Otherwise provide a region
        of ccd from which the image should be trimmed, using the FITS
        conventions for index order and index start.
        Default is ``None``.

    error : bool, optional
        If True, create an uncertainty array for ccd.
        Default is ``False``.

    master_bias : `~astropy.nddata.CCDData` or None, optional
        A master bias frame to be subtracted from ccd. The unit of the
        master bias frame should match the unit of the image **after
        gain correction** if ``gain_corrected`` is True.
        Default is ``None``.

    dark_frame : `~astropy.nddata.CCDData` or None, optional
        A dark frame to be subtracted from the ccd. The unit of the
        master dark frame should match the unit of the image **after
        gain correction** if ``gain_corrected`` is True.
        Default is ``None``.

    master_flat : `~astropy.nddata.CCDData` or None, optional
        A master flat frame to be divided into ccd. The unit of the
        master flat frame should match the unit of the image **after
        gain correction** if ``gain_corrected`` is True.
        Default is ``None``.

    bad_pixel_mask : `numpy.ndarray` or None, optional
        A bad pixel mask for the data. The bad pixel mask should be in given
        such that bad pixels have a value of 1 and good pixels a value of 0.
        Default is ``None``.

    gain : `~astropy.units.Quantity` or None, optional
        Gain value to multiple the image by to convert to electrons.
        Default is ``None``.

    readnoise : `~astropy.units.Quantity` or None, optional
        Read noise for the observations. The read noise should be in
        electrons.
        Default is ``None``.

    oscan_median : bool, optional
        If true, takes the median of each line. Otherwise, uses the mean.
        Default is ``True``.

    oscan_model : `~astropy.modeling.Model` or None, optional
        Model to fit to the data. If None, returns the values calculated
        by the median or the mean.
        Default is ``None``.

    min_value : float or None, optional
        Minimum value for flat field. The value can either be None and no
        minimum value is applied to the flat or specified by a float which
        will replace all values in the flat by the min_value.
        Default is ``None``.

    dark_exposure : `~astropy.units.Quantity` or None, optional
        Exposure time of the dark image; if specified, must also provided
        ``data_exposure``.
        Default is ``None``.

    data_exposure : `~astropy.units.Quantity` or None, optional
        Exposure time of the science image; if specified, must also provided
        ``dark_exposure``.
        Default is ``None``.

    exposure_key : `~ccdproc.Keyword`, str or None, optional
        Name of key in image metadata that contains exposure time.
        Default is ``None``.

    exposure_unit : `~astropy.units.Unit` or None, optional
        Unit of the exposure time if the value in the meta data does not
        include a unit.
        Default is ``None``.

    dark_scale : bool, optional
        If True, scale the dark frame by the exposure times.
        Default is ``False``.

    gain_corrected : bool, optional
        If True, the ``master_bias``, ``master_flat``, and ``dark_frame``
        have already been gain corrected.
        Default is ``True``.

    Returns
    -------
    occd : `~astropy.nddata.CCDData`
        Reduced ccd.

    Examples
    --------
    1. To overscan, trim and gain correct a data set::

        >>> import numpy as np
        >>> from astropy import units as u
        >>> from ccdproc import CCDData
        >>> from ccdproc import ccd_process
        >>> ccd = CCDData(np.ones([100, 100]), unit=u.adu)
        >>> nccd = ccd_process(ccd, oscan='[1:10,1:100]',
        ...                    trim='[10:100, 1:100]', error=False,
        ...                    gain=2.0*u.electron/u.adu)
    """
    # make a copy of the object
    nccd = ccd.copy()

    # apply the overscan correction
    if isinstance(oscan, CCDData):
        nccd = subtract_overscan(nccd, overscan=oscan, median=oscan_median,
                                 model=oscan_model)
    elif isinstance(oscan, six.string_types):
        nccd = subtract_overscan(nccd, fits_section=oscan,
                                 median=oscan_median, model=oscan_model)
    elif oscan is None:
        pass
    else:
        raise TypeError('oscan is not None, a string, or CCDData object.')

    # apply the trim correction
    if isinstance(trim, six.string_types):
        nccd = trim_image(nccd, fits_section=trim)
    elif trim is None:
        pass
    else:
        raise TypeError('trim is not None or a string.')

    # create the error frame
    if error and gain is not None and readnoise is not None:
        nccd = create_deviation(nccd, gain=gain, readnoise=readnoise)
    elif error and (gain is None or readnoise is None):
        raise ValueError(
            'gain and readnoise must be specified to create error frame.')

    # apply the bad pixel mask
    if isinstance(bad_pixel_mask, np.ndarray):
        nccd.mask = bad_pixel_mask
    elif bad_pixel_mask is None:
        pass
    else:
        raise TypeError('bad_pixel_mask is not None or numpy.ndarray.')

    # apply the gain correction
    if not (gain is None or isinstance(gain, Quantity)):
        raise TypeError('gain is not None or astropy.units.Quantity.')

    # Gain-correct now only when the calibration frames are already in
    # gain-corrected units; otherwise it happens after calibration below.
    if gain is not None and gain_corrected:
        nccd = gain_correct(nccd, gain)

    # subtracting the master bias
    if isinstance(master_bias, CCDData):
        nccd = subtract_bias(nccd, master_bias)
    elif master_bias is None:
        pass
    else:
        raise TypeError(
            'master_bias is not None or a CCDData object.')

    # subtract the dark frame
    if isinstance(dark_frame, CCDData):
        nccd = subtract_dark(nccd, dark_frame, dark_exposure=dark_exposure,
                             data_exposure=data_exposure,
                             exposure_time=exposure_key,
                             exposure_unit=exposure_unit,
                             scale=dark_scale)
    elif dark_frame is None:
        pass
    else:
        raise TypeError(
            'dark_frame is not None or a CCDData object.')

    # test dividing the master flat
    if isinstance(master_flat, CCDData):
        nccd = flat_correct(nccd, master_flat, min_value=min_value)
    elif master_flat is None:
        pass
    else:
        raise TypeError(
            'master_flat is not None or a CCDData object.')

    # apply the gain correction only at the end if gain_corrected is False
    if gain is not None and not gain_corrected:
        nccd = gain_correct(nccd, gain)

    return nccd


@log_to_metadata
def create_deviation(ccd_data, gain=None, readnoise=None):
    """
    Create a uncertainty frame. The function will update the uncertainty
    plane which gives the standard deviation for the data. Gain is used in
    this function only to scale the data in constructing the deviation; the
    data is not scaled.

    Parameters
    ----------
    ccd_data : `~astropy.nddata.CCDData`
        Data whose deviation will be calculated.

    gain : `~astropy.units.Quantity` or None, optional
        Gain of the CCD; necessary only if ``ccd_data`` and ``readnoise``
        are not in the same units. In that case, the units of ``gain``
        should be those that convert ``ccd_data.data`` to the same units as
        ``readnoise``.
        Default is ``None``.

    readnoise : `~astropy.units.Quantity` or None, optional
        Read noise per pixel.
        Default is ``None``.

    {log}

    Raises
    ------
    UnitsError
        Raised if ``readnoise`` units are not equal to product of ``gain``
        and ``ccd_data`` units.

    Returns
    -------
    ccd : `~astropy.nddata.CCDData`
        CCDData object with uncertainty created; uncertainty is in the same
        units as the data in the parameter ``ccd_data``.
    """
    if gain is not None and not isinstance(gain, Quantity):
        raise TypeError('gain must be a astropy.units.Quantity.')

    if readnoise is None:
        raise ValueError('must provide a readnoise.')

    if not isinstance(readnoise, Quantity):
        raise TypeError('readnoise must be a astropy.units.Quantity.')

    if gain is None:
        # No gain means the data is already in readnoise-compatible units.
        gain = 1.0 * u.dimensionless_unscaled

    if gain.unit * ccd_data.unit != readnoise.unit:
        raise u.UnitsError("units of data, gain and readnoise do not match.")

    # Need to convert Quantity to plain number because NDData data is not
    # a Quantity. All unit checking should happen prior to this point.
gain_value = float(gain / gain.unit) readnoise_value = float(readnoise / readnoise.unit) var = (gain_value * ccd_data.data + readnoise_value ** 2) ** 0.5 ccd = ccd_data.copy() # ensure uncertainty and image data have same unit var /= gain_value ccd.uncertainty = StdDevUncertainty(var) return ccd @log_to_metadata def subtract_overscan(ccd, overscan=None, overscan_axis=1, fits_section=None, median=False, model=None): """ Subtract the overscan region from an image. Parameters ---------- ccd : `~astropy.nddata.CCDData` Data to have overscan frame corrected. overscan : `~astropy.nddata.CCDData` or None, optional Slice from ``ccd`` that contains the overscan. Must provide either this argument or ``fits_section``, but not both. Default is ``None``. overscan_axis : 0, 1 or None, optional Axis along which overscan should combined with mean or median. Axis numbering follows the *python* convention for ordering, so 0 is the first axis and 1 is the second axis. If overscan_axis is explicitly set to None, the axis is set to the shortest dimension of the overscan section (or 1 in case of a square overscan). Default is ``1``. fits_section : str or None, optional Region of ``ccd`` from which the overscan is extracted, using the FITS conventions for index order and index start. See Notes and Examples below. Must provide either this argument or ``overscan``, but not both. Default is ``None``. median : bool, optional If true, takes the median of each line. Otherwise, uses the mean. Default is ``False``. model : `~astropy.modeling.Model` or None, optional Model to fit to the data. If None, returns the values calculated by the median or the mean. Default is ``None``. {log} Raises ------ TypeError A TypeError is raised if either ``ccd`` or ``overscan`` are not the correct objects. Returns ------- ccd : `~astropy.nddata.CCDData` CCDData object with overscan subtracted. 
@log_to_metadata
def subtract_overscan(ccd, overscan=None, overscan_axis=1, fits_section=None,
                      median=False, model=None):
    """
    Subtract the overscan region from an image.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Data to have overscan frame corrected.

    overscan : `~astropy.nddata.CCDData` or None, optional
        Slice from ``ccd`` that contains the overscan. Must provide either
        this argument or ``fits_section``, but not both.
        Default is ``None``.

    overscan_axis : 0, 1 or None, optional
        Axis along which overscan should combined with mean or median. Axis
        numbering follows the *python* convention for ordering, so 0 is the
        first axis and 1 is the second axis.

        If overscan_axis is explicitly set to None, the axis is set to
        the shortest dimension of the overscan section (or 1 in case
        of a square overscan).
        Default is ``1``.

    fits_section : str or None, optional
        Region of ``ccd`` from which the overscan is extracted, using the FITS
        conventions for index order and index start. See Notes and Examples
        below. Must provide either this argument or ``overscan``, but not
        both.
        Default is ``None``.

    median : bool, optional
        If true, takes the median of each line. Otherwise, uses the mean.
        Default is ``False``.

    model : `~astropy.modeling.Model` or None, optional
        Model to fit to the data. If None, returns the values calculated by
        the median or the mean.
        Default is ``None``.

    {log}

    Raises
    ------
    TypeError
        A TypeError is raised if either ``ccd`` or ``overscan`` are not the
        correct objects.

    Returns
    -------
    ccd : `~astropy.nddata.CCDData`
        CCDData object with overscan subtracted.

    Notes
    -----
    The format of the ``fits_section`` string follow the rules for slices
    that are consistent with the FITS standard (v3) and IRAF usage of
    keywords like TRIMSEC and BIASSEC. Its indexes are one-based, instead
    of the python-standard zero-based, and the first index is the one that
    increases most rapidly as you move through the array in memory order,
    opposite the python ordering.

    The 'fits_section' argument is provided as a convenience for those who
    are processing files that contain TRIMSEC and BIASSEC. The preferred,
    more pythonic, way of specifying the overscan is to do it by indexing
    the data array directly with the ``overscan`` argument.

    Examples
    --------
    Creating a 100x100 array containing ones just for demonstration
    purposes::

        >>> import numpy as np
        >>> from astropy import units as u
        >>> arr1 = CCDData(np.ones([100, 100]), unit=u.adu)

    The statement below uses all rows of columns 90 through 99 as the
    overscan::

        >>> no_scan = subtract_overscan(arr1, overscan=arr1[:, 90:100])
        >>> assert (no_scan.data == 0).all()

    This statement does the same as the above, but with a FITS-style
    section::

        >>> no_scan = subtract_overscan(arr1, fits_section='[91:100, :]')
        >>> assert (no_scan.data == 0).all()

    Spaces are stripped out of the ``fits_section`` string.
    """
    if not (isinstance(ccd, CCDData) or isinstance(ccd, np.ndarray)):
        raise TypeError('ccddata is not a CCDData or ndarray object.')

    if ((overscan is not None and fits_section is not None) or
            (overscan is None and fits_section is None)):
        raise TypeError('specify either overscan or fits_section, but not '
                        'both.')

    if (overscan is not None) and (not isinstance(overscan, CCDData)):
        raise TypeError('overscan is not a CCDData object.')

    if (fits_section is not None and
            not isinstance(fits_section, six.string_types)):
        # BUG FIX: this message used to read 'overscan is not a string.',
        # naming the wrong argument.
        raise TypeError('fits_section is not a string.')

    if fits_section is not None:
        overscan = ccd[slice_from_string(fits_section, fits_convention=True)]

    if overscan_axis is None:
        # Auto-detect: combine along the longer dimension of the section.
        overscan_axis = 0 if overscan.shape[1] > overscan.shape[0] else 1

    if median:
        oscan = np.median(overscan.data, axis=overscan_axis)
    else:
        oscan = np.mean(overscan.data, axis=overscan_axis)

    if model is not None:
        # Fit the requested model to the collapsed overscan and evaluate it,
        # smoothing the per-line values before subtraction.
        of = fitting.LinearLSQFitter()
        yarr = np.arange(len(oscan))
        oscan = of(model, yarr, oscan)
        oscan = oscan(yarr)
        if overscan_axis == 1:
            oscan = np.reshape(oscan, (oscan.size, 1))
        else:
            oscan = np.reshape(oscan, (1, oscan.size))
    else:
        # Reshape so the collapsed overscan broadcasts against the image.
        if overscan_axis == 1:
            oscan = np.reshape(oscan, oscan.shape + (1,))
        else:
            oscan = np.reshape(oscan, (1,) + oscan.shape)

    subtracted = ccd.copy()

    # subtract the overscan
    subtracted.data = ccd.data - oscan

    return subtracted
@log_to_metadata
def trim_image(ccd, fits_section=None):
    """
    Trim the image to the dimensions indicated.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        CCD image to be trimmed, sliced if desired.

    fits_section : str or None, optional
        Region of ``ccd`` from which the overscan is extracted; see
        `~ccdproc.subtract_overscan` for details.
        Default is ``None``.

    {log}

    Returns
    -------
    trimmed_ccd : `~astropy.nddata.CCDData`
        Trimmed image.

    Examples
    --------
    Given an array that is 100x100,

    >>> import numpy as np
    >>> from astropy import units as u
    >>> arr1 = CCDData(np.ones([100, 100]), unit=u.adu)

    the syntax for trimming this to keep all of the first index but only the
    first 90 rows of the second index is

    >>> trimmed = trim_image(arr1[:, :90])
    >>> trimmed.shape
    (100, 90)
    >>> trimmed.data[0, 0] = 2
    >>> arr1.data[0, 0]
    1.0

    This both trims *and makes a copy* of the image.

    Indexing the image directly does *not* do the same thing, quite:

    >>> not_really_trimmed = arr1[:, :90]
    >>> not_really_trimmed.data[0, 0] = 2
    >>> arr1.data[0, 0]
    2.0

    In this case, ``not_really_trimmed`` is a view of the underlying array
    ``arr1``, not a copy.
    """
    if (fits_section is not None and
            not isinstance(fits_section, six.string_types)):
        raise TypeError("fits_section must be a string.")

    # Copy first so the result never shares memory with the input.
    trimmed = ccd.copy()
    if not fits_section:
        return trimmed
    return trimmed[slice_from_string(fits_section, fits_convention=True)]


@log_to_metadata
def subtract_bias(ccd, master):
    """
    Subtract master bias from image.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Image from which bias will be subtracted.

    master : `~astropy.nddata.CCDData`
        Master image to be subtracted from ``ccd``.

    {log}

    Returns
    -------
    result : `~astropy.nddata.CCDData`
        CCDData object with bias subtracted.
    """
    try:
        result = ccd.subtract(master)
    except ValueError as err:
        # Re-raise anything that is not the unit-mismatch ValueError; turn
        # that one into a clearer UnitsError.
        if 'operand units' not in str(err):
            raise err
        raise u.UnitsError("Unit '{}' of the uncalibrated image does not "
                           "match unit '{}' of the calibration "
                           "image".format(ccd.unit, master.unit))

    result.meta = ccd.meta.copy()
    return result
@log_to_metadata
def subtract_dark(ccd, master, dark_exposure=None, data_exposure=None,
                  exposure_time=None, exposure_unit=None,
                  scale=False):
    """
    Subtract dark current from an image.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Image from which dark will be subtracted.

    master : `~astropy.nddata.CCDData`
        Dark image.

    dark_exposure : `~astropy.units.Quantity` or None, optional
        Exposure time of the dark image; if specified, must also provided
        ``data_exposure``.
        Default is ``None``.

    data_exposure : `~astropy.units.Quantity` or None, optional
        Exposure time of the science image; if specified, must also provided
        ``dark_exposure``.
        Default is ``None``.

    exposure_time : str or `~ccdproc.Keyword` or None, optional
        Name of key in image metadata that contains exposure time.
        Default is ``None``.

    exposure_unit : `~astropy.units.Unit` or None, optional
        Unit of the exposure time if the value in the meta data does not
        include a unit.
        Default is ``None``.

    scale : bool, optional
        If True, scale the dark frame by the exposure times.
        Default is ``False``.

    {log}

    Returns
    -------
    result : `~astropy.nddata.CCDData`
        Dark-subtracted image.
    """
    if not (isinstance(ccd, CCDData) and isinstance(master, CCDData)):
        raise TypeError("ccd and master must both be CCDData objects.")

    if (data_exposure is not None and
            dark_exposure is not None and
            exposure_time is not None):
        raise TypeError("specify either exposure_time or "
                        "(dark_exposure and data_exposure), not both.")

    if data_exposure is None and dark_exposure is None:
        # Neither exposure was given explicitly, so both must come from the
        # image headers via the exposure_time key.
        if exposure_time is None:
            raise TypeError("must specify either exposure_time or both "
                            "dark_exposure and data_exposure.")
        if isinstance(exposure_time, Keyword):
            data_exposure = exposure_time.value_from(ccd.header)
            dark_exposure = exposure_time.value_from(master.header)
        else:
            data_exposure = ccd.header[exposure_time]
            dark_exposure = master.header[exposure_time]

    if not (isinstance(dark_exposure, Quantity) and
            isinstance(data_exposure, Quantity)):
        # Header-derived exposures are plain numbers and need a unit
        # attached; explicitly passed exposures must already be Quantities.
        if exposure_time:
            try:
                data_exposure *= exposure_unit
                dark_exposure *= exposure_unit
            except TypeError:
                raise TypeError("must provide unit for exposure time.")
        else:
            raise TypeError("exposure times must be astropy.units.Quantity "
                            "objects.")

    try:
        if scale:
            # Scale the dark by the ratio of exposure times before the
            # subtraction; both exposures are Quantity objects here, so
            # multiply handles the scaling.
            scaled_master = master.copy()
            scaled_master = scaled_master.multiply(data_exposure /
                                                   dark_exposure)
            result = ccd.subtract(scaled_master)
        else:
            result = ccd.subtract(master)
    except (u.UnitsError, u.UnitConversionError, ValueError) as err:
        # Astropy LTS (v1) returns a ValueError, not a UnitsError, so catch
        # that if it appears to really be a UnitsError.
        if (isinstance(err, ValueError) and
                'operand units' not in str(err) and
                astropy.__version__.startswith('1.0')):
            raise err

        # Make the error message a little more explicit than what is
        # returned by default.
        raise u.UnitsError("Unit '{}' of the uncalibrated image does not "
                           "match unit '{}' of the calibration "
                           "image".format(ccd.unit, master.unit))

    result.meta = ccd.meta.copy()
    return result


@log_to_metadata
def gain_correct(ccd, gain, gain_unit=None):
    """Correct the gain in the image.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Data to have gain corrected.

    gain : `~astropy.units.Quantity` or `~ccdproc.Keyword`
        gain value for the image expressed in electrons per adu.

    gain_unit : `~astropy.units.Unit` or None, optional
        Unit for the ``gain``; used only if ``gain`` itself does not provide
        units.
        Default is ``None``.

    {log}

    Returns
    -------
    result : `~astropy.nddata.CCDData`
        CCDData object with gain corrected.
    """
    # Resolve the gain to a value we can multiply by, whatever form the
    # caller provided it in.
    if isinstance(gain, Keyword):
        effective_gain = gain.value_from(ccd.header)
    elif isinstance(gain, numbers.Number) and gain_unit is not None:
        effective_gain = gain * u.Unit(gain_unit)
    else:
        effective_gain = gain

    result = ccd.multiply(effective_gain)
    result.meta = ccd.meta.copy()
    return result
@log_to_metadata
def flat_correct(ccd, flat, min_value=None, norm_value=None):
    """Correct the image for flat fielding.

    The flat field image is normalized by its mean or a user-supplied value
    before flat correcting.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Data to be transformed.

    flat : `~astropy.nddata.CCDData`
        Flatfield to apply to the data.

    min_value : float or None, optional
        Minimum value for flat field. If not ``None``, every value in the
        flat that is below ``min_value`` is replaced by ``min_value`` (the
        input flat is not modified; a copy is used).
        Default is ``None``.

    norm_value : float or None, optional
        If not ``None``, normalize flat field by this argument rather than
        the mean of the image. This allows fixing several different flat
        fields to have the same scale. If this value is negative or 0, a
        ``ValueError`` is raised.
        Default is ``None``.

    {log}

    Returns
    -------
    ccd : `~astropy.nddata.CCDData`
        CCDData object with flat corrected.
    """
    # Clip low pixels in the flat if requested; work on a copy so the
    # caller's flat is untouched.
    use_flat = flat
    if min_value is not None:
        clipped_flat = flat.copy()
        clipped_flat.data[clipped_flat.data < min_value] = min_value
        use_flat = clipped_flat

    # Pick the normalization: a positive user value, or the flat's own mean.
    if norm_value is not None and norm_value > 0:
        flat_mean_val = norm_value
    elif norm_value is not None:
        # norm_value was set to a bad value
        raise ValueError('norm_value must be greater than zero.')
    else:
        flat_mean_val = use_flat.data.mean()

    # Attach the flat's unit so dividing by the normalized flat leaves the
    # image units unchanged.
    flat_mean = flat_mean_val * use_flat.unit
    flat_normed = use_flat.divide(flat_mean)

    # divide through the flat
    flat_corrected = ccd.divide(flat_normed)

    flat_corrected.meta = ccd.meta.copy()
    return flat_corrected
@log_to_metadata
def transform_image(ccd, transform_func, **kwargs):
    """Transform the image.

    Using the function specified by transform_func, the transform will
    be applied to data, uncertainty, and mask in ccd.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Data to be transformed.

    transform_func : callable
        Function to be used to transform the data, mask and uncertainty.

    kwargs :
        Additional keyword arguments to be used by the transform_func.

    {log}

    Returns
    -------
    ccd : `~astropy.nddata.CCDData`
        A transformed CCDData object.

    Notes
    -----
    At this time, transform will be applied to the uncertainty data but it
    will only transform the data. This will not properly handle
    uncertainties that arise due to correlation between the pixels.

    These should only be geometric transformations of the images. Other
    methods should be used if the units of ccd need to be changed.

    Examples
    --------
    Given an array that is 100x100::

        >>> import numpy as np
        >>> from astropy import units as u
        >>> arr1 = CCDData(np.ones([100, 100]), unit=u.adu)

    The syntax for transforming the array using
    `scipy.ndimage.shift`::

        >>> from scipy.ndimage.interpolation import shift
        >>> from ccdproc import transform_image
        >>> transformed = transform_image(arr1, shift, shift=(5.5, 8.1))
    """
    if not isinstance(ccd, CCDData):
        raise TypeError('ccd is not a CCDData.')

    nccd = ccd.copy()

    # Transform the data plane; turn a "not callable" TypeError into a
    # clearer message but let any other TypeError propagate untouched.
    try:
        nccd.data = transform_func(nccd.data, **kwargs)
    except TypeError as exc:
        if 'is not callable' in str(exc):
            raise TypeError('transform_func is not a callable.')
        raise

    # Apply the same transform to the uncertainty plane if it exists.
    if nccd.uncertainty is not None:
        nccd.uncertainty.array = transform_func(nccd.uncertainty.array,
                                                **kwargs)

    # Transform the mask and re-binarize, since interpolation can produce
    # fractional values.
    if nccd.mask is not None:
        nccd.mask = transform_func(nccd.mask, **kwargs) > 0

    return nccd
@log_to_metadata
def wcs_project(ccd, target_wcs, target_shape=None, order='bilinear'):
    """
    Given a CCDData image with WCS, project it onto a target WCS and
    return the reprojected data as a new CCDData image.

    Any flags, weight, or uncertainty are ignored in doing the reprojection.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`
        Data to be projected.

    target_wcs : `~astropy.wcs.WCS` object
        WCS onto which all images should be projected.

    target_shape : two element list-like or None, optional
        Shape of the output image. If omitted, defaults to the shape of the
        input image.
        Default is ``None``.

    order : str, optional
        Interpolation order for re-projection. Must be one of:

        + 'nearest-neighbor'
        + 'bilinear'
        + 'biquadratic'
        + 'bicubic'

        Default is ``'bilinear'``.

    {log}

    Returns
    -------
    ccd : `~astropy.nddata.CCDData`
        A transformed CCDData object.
    """
    from reproject import reproject_interp

    if not (ccd.wcs.is_celestial and target_wcs.is_celestial):
        raise ValueError('one or both WCS is not celestial.')

    if target_shape is None:
        target_shape = ccd.shape

    reprojected, _ = reproject_interp((ccd.data, ccd.wcs), target_wcs,
                                      shape_out=target_shape, order=order)

    # Pixels that land outside the source image come back as nan; start the
    # output mask from those.
    combined_mask = np.isnan(reprojected)

    if ccd.mask is not None:
        mask_float, _ = reproject_interp((ccd.mask, ccd.wcs), target_wcs,
                                         shape_out=target_shape, order=order)
        # Treat any non-zero reprojected mask value as masked; the small
        # threshold allows for some rounding in reproject_interp.
        combined_mask = combined_mask | (mask_float > 1e-8)

    # Need to scale counts by ratio of pixel areas
    area_ratio = (proj_plane_pixel_area(target_wcs) /
                  proj_plane_pixel_area(ccd.wcs))

    # If nothing ended up masked, don't create a mask.
    if not combined_mask.any():
        combined_mask = None

    return CCDData(area_ratio * reprojected, wcs=target_wcs,
                   mask=combined_mask, header=ccd.header, unit=ccd.unit)
def sigma_func(arr, axis=None):
    """
    Robust method for calculating the deviation of an array. ``sigma_func``
    uses the median absolute deviation to determine the standard deviation.

    Parameters
    ----------
    arr : `~astropy.nddata.CCDData` or `numpy.ndarray`
        Array whose deviation is to be calculated.

    axis : int, tuple of ints or None, optional
        Axis or axes along which the function is performed. If ``None`` it
        is performed over all the dimensions of the input array. The axis
        argument can also be negative, in this case it counts from the last
        to the first axis.
        Default is ``None``.

    Returns
    -------
    uncertainty : float
        uncertainty of array estimated from median absolute deviation.
    """
    # 1.4826... rescales the MAD to an estimate of the Gaussian standard
    # deviation.
    return 1.482602218505602 * stats.median_absolute_deviation(arr, axis=axis)


def setbox(x, y, mbox, xmax, ymax):
    """
    Create a box of length mbox around a position x,y. If the box will be
    out of [0,len] then reset the edges of the box to be within the
    boundaries.

    Parameters
    ----------
    x : int
        Central x-position of box.

    y : int
        Central y-position of box.

    mbox : int
        Width of box.

    xmax : int
        Maximum x value.

    ymax : int
        Maximum y value.

    Returns
    -------
    x1, x2, y1, y2 : int
        Lower and upper corners of the box in x and y.
    """
    # Half-width of the box, never smaller than one pixel.
    half = max(int(0.5 * mbox), 1)
    # NOTE(review): the upper bounds clamp to xmax - 1 / ymax - 1, so a box
    # touching the far edge never includes the last row/column even though a
    # slice end of xmax/ymax would be valid -- looks like an off-by-one;
    # confirm before changing, background_deviation_box depends on it.
    y1 = max(0, y - half)
    y2 = min(y + half + 1, ymax - 1)
    x1 = max(0, x - half)
    x2 = min(x + half + 1, xmax - 1)
    return x1, x2, y1, y2
def background_deviation_box(data, bbox):
    """
    Determine the background deviation with a box size of bbox.

    The algorithm steps through the image and calculates the deviation
    within each box. It returns an array with the pixels in each box filled
    with the deviation value.

    Parameters
    ----------
    data : `numpy.ndarray` or `numpy.ma.MaskedArray`
        Data to measure background deviation.

    bbox : int
        Box size for calculating background deviation.

    Raises
    ------
    ValueError
        A value error is raised if bbox is less than 1.

    Returns
    -------
    background : `numpy.ndarray` or `numpy.ma.MaskedArray`
        An array with the measured background deviation in each pixel.
    """
    # Boxes smaller than one pixel give insufficient statistics.
    if bbox < 1:
        raise ValueError('bbox must be greater than 1.')

    # Start from the global standard deviation everywhere, then refine the
    # estimate box by box.
    barr = data * 0.0 + data.std()
    ylen, xlen = data.shape
    start = int(0.5 * bbox)
    for xc in range(start, xlen, bbox):
        for yc in range(start, ylen, bbox):
            x1, x2, y1, y2 = setbox(xc, yc, bbox, xlen, ylen)
            barr[y1:y2, x1:x2] = sigma_func(data[y1:y2, x1:x2])

    return barr


def background_deviation_filter(data, bbox):
    """
    Determine the background deviation for each pixel from a box with size
    of bbox.

    Parameters
    ----------
    data : `numpy.ndarray`
        Data to measure background deviation.

    bbox : int
        Box size for calculating background deviation.

    Raises
    ------
    ValueError
        A value error is raised if bbox is less than 1.

    Returns
    -------
    background : `numpy.ndarray` or `numpy.ma.MaskedArray`
        An array with the measured background deviation in each pixel.
    """
    if bbox < 1:
        raise ValueError('bbox must be greater than 1.')

    # Running-box version of the same estimate, one value per pixel.
    return ndimage.generic_filter(data, sigma_func, size=(bbox, bbox))
@deprecated('1.1')
def rebin(ccd, newshape):
    """
    Rebin an array to have a new shape.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData` or `numpy.ndarray`
        Data to rebin.

    newshape : tuple
        Tuple containing the new shape for the array.

    Returns
    -------
    output : `~astropy.nddata.CCDData` or `numpy.ndarray`
        An array with the new shape. It will have the same type as the
        input object.

    Raises
    ------
    TypeError
        A type error is raised if data is not an `numpy.ndarray` or
        `~astropy.nddata.CCDData`.

    ValueError
        A value error is raised if the dimension of the new shape is not
        equal to the data's.

    Notes
    -----
    This is based on the scipy cookbook for rebinning:
    http://wiki.scipy.org/Cookbook/Rebinning

    If rebinning a CCDData object to a smaller shape, the masking and
    uncertainty are not handled correctly.

    Examples
    --------
    Given an array that is 100x100::

        import numpy as np
        from astropy import units as u
        arr1 = CCDData(np.ones([10, 10]), unit=u.adu)

    The syntax for rebinning an array to a shape of (20,20) is::

        rebin(arr1, (20,20))
    """
    if isinstance(ccd, np.ndarray):
        if len(ccd.shape) != len(newshape):
            raise ValueError('newshape does not have the same dimensions as '
                             'ccd.')
        # Fractional step slices let mgrid build the resampling index grid
        # for both up- and down-binning (scipy cookbook recipe).
        slices = [slice(0, old, old/new)
                  for old, new in zip(ccd.shape, newshape)]
        indices = np.mgrid[slices].astype('i')
        return ccd[tuple(indices)]
    elif isinstance(ccd, CCDData):
        if len(ccd.shape) != len(newshape):
            raise ValueError('newshape does not have the same dimensions as '
                             'ccd.')
        nccd = ccd.copy()
        # Rebin each plane that is present with the ndarray branch above.
        nccd.data = rebin(nccd.data, newshape)
        if nccd.uncertainty is not None:
            nccd.uncertainty.array = rebin(nccd.uncertainty.array, newshape)
        if nccd.mask is not None:
            nccd.mask = rebin(nccd.mask, newshape)
        return nccd
    else:
        raise TypeError('ccd is not an ndarray or a CCDData object.')


def block_reduce(ccd, block_size, func=np.sum):
    """Thin wrapper around `astropy.nddata.block_reduce`."""
    reduced = nddata_utils.block_reduce(ccd, block_size, func)
    if isinstance(ccd, CCDData):
        # unit and meta "should" be unaffected by the change of shape and can
        # be copied. However wcs, mask, uncertainty should not be copied!
        reduced = CCDData(reduced, unit=ccd.unit, meta=ccd.meta.copy())
    return reduced
def block_average(ccd, block_size):
    """Like `block_reduce` but with predefined ``func=np.mean``.
    """
    averaged = nddata_utils.block_reduce(ccd, block_size, np.mean)
    # Like in block_reduce:
    if isinstance(ccd, CCDData):
        averaged = CCDData(averaged, unit=ccd.unit, meta=ccd.meta.copy())
    return averaged


def block_replicate(ccd, block_size, conserve_sum=True):
    """Thin wrapper around `astropy.nddata.block_replicate`."""
    replicated = nddata_utils.block_replicate(ccd, block_size, conserve_sum)
    # Like in block_reduce:
    if isinstance(ccd, CCDData):
        replicated = CCDData(replicated, unit=ccd.unit, meta=ccd.meta.copy())
    return replicated


try:
    # Append original docstring to docstrings of these functions
    block_reduce.__doc__ += nddata_utils.block_reduce.__doc__
    block_replicate.__doc__ += nddata_utils.block_replicate.__doc__
    __all__ += ['block_average', 'block_reduce', 'block_replicate']
except AttributeError:
    # Astropy 1.0 has no block_reduce, block_average
    del block_reduce, block_average, block_replicate
Notes ----- This is based on the scipy cookbook for rebinning: http://wiki.scipy.org/Cookbook/Rebinning """ # check to see that is in a nddata type if not isinstance(data, np.ndarray): raise TypeError('data is not a ndarray object.') # check to see that the two arrays are going to be the same length if len(data.shape) != len(newshape): raise ValueError('newshape does not have the same dimensions as data.') shape = data.shape lenShape = len(shape) factor = np.asarray(shape)/np.asarray(newshape) evList = ['data.reshape('] + \ ['newshape[%d],int(factor[%d]),' % (i, i) for i in range(lenShape)] + \ [')'] + ['.mean(%d)' % (i + 1) for i in range(lenShape)] return eval(''.join(evList)) def median_filter(data, *args, **kwargs): """See `scipy.ndimage.median_filter` for arguments. If the ``data`` is a `~astropy.nddata.CCDData` object the result will be another `~astropy.nddata.CCDData` object with the median filtered data as ``data`` and copied ``unit`` and ``meta``. """ if isinstance(data, CCDData): out_kwargs = {'meta': data.meta.copy(), 'unit': data.unit} result = ndimage.median_filter(data.data, *args, **kwargs) return CCDData(result, **out_kwargs) else: return ndimage.median_filter(data, *args, **kwargs) def cosmicray_lacosmic(ccd, sigclip=4.5, sigfrac=0.3, objlim=5.0, gain=1.0, readnoise=6.5, satlevel=65535.0, pssl=0.0, niter=4, sepmed=True, cleantype='meanmask', fsmode='median', psfmodel='gauss', psffwhm=2.5, psfsize=7, psfk=None, psfbeta=4.765, verbose=False): r""" Identify cosmic rays through the lacosmic technique. The lacosmic technique identifies cosmic rays by identifying pixels based on a variation of the Laplacian edge detection. The algorithm is an implementation of the code describe in van Dokkum (2001) [1]_ as implemented by McCully (2014) [2]_. If you use this algorithm, please cite these two works. Parameters ---------- ccd : `~astropy.nddata.CCDData` or `numpy.ndarray` Data to have cosmic ray cleaned. 
def cosmicray_lacosmic(ccd, sigclip=4.5, sigfrac=0.3,
                       objlim=5.0, gain=1.0, readnoise=6.5,
                       satlevel=65535.0, pssl=0.0, niter=4,
                       sepmed=True, cleantype='meanmask', fsmode='median',
                       psfmodel='gauss', psffwhm=2.5, psfsize=7,
                       psfk=None, psfbeta=4.765, verbose=False):
    r"""
    Identify cosmic rays through the lacosmic technique. The lacosmic
    technique identifies cosmic rays by identifying pixels based on a
    variation of the Laplacian edge detection. The algorithm is an
    implementation of the code describe in van Dokkum (2001) [1]_ as
    implemented by McCully (2014) [2]_. If you use this algorithm, please
    cite these two works.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData` or `numpy.ndarray`
        Data to have cosmic ray cleaned.

    sigclip : float, optional
        Laplacian-to-noise limit for cosmic ray detection. Lower values will
        flag more pixels as cosmic rays. Default: 4.5.

    sigfrac : float, optional
        Fractional detection limit for neighboring pixels; neighbors of a
        cosmic ray use a limit of sigfrac * sigclip. Default: 0.3.

    objlim : float, optional
        Minimum contrast between Laplacian image and the fine structure
        image. Increase this value if cores of bright stars are flagged as
        cosmic rays. Default: 5.0.

    gain : float, optional
        Gain of the image (electrons / ADU); detection always works in
        electrons. Default: 1.0.

    readnoise : float, optional
        Read noise of the image (electrons), used to generate the noise
        model of the image. Default: 6.5.

    satlevel : float, optional
        Saturation level of the image (electrons); pixels at or above this
        level are added to the mask. Default: 65535.0.

    pssl : float, optional
        Previously subtracted sky level in ADU, added back in so detection
        can work in electrons. Default: 0.0.

    niter : int, optional
        Number of iterations of the LA Cosmic algorithm to perform.
        Default: 4.

    sepmed : bool, optional
        Use the separable median filter instead of the full median filter;
        approximately the same result and significantly faster.
        Default: True.

    cleantype : str, optional
        Clean algorithm: "median", "medmask", "meanmask" or "idw".
        Default: "meanmask".

    fsmode : str, optional
        Method to build the fine structure image: "median" or "convolve".
        Default: "median".

    psfmodel : str, optional
        Model used to generate the psf kernel if fsmode == 'convolve' and
        psfk is None: "gauss", "moffat", "gaussx" or "gaussy".
        Default: "gauss".

    psffwhm : float, optional
        Full Width Half Maximum of the PSF used to generate the kernel.
        Default: 2.5.

    psfsize : int, optional
        Size of the kernel to calculate (psfsize x psfsize, odd).
        Default: 7.

    psfk : `numpy.ndarray` (with float dtype) or None, optional
        PSF kernel array for the fine structure image when
        fsmode == 'convolve'; computed from psfmodel when None.
        Default: None.

    psfbeta : float, optional
        Moffat beta parameter; only used if fsmode == 'convolve' and
        psfmodel == 'moffat'. Default: 4.765.

    verbose : bool, optional
        Print to the screen or not. Default: False.

    Notes
    -----
    Implementation of the cosmic ray identification L.A.Cosmic:
    http://www.astro.yale.edu/dokkum/lacosmic/

    Returns
    -------
    nccd : `~astropy.nddata.CCDData` or `numpy.ndarray`
        An object of the same type as ccd is returned. If it is a
        `~astropy.nddata.CCDData`, the mask attribute will also be updated
        with areas identified with cosmic rays masked.

    crmask : `numpy.ndarray`
        If an `numpy.ndarray` is provided as ccd, a boolean ndarray with the
        cosmic rays identified will also be returned.

    References
    ----------
    .. [1] van Dokkum, P; 2001, "Cosmic-Ray Rejection by Laplacian Edge
       Detection". The Publications of the Astronomical Society of the
       Pacific, Volume 113, Issue 789, pp. 1420-1427.
       doi: 10.1086/323894

    .. [2] McCully, C., 2014, "Astro-SCRAPPY",
       https://github.com/astropy/astroscrappy

    Examples
    --------
    >>> newdata, mask = cosmicray_lacosmic(data, sigclip=5)  # doctest: +SKIP
    >>> newccd = cosmicray_lacosmic(ccd, sigclip=5)  # doctest: +SKIP
    """
    from astroscrappy import detect_cosmics

    # Both branches forward the same tuning parameters; build them once so
    # the two detect_cosmics call sites cannot drift apart.
    lacosmic_kwargs = dict(sigclip=sigclip, sigfrac=sigfrac, objlim=objlim,
                           gain=gain, readnoise=readnoise, satlevel=satlevel,
                           pssl=pssl, niter=niter, sepmed=sepmed,
                           cleantype=cleantype, fsmode=fsmode,
                           psfmodel=psfmodel, psffwhm=psffwhm,
                           psfsize=psfsize, psfk=psfk, psfbeta=psfbeta,
                           verbose=verbose)

    if isinstance(ccd, np.ndarray):
        crmask, cleanarr = detect_cosmics(ccd, inmask=None,
                                          **lacosmic_kwargs)
        return cleanarr, crmask
    elif isinstance(ccd, CCDData):
        crmask, cleanarr = detect_cosmics(ccd.data, inmask=ccd.mask,
                                          **lacosmic_kwargs)

        # Return a copy with the cleaned data and the cosmic rays folded
        # into the mask (created if it did not previously exist).
        nccd = ccd.copy()
        nccd.data = cleanarr
        if nccd.mask is None:
            nccd.mask = crmask
        else:
            nccd.mask = nccd.mask + crmask
        return nccd
    else:
        raise TypeError('ccd is not a CCDData or ndarray object.')
def cosmicray_median(ccd, error_image=None, thresh=5, mbox=11, gbox=0,
                     rbox=0):
    """
    Identify cosmic rays through median technique. The median technique
    identifies cosmic rays by identifying pixels by subtracting a median
    image from the initial data array.

    Parameters
    ----------
    ccd : `~astropy.nddata.CCDData`, `numpy.ndarray` or `numpy.ma.MaskedArray`
        Data to have cosmic ray cleaned.

    thresh : float, optional
        Threshold for detecting cosmic rays.
        Default is ``5``.

    error_image : `numpy.ndarray`, float or None, optional
        Error level. If None, the task will use the standard deviation of
        the data. If an ndarray, it should have the same shape as data.
        Default is ``None``.

    mbox : int, optional
        Median box for detecting cosmic rays.
        Default is ``11``.

    gbox : int, optional
        Box size to grow cosmic rays. If zero, no growing will be done.
        Default is ``0``.

    rbox : int, optional
        Median box for calculating replacement values. If zero, no pixels
        will be replaced.
        Default is ``0``.

    Notes
    -----
    Similar implementation to crmedian in iraf.imred.crutil.crmedian.

    Returns
    -------
    nccd : `~astropy.nddata.CCDData` or `numpy.ndarray`
        An object of the same type as ccd is returned. If it is a
        `~astropy.nddata.CCDData`, the mask attribute will also be updated
        with areas identified with cosmic rays masked.

    nccd : `numpy.ndarray`
        If an `numpy.ndarray` is provided as ccd, a boolean ndarray with the
        cosmic rays identified will also be returned.

    Examples
    --------
    1) Given an numpy.ndarray object, the syntax for running
       cosmicray_median would be:

       >>> newdata, mask = cosmicray_median(data, error_image=error,
       ...                                  thresh=5, mbox=11,
       ...                                  rbox=11, gbox=5)   # doctest: +SKIP

       where error is an array that is the same shape as data but
       includes the pixel error. This would return a data array, newdata,
       with the bad pixels replaced by the local median from a box of 11
       pixels; and it would return a mask indicating the bad pixels.

    2) Given an `~astropy.nddata.CCDData` object with an uncertainty frame,
       the syntax for running cosmicray_median would be:

       >>> newccd = cosmicray_median(ccd, thresh=5, mbox=11,
       ...                           rbox=11, gbox=5)   # doctest: +SKIP

       The newccd object will have bad pixels in its data array replace and
       the mask of the object will be created if it did not previously exist
       or be updated with the detected cosmic rays.
    """
    if isinstance(ccd, np.ndarray):
        data = ccd

        if error_image is None:
            error_image = data.std()
        else:
            if not isinstance(error_image, (float, np.ndarray)):
                raise TypeError('error_image is not a float or ndarray.')

        # create the median image
        marr = ndimage.median_filter(data, size=(mbox, mbox))

        # Only look at the data array
        if isinstance(data, np.ma.MaskedArray):
            data = data.data

        # Find the residual image
        rarr = (data - marr) / error_image

        # identify all sources
        crarr = (rarr > thresh)

        # grow the pixels
        if gbox > 0:
            crarr = ndimage.maximum_filter(crarr, gbox)

        # replace bad pixels in the image
        ndata = data.copy()
        if rbox > 0:
            data = np.ma.masked_array(data, (crarr == 1))
            mdata = ndimage.median_filter(data, rbox)
            ndata[crarr == 1] = mdata[crarr == 1]

        return ndata, crarr

    elif isinstance(ccd, CCDData):
        # set up the error image
        if error_image is None and ccd.uncertainty is not None:
            error_image = ccd.uncertainty.array

        # BUG FIX: the shape comparison used to run unconditionally, which
        # raised AttributeError when error_image was still None (no
        # uncertainty frame present) or a plain float -- both documented as
        # valid.  Only an ndarray error image has a shape to validate; the
        # other cases are handled by the ndarray branch above.
        if (isinstance(error_image, np.ndarray) and
                ccd.data.shape != error_image.shape):
            raise ValueError('error_image is not the same shape as data.')

        data, crarr = cosmicray_median(ccd.data, error_image=error_image,
                                       thresh=thresh, mbox=mbox, gbox=gbox,
                                       rbox=rbox)

        # create the new ccd data object
        nccd = ccd.copy()
        nccd.data = data
        if nccd.mask is None:
            nccd.mask = crarr
        else:
            nccd.mask = nccd.mask + crarr
        return nccd

    else:
        raise TypeError('ccd is not an numpy.ndarray or a CCDData object.')
note:: This function uses ``lines`` as synonym for the first axis and ``columns`` the second axis. Only two-dimensional ``ratio`` is currently supported. Parameters ---------- ratio : `~astropy.nddata.CCDData` Data to used to form mask. Typically this is the ratio of two flat field images. findbadcolumns : `bool`, optional If set to True, the code will search for bad column sections. Note that this treats columns as special and breaks symmetry between lines and columns and so is likely only appropriate for detectors which have readout directions. Default is ``False``. byblocks : `bool`, optional If set to true, the code will divide the image up in to blocks of size nlsig by ncsig and determine the standard deviation estimate in each block (as described in the original IRAF task, see Notes below). If set to False, then the code will use `scipy.ndimage.percentile_filter` to generate a running box version of the standard deviation estimate and use that value for the standard deviation at each pixel. Default is ``False``. ncmed, nlmed : `int`, optional The column and line size of the moving median rectangle used to estimate the uncontaminated local signal. The column median size should be at least 3 pixels to span single bad columns. Default is ``7``. ncsig, nlsig : `int`, optional The column and line size of regions used to estimate the uncontaminated local sigma using a percentile. The size of the box should contain of order 100 pixels or more. Default is ``15``. lsigma, hsigma : `float`, optional Positive sigma factors to use for selecting pixels below and above the median level based on the local percentile sigma. Default is ``9``. ngood : `int`, optional Gaps of undetected pixels along the column direction of length less than this amount are also flagged as bad pixels, if they are between pixels masked in that column. Default is ``5``. 
Returns ------- mask : `numpy.ndarray` A boolean ndarray where the bad pixels have a value of 1 (True) and valid pixels 0 (False), following the numpy.ma conventions. Notes ----- Similar implementation to IRAF's ccdmask task. The Following documentation is copied directly from: http://stsdas.stsci.edu/cgi-bin/gethelp.cgi?ccdmask The input image is first subtracted by a moving box median. The median is unaffected by bad pixels provided the median size is larger that twice the size of a bad region. Thus, if 3 pixel wide bad columns are present then the column median box size should be at least 7 pixels. The median box can be a single pixel wide along one dimension if needed. This may be appropriate for spectroscopic long slit data. The median subtracted image is then divided into blocks of size nclsig by nlsig. In each block the pixel values are sorted and the pixels nearest the 30.9 and 69.1 percentile points are found; this would be the one sigma points in a Gaussian noise distribution. The difference between the two count levels divided by two is then the local sigma estimate. This algorithm is used to avoid contamination by the bad pixel values. The block size must be at least 10 pixels in each dimension to provide sufficient pixels for a good estimate of the percentile sigma. The sigma uncertainty estimate of each pixel in the image is then the sigma from the nearest block. The deviant pixels are found by comparing the median subtracted residual to a specified sigma threshold factor times the local sigma above and below zero (the lsigma and hsigma parameters). This is done for individual pixels and then for column sums of pixels (excluding previously flagged bad pixels) from two to the number of lines in the image. The sigma of the sums is scaled by the square root of the number of pixels summed so that statistically low or high column regions may be detected even though individual pixels may not be statistically deviant. 
For the purpose of this task one would normally select large sigma threshold factors such as six or greater to detect only true bad pixels and not the extremes of the noise distribution. As a final step each column is examined to see if there are small segments of unflagged pixels between bad pixels. If the length of a segment is less than that given by the ngood parameter all the pixels in the segment are also marked as bad. """ try: nlines, ncols = ratio.data.shape except (TypeError, ValueError): # shape is not iterable or has more or less than two values raise ValueError('"ratio" must be two-dimensional.') except AttributeError: # No data attribute or data has no shape attribute. raise ValueError('"ratio" should be a "CCDData".') def _sigma_mask(baseline, one_sigma_value, lower_sigma, upper_sigma): """Helper function to mask values outside of the specified sigma range. """ return ((baseline < -lower_sigma * one_sigma_value) | (baseline > upper_sigma * one_sigma_value)) mask = ~np.isfinite(ratio.data) medsub = (ratio.data - ndimage.median_filter(ratio.data, size=(nlmed, ncmed))) if byblocks: nlinesblock = int(math.ceil(nlines / nlsig)) ncolsblock = int(math.ceil(ncols / ncsig)) for i in six.moves.range(nlinesblock): for j in six.moves.range(ncolsblock): l1 = i * nlsig l2 = min((i + 1) * nlsig, nlines) c1 = j * ncsig c2 = min((j + 1) * ncsig, ncols) block = medsub[l1:l2, c1:c2] high = np.percentile(block.ravel(), 69.1) low = np.percentile(block.ravel(), 30.9) block_sigma = (high - low) / 2.0 block_mask = _sigma_mask(block, block_sigma, lsigma, hsigma) mblock = np.ma.MaskedArray(block, mask=block_mask, copy=False) if findbadcolumns: csum = np.ma.sum(mblock, axis=0) csum[csum <= 0] = 0 csum_sigma = np.ma.MaskedArray(np.sqrt(c2 - c1 - csum)) colmask = _sigma_mask(csum.filled(1), csum_sigma, lsigma, hsigma) block_mask[:, :] |= colmask[np.newaxis, :] mask[l1:l2, c1:c2] = block_mask else: high = ndimage.percentile_filter(medsub, 69.1, size=(nlsig, ncsig)) low = 
ndimage.percentile_filter(medsub, 30.9, size=(nlsig, ncsig)) sigmas = (high - low) / 2.0 mask |= _sigma_mask(medsub, sigmas, lsigma, hsigma) if findbadcolumns: # Loop through columns and look for short segments (>> import ccdproc >>> import numpy as np >>> ccdproc.bitfield_to_boolean_mask(np.arange(8)) array([False, True, True, True, True, True, True, True], dtype=bool) To ignore all bit flags ``ignore_bits=None`` can be used:: >>> ccdproc.bitfield_to_boolean_mask(np.arange(8), ignore_bits=None) array([False, False, False, False, False, False, False, False], dtype=bool) To ignore only specific bit flags one can use a ``list`` of bits flags to ignore:: >>> ccdproc.bitfield_to_boolean_mask(np.arange(8), ignore_bits=[1, 4]) array([False, False, True, True, False, False, True, True], dtype=bool) There are some equivalent ways:: >>> # pass in the sum of the "ignore_bits" directly >>> ccdproc.bitfield_to_boolean_mask(np.arange(8), ignore_bits=5) # 1 + 4 array([False, False, True, True, False, False, True, True], dtype=bool) >>> # use a comma seperated string of integers >>> ccdproc.bitfield_to_boolean_mask(np.arange(8), ignore_bits='1, 4') array([False, False, True, True, False, False, True, True], dtype=bool) >>> # use a + seperated string of integers >>> ccdproc.bitfield_to_boolean_mask(np.arange(8), ignore_bits='1+4') array([False, False, True, True, False, False, True, True], dtype=bool) Instead of directly specifying the **bits flags to ignore** one can also pass in the **only bits that shouldn't be ignored** by prepending a ``~`` to the string of ``ignore_bits`` (or if it's not a string in ``ignore_bits`` one can set ``flip_bits=True``):: >>> # ignore all bit flags except the one for 2. >>> ccdproc.bitfield_to_boolean_mask(np.arange(8), ignore_bits='~(2)') array([False, False, True, True, False, False, True, True], dtype=bool) >>> # ignore all bit flags except the one for 1, 8 and 32. 
class Keyword(object):
    """Represent a FITS header keyword, optionally carrying a unit.

    Parameters
    ----------
    name : str
        Name of the keyword.

    unit : unit-like or None, optional
        Unit of the keyword value. Default is ``None``.

    value : str, number, `~astropy.units.Quantity` or None, optional
        Value of the keyword; see the ``value`` property for the accepted
        combinations of value and unit. Default is ``None``.
    """
    def __init__(self, name, unit=None, value=None):
        self._name = name
        self._unit = unit
        self.value = value

    @property
    def name(self):
        # Name is fixed at construction time.
        return self._name

    @property
    def unit(self):
        # May be updated implicitly when a Quantity value is assigned.
        return self._unit

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # None simply clears the stored value.
        if value is None:
            self._value = value
            return
        # A Quantity brings its own unit along; adopt it.
        if isinstance(value, Quantity):
            self._unit = value.unit
            self._value = value
            return
        # Plain strings are only sensible for unitless keywords.
        if isinstance(value, six.string_types):
            if self.unit is not None:
                raise ValueError("keyword with a unit cannot have a "
                                 "string value.")
            self._value = value
            return
        # Any other (numeric) value needs a unit to attach to.
        if self.unit is None:
            raise ValueError("no unit provided. Set value with "
                             "an astropy.units.Quantity.")
        self._value = value * self.unit

    def value_from(self, header):
        """
        Set value of keyword from FITS header.

        Parameters
        ----------
        header : `~astropy.io.fits.Header`
            FITS header containing a value for this keyword.
        """
        raw_value = header[self.name]
        self.value = raw_value
        return self.value
def test(package=None, test_path=None, args=None, plugins=None,
         verbose=False, pastebin=None, remote_data=False, pep8=False,
         pdb=False, coverage=False, open_files=False, **kwargs):
    """
    Run the package tests using `py.test <http://pytest.org/latest>`__.

    Builds a proper argument set and hands it to the astropy test runner,
    which in turn invokes `pytest.main`.

    Parameters
    ----------
    package : str, optional
        Name of a specific subpackage to test (e.g. 'io.fits'); all
        default tests run when omitted.
    test_path : str, optional
        A single file or directory to test, absolute or relative to the
        calling directory.
    args : str, optional
        Extra arguments forwarded to ``pytest.main`` via ``args``.
    plugins : list, optional
        Plugins forwarded to ``pytest.main`` via ``plugins``.
    verbose : bool, optional
        Shortcut for passing ``'-v'`` to py.test.
    pastebin : {'failed', 'all', None}, optional
        Upload info for failed tests ('failed') or all tests ('all') to
        the py.test pastebin.
    remote_data : bool, optional
        Also run tests marked with ``@remote_data`` (they use online data
        and are skipped by default).
    pep8 : bool, optional
        Run only PEP8 checks via the pytest-pep8 plugin
        (equivalent to ``'--pep8 -k pep8'``).
    pdb : bool, optional
        Enable PDB post-mortem analysis on failures (``'--pdb'``).
    coverage : bool, optional
        Produce a coverage report in the ``htmlcov`` directory.
    open_files : bool, optional
        Fail tests that leave files open (requires a working ``lsof``).
    kwargs
        Any further keywords are passed straight through to the astropy
        test runner, allowing newer runner features without updating this
        template.
    """
    runner = _get_test_runner()
    # Forward every option by keyword so a duplicate in **kwargs raises
    # TypeError, exactly as an explicit call would.
    return runner.run_tests(
        package=package, test_path=test_path, args=args,
        plugins=plugins, verbose=verbose, pastebin=pastebin,
        remote_data=remote_data, pep8=pep8, pdb=pdb,
        coverage=coverage, open_files=open_files, **kwargs)
assert add_key in new.meta assert add_key not in ccd_data.meta # Long keyword names should be accessible with just the keyword name # without HIERARCH -- is it? assert new.meta[add_key] is None def test_log_keyword(ccd_data): key = 'filter' key_val = 'V' kwd = Keyword(key, value=key_val) new = create_deviation(ccd_data, readnoise=3 * ccd_data.unit, add_keyword=kwd) # Was the Keyword added with the correct value? assert kwd.name in new.meta assert kwd.name not in ccd_data.meta assert new.meta[kwd.name] == key_val def test_log_dict(ccd_data): keys_to_add = { 'process': 'Added deviation', 'n_images_input': 1, 'current_temp': 42.9 } new = create_deviation(ccd_data, readnoise=3 * ccd_data.unit, add_keyword=keys_to_add) for k, v in six.iteritems(keys_to_add): # Were all dictionary items added? assert k in new.meta assert k not in ccd_data.meta assert new.meta[k] == v def test_log_bad_type_fails(ccd_data): add_key = 15 # anything not string and not dict-like will work here # Do we fail with non-string, non-Keyword, non-dict-like value? with pytest.raises(AttributeError): create_deviation(ccd_data, readnoise=3 * ccd_data.unit, add_keyword=add_key) def test_log_set_to_None_does_not_change_header(ccd_data): new = create_deviation(ccd_data, readnoise=3 * ccd_data.unit, add_keyword=None) assert new.meta.keys() == ccd_data.header.keys() def test_implicit_logging(ccd_data): # If nothing is supplied for the add_keyword argument then the following # should happen: # + A key named func.__name__ is created, with # + value that is the list of arguments the function was called with. 
bias = CCDData(np.zeros_like(ccd_data.data), unit="adu") result = subtract_bias(ccd_data, bias) assert "subtract_bias" in result.header assert result.header['subtract_bias'] == ( 'subbias', 'Shortened name for ccdproc command') assert result.header['subbias'] == "ccd=, master=" result = create_deviation(ccd_data, readnoise=3 * ccd_data.unit) assert result.header['create_deviation'] == ( 'creatvar', 'Shortened name for ccdproc command') assert ("readnoise="+str(3 * ccd_data.unit) in result.header['creatvar']) ccdproc-1.3.0.post1/ccdproc/tests/test_image_collection.py0000664000175000017500000011443513207605210025346 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (print_function, division, absolute_import, unicode_literals) import os from shutil import rmtree from tempfile import mkdtemp from glob import iglob import sys import logging import pytest import astropy.io.fits as fits import numpy as np from astropy.tests.helper import catch_warnings from astropy.utils import minversion from astropy.utils.exceptions import AstropyUserWarning from astropy.extern import six from ccdproc import CCDData from ..image_collection import ImageFileCollection _filters = [] _original_dir = '' _ASTROPY_LT_1_3 = not minversion("astropy", "1.3") def test_fits_summary(triage_setup): keywords = ['imagetyp', 'filter'] ic = ImageFileCollection(triage_setup.test_dir, keywords=keywords) summary = ic._fits_summary(header_keywords=keywords) assert len(summary['file']) == triage_setup.n_test['files'] for keyword in keywords: assert len(summary[keyword]) == triage_setup.n_test['files'] # explicit conversion to array is needed to avoid astropy Table bug in # 0.2.4 no_filter_no_object_row = np.array(summary['file'] == 'no_filter_no_object_bias.fit') # there should be no filter keyword in the bias file assert summary['filter'][no_filter_no_object_row].mask class TestImageFileCollectionRepresentation(object): def 
test_repr_location(self, triage_setup): ic = ImageFileCollection(location=triage_setup.test_dir) assert repr(ic) == "ImageFileCollection(location={0!r})".format( triage_setup.test_dir) def test_repr_keywords(self, triage_setup): ic = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp']) ref = ("ImageFileCollection(location={0!r}, keywords=['imagetyp'])" .format(triage_setup.test_dir)) assert repr(ic) == ref def test_repr_globs(self, triage_setup): ic = ImageFileCollection( location=triage_setup.test_dir, glob_exclude="*no_filter*", glob_include="*object_light*") ref = ("ImageFileCollection(location={0!r}, " "glob_include='*object_light*', " "glob_exclude='*no_filter*')" .format(triage_setup.test_dir)) assert repr(ic) == ref def test_repr_files(self, triage_setup): ic = ImageFileCollection( location=triage_setup.test_dir, filenames=['no_filter_no_object_light.fit', 'no_filter_no_object_bias.fit']) ref = ("ImageFileCollection(location={0!r}, " "filenames=[{1}'no_filter_no_object_light.fit', " "{1}'no_filter_no_object_bias.fit'])" .format(triage_setup.test_dir, 'u' if six.PY2 else '')) assert repr(ic) == ref def test_repr_ext(self, triage_setup): hdul = fits.HDUList([fits.PrimaryHDU(np.ones((10, 10))), fits.ImageHDU(np.ones((10, 10)))]) hdul.writeto(os.path.join(triage_setup.test_dir, 'mef.fits')) ic = ImageFileCollection( location=triage_setup.test_dir, filenames=['mef.fits'], ext=1) ref = ("ImageFileCollection(location={0!r}, " "filenames=[{1}'mef.fits'], " "ext=1)" .format(triage_setup.test_dir, 'u' if six.PY2 else '')) assert repr(ic) == ref def test_repr_info(self, triage_setup): summary_file_path = os.path.join(triage_setup.test_dir, 'info.csv') ic = ImageFileCollection( location=triage_setup.test_dir, keywords=['naxis']) ic.summary.write(summary_file_path) with catch_warnings() as w: ic2 = ImageFileCollection(info_file=summary_file_path) # ImageFileCollections from info_files contain no files. 
That issues # a Warning that we'll ignore here. assert len(w) == 2 assert "'info_file' argument is deprecated" in str(w[0].message) assert 'no FITS files in the collection' in str(w[1].message) ref = ("ImageFileCollection(keywords=['naxis'], info_file={0!r})" .format(summary_file_path)) assert repr(ic2) == ref # This should work mark all test methods as using the triage_setup # fixture, but it doesn't, so the fixture is given explicitly as an # argument to each method. # @pytest.mark.usefixtures("triage_setup") class TestImageFileCollection(object): def _setup_logger(self, path, level=logging.WARN): """ Set up file logger at the path. """ logger = logging.getLogger() logger.setLevel(level) logger.addHandler(logging.FileHandler(path)) return logger def test_filter_files(self, triage_setup): img_collection = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp', 'filter']) assert len(img_collection.files_filtered( imagetyp='bias')) == triage_setup.n_test['bias'] assert len(img_collection.files) == triage_setup.n_test['files'] assert ('filter' in img_collection.keywords) assert ('flying monkeys' not in img_collection.keywords) assert len(img_collection.values('imagetyp', unique=True)) == 2 def test_filter_files_whitespace_keys(self, triage_setup): hdr = fits.Header([('HIERARCH a b', 2)]) hdul = fits.HDUList([fits.PrimaryHDU(np.ones((10, 10)), header=hdr)]) hdul.writeto(os.path.join(triage_setup.test_dir, 'hdr_with_whitespace.fits')) ic = ImageFileCollection(location=triage_setup.test_dir) # Using a dictionary and unpacking it should work filtered = ic.files_filtered(**{'a b': 2}) assert len(filtered) == 1 assert 'hdr_with_whitespace.fits' in filtered # Also check it's working with generators: for _, filename in ic.data(a_b=2, replace_='_', return_fname=True): assert filename == 'hdr_with_whitespace.fits' def test_filter_files_with_str_on_nonstr_column(self, triage_setup): ic = ImageFileCollection(location=triage_setup.test_dir) # Filtering an 
integer column with a string filtered = ic.files_filtered(naxis='2') assert len(filtered) == 0 def test_filtered_files_have_proper_path(self, triage_setup): ic = ImageFileCollection(location=triage_setup.test_dir, keywords='*') # Get a subset of the files. plain_biases = ic.files_filtered(imagetyp='bias') # Force a copy... plain_biases = list(plain_biases) # Same subset, but with full path. path_biases = ic.files_filtered(imagetyp='bias', include_path=True) for path_b, plain_b in zip(path_biases, plain_biases): # If the path munging has been done properly, this will succeed. assert os.path.basename(path_b) == plain_b def test_summary_is_summary_info(self, triage_setup): img_collection = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp', 'filter']) # summary_info is deprecated. with catch_warnings(AstropyUserWarning) as w: assert img_collection.summary is img_collection.summary_info assert len(w) def test_filenames_are_set_properly(self, triage_setup): fn = ['filter_no_object_bias.fit', 'filter_object_light_foo.fit'] img_collection = ImageFileCollection( location=triage_setup.test_dir, filenames=fn, keywords=['filter']) assert img_collection.files == fn img_collection.refresh() assert img_collection.files == fn fn = 'filter_no_object_bias.fit' img_collection = ImageFileCollection( location=triage_setup.test_dir, filenames=fn, keywords=['filter']) assert img_collection.files == [fn] def test_keywords_deleter(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords='*') assert ic.keywords != [] del ic.keywords assert ic.keywords == [] def test_files_with_compressed(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir) assert len(collection._fits_files_in_directory( compressed=True)) == triage_setup.n_test['files'] def test_files_with_no_compressed(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir) n_files_found = len( 
collection._fits_files_in_directory(compressed=False)) n_uncompressed = (triage_setup.n_test['files'] - triage_setup.n_test['compressed']) assert n_files_found == n_uncompressed def test_generator_full_path(self, triage_setup): collection = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp']) for path, file_name in zip(collection._paths(), collection.files): assert path == os.path.join(triage_setup.test_dir, file_name) def test_hdus(self, triage_setup): collection = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp']) n_hdus = 0 for hdu in collection.hdus(): assert isinstance(hdu, fits.PrimaryHDU) data = hdu.data # must access the data to force scaling # pre-astropy 1.1 unsigned data was changed to float32 and BZERO # removed. In 1.1 and later, BZERO stays but the data type is # unsigned int. assert (('BZERO' not in hdu.header) or (data.dtype is np.dtype(np.uint16))) n_hdus += 1 assert n_hdus == triage_setup.n_test['files'] def test_hdus_masking(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp', 'exposure']) old_data = np.array(collection.summary) for hdu in collection.hdus(imagetyp='bias'): pass new_data = np.array(collection.summary) assert (new_data == old_data).all() @pytest.mark.parametrize('extension', ['TESTEXT', 1, ('TESTEXT', 1)]) def test_multiple_extensions(self, triage_setup, extension): ext1 = fits.PrimaryHDU() ext1.data = np.arange(1, 5) # It is important than the name used for this test extension # NOT be MASK or UNCERT because both are treated in a special # way by the FITS reader. 
test_ext_name = 'TESTEXT' ext2 = fits.ImageHDU(name=test_ext_name) ext2.data = np.arange(6, 10) hdulist = fits.hdu.hdulist.HDUList([ext1, ext2]) hdulist.writeto(os.path.join(triage_setup.test_dir, 'multi-extension.fits')) ic2 = ImageFileCollection( triage_setup.test_dir, keywords='*', filenames=['multi-extension.fits'], ext=extension) ic1 = ImageFileCollection( triage_setup.test_dir, keywords='*', filenames=['multi-extension.fits'], ext=0) assert ic1.ext == 0 assert ic2.ext == extension column2 = ic2.summary.colnames column1 = ic1.summary.colnames assert column1 != column2 list1 = [key.lower() for key in ext2.header] list2 = ic2.summary.colnames[1:] assert list1 == list2 ccd_kwargs = {'unit': 'adu'} for data, hdr, hdu, ccd in zip(ic2.data(), ic2.headers(), ic2.hdus(), ic2.ccds(ccd_kwargs)): np.testing.assert_array_equal(data, ext2.data) assert hdr == ext2.header # Now compare that the generators each give the same stuff np.testing.assert_array_equal(data, ccd.data) np.testing.assert_array_equal(data, hdu.data) assert hdr == hdu.header assert hdr == ccd.meta def test_headers(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) n_headers = 0 for header in collection.headers(): assert isinstance(header, fits.Header) assert ('bzero' in header) n_headers += 1 assert n_headers == triage_setup.n_test['files'] def test_headers_save_location(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) destination = mkdtemp() for header in collection.headers(save_location=destination): pass new_collection = ImageFileCollection(location=destination, keywords=['imagetyp']) basenames = lambda paths: set( [os.path.basename(file) for file in paths]) assert (len(basenames(collection._paths()) - basenames(new_collection._paths())) == 0) rmtree(destination) def test_headers_with_filter(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, 
keywords=['imagetyp']) cnt = 0 for header in collection.headers(imagetyp='light'): assert header['imagetyp'].lower() == 'light' cnt += 1 assert cnt == triage_setup.n_test['light'] def test_headers_with_multiple_filters(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) cnt = 0 for header in collection.headers(imagetyp='light', filter='R'): assert header['imagetyp'].lower() == 'light' assert header['filter'].lower() == 'r' cnt += 1 assert cnt == (triage_setup.n_test['light'] - triage_setup.n_test['need_filter']) def test_headers_with_filter_wildcard(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) cnt = 0 for header in collection.headers(imagetyp='*'): cnt += 1 assert cnt == triage_setup.n_test['files'] def test_headers_with_filter_missing_keyword(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) for header in collection.headers(imagetyp='light', object=''): assert header['imagetyp'].lower() == 'light' with pytest.raises(KeyError): header['object'] def test_generator_headers_save_with_name(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) for header in collection.headers(save_with_name='_new'): assert isinstance(header, fits.Header) new_collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) assert (len(new_collection._paths()) == 2 * (triage_setup.n_test['files']) - triage_setup.n_test['compressed']) [os.remove(fil) for fil in iglob(triage_setup.test_dir + '/*_new*')] def test_generator_data(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) for img in collection.data(): assert isinstance(img, np.ndarray) def test_generator_ccds_without_unit(self, triage_setup): collection = ImageFileCollection( location=triage_setup.test_dir, 
keywords=['imagetyp']) with pytest.raises(ValueError): ccd = next(collection.ccds()) def test_generator_ccds(self, triage_setup): collection = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp']) ccd_kwargs = {'unit': 'adu'} for ccd in collection.ccds(ccd_kwargs=ccd_kwargs): assert isinstance(ccd, CCDData) def test_consecutive_fiilters(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp', 'filter', 'object']) no_files_match = collection.files_filtered(object='fdsafs') assert(len(no_files_match) == 0) some_files_should_match = collection.files_filtered(object=None, imagetyp='light') assert(len(some_files_should_match) == triage_setup.n_test['need_object']) def test_filter_does_not_not_permanently_change_file_mask(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) # ensure all files are originally unmasked assert not collection.summary['file'].mask.any() # generate list that will match NO files collection.files_filtered(imagetyp='foisajfoisaj') # if the code works, this should have no permanent effect assert not collection.summary['file'].mask.any() @pytest.mark.parametrize("new_keywords,collection_keys", [ (['imagetyp', 'object'], ['imagetyp', 'filter']), (['imagetyp'], ['imagetyp', 'filter'])]) def test_keyword_setting(self, new_keywords, collection_keys, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=collection_keys) tbl_orig = collection.summary collection.keywords = new_keywords tbl_new = collection.summary if set(new_keywords).issubset(collection_keys): # should just delete columns without rebuilding table assert(tbl_orig is tbl_new) else: # we need new keywords so must rebuild assert(tbl_orig is not tbl_new) for key in new_keywords: assert(key in tbl_new.keys()) assert (tbl_orig['file'] == tbl_new['file']).all() assert (tbl_orig['imagetyp'] == tbl_new['imagetyp']).all() assert 'filter' 
not in tbl_new.keys() assert 'object' not in tbl_orig.keys() def test_keyword_setting_to_empty_list(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir) ic.keywords = [] assert ['file'] == ic.keywords def test_header_and_filename(self, triage_setup): collection = ImageFileCollection(location=triage_setup.test_dir, keywords=['imagetyp']) for header, fname in collection.headers(return_fname=True): assert (fname in collection.summary['file']) assert (isinstance(header, fits.Header)) def test_dir_with_no_fits_files(self, tmpdir): empty_dir = tmpdir.mkdtemp() some_file = empty_dir.join('some_file.txt') some_file.dump('words') with catch_warnings() as w: collection = ImageFileCollection(location=empty_dir.strpath, keywords=['imagetyp']) assert len(w) == 1 assert str(w[0].message) == "no FITS files in the collection." assert collection.summary is None for hdr in collection.headers(): # this statement should not be reached if there are no FITS files assert 0 def test_dir_with_no_keys(self, tmpdir): # This test should fail if the FITS files in the directory # are actually read. bad_dir = tmpdir.mkdtemp() not_really_fits = bad_dir.join('not_fits.fit') not_really_fits.dump('I am not really a FITS file') # make sure an error will be generated if the FITS file is read with pytest.raises(IOError): fits.getheader(not_really_fits.strpath) log = tmpdir.join('tmp.log') self._setup_logger(log.strpath) _ = ImageFileCollection(location=bad_dir.strpath, keywords=[]) with open(log.strpath) as f: warnings = f.read() # ImageFileCollection will suppress the IOError but log a warning # so check that the log has no warnings in it. assert (len(warnings) == 0) def test_fits_summary_when_keywords_are_not_subset(self, triage_setup): """ Catch case when there is overlap between keyword list passed to the ImageFileCollection and to files_filtered but the latter is not a subset of the former. 
""" ic = ImageFileCollection(triage_setup.test_dir, keywords=['imagetyp', 'exptime']) n_files = len(ic.files) files_missing_this_key = ic.files_filtered(imagetyp='*', monkeys=None) assert(n_files > 0) assert(n_files == len(files_missing_this_key)) def test_duplicate_keywords_in_setting(self, triage_setup): keywords_in = ['imagetyp', 'a', 'a'] ic = ImageFileCollection(triage_setup.test_dir, keywords=keywords_in) for key in set(keywords_in): assert (key in ic.keywords) # one keyword gets added: file assert len(ic.keywords) < len(keywords_in) + 1 def test_keyword_includes_file(self, triage_setup): keywords_in = ['file', 'imagetyp'] ic = ImageFileCollection(triage_setup.test_dir, keywords=keywords_in) assert 'file' in ic.keywords file_keywords = [key for key in ic.keywords if key == 'file'] assert len(file_keywords) == 1 def test_setting_keywords_to_none(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords=['imagetyp']) ic.keywords = None assert ic.summary == [] def test_getting_value_for_keyword(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords=['imagetyp']) # Does it fail if the keyword is not in the summary? with pytest.raises(ValueError): ic.values('filter') # If I ask for unique values do I get them? values = ic.values('imagetyp', unique=True) assert values == list(set(ic.summary['imagetyp'])) assert len(values) < len(ic.summary['imagetyp']) # Does the list of non-unique values match the raw column? values = ic.values('imagetyp', unique=False) assert values == list(ic.summary['imagetyp']) # Does unique actually default to false? values2 = ic.values('imagetyp') assert values == values2 def test_collection_when_one_file_not_fits(self, triage_setup): not_fits = 'foo.fit' path_bad = os.path.join(triage_setup.test_dir, not_fits) # create an empty file... 
with open(path_bad, 'w'): pass ic = ImageFileCollection(triage_setup.test_dir, keywords=['imagetyp']) assert not_fits not in ic.summary['file'] os.remove(path_bad) def test_data_type_mismatch_in_fits_keyword_values(self, triage_setup): # If one keyword has an unexpected type, do we notice? img = np.uint16(np.arange(100)) bad_filter = fits.PrimaryHDU(img) bad_filter.header['imagetyp'] = 'LIGHT' bad_filter.header['filter'] = 15.0 path_bad = os.path.join(triage_setup.test_dir, 'bad_filter.fit') bad_filter.writeto(path_bad) ic = ImageFileCollection(triage_setup.test_dir, keywords=['filter']) # dtype is object when there is a mix of types assert ic.summary['filter'].dtype == np.dtype('O') os.remove(path_bad) def test_filter_by_numerical_value(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords=['naxis']) should_be_zero = ic.files_filtered(naxis=2) assert len(should_be_zero) == 0 should_not_be_zero = ic.files_filtered(naxis=1) assert len(should_not_be_zero) == triage_setup.n_test['files'] def test_files_filtered_with_full_path(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords=['naxis']) files = ic.files_filtered(naxis=1, include_path=True) for f in files: assert f.startswith(triage_setup.test_dir) def test_unknown_generator_type_raises_error(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords=['naxis']) with pytest.raises(ValueError): for foo in ic._generator('not a real generator'): pass def test_setting_write_location_to_bad_dest_raises_error(self, tmpdir, triage_setup): new_tmp = tmpdir.mkdtemp() bad_directory = new_tmp.join('foo') ic = ImageFileCollection(triage_setup.test_dir, keywords=['naxis']) with pytest.raises(IOError): for hdr in ic.headers(save_location=bad_directory.strpath): pass def test_initializing_from_table(self, triage_setup): keys = ['imagetyp', 'filter'] ic = ImageFileCollection(triage_setup.test_dir, keywords=keys) table = ic.summary table_path = 
os.path.join(triage_setup.test_dir, 'input_tbl.csv') nonsense = 'forks' table['imagetyp'][0] = nonsense table.write(table_path, format='ascii', delimiter=',') with catch_warnings() as w: ic = ImageFileCollection(location=None, info_file=table_path) # By using location=None we don't have actual files in the collection. assert len(w) == 2 assert "'info_file' argument is deprecated" in str(w[0].message) assert str(w[1].message) == "no FITS files in the collection." # keywords can only have been set from saved table for key in keys: assert key in ic.keywords # no location, so should be no files assert len(ic.files) == 0 # no location, so no way to iterate over files with pytest.raises((AttributeError, TypeError)): for h in ic.headers(): pass with catch_warnings() as w: ic = ImageFileCollection(location=triage_setup.test_dir, info_file=table_path) assert len(w) == 1 assert "'info_file' argument is deprecated" in str(w[0].message) # we now have a location, so did we get files? assert len(ic.files) == len(table) # Is the summary table masked? assert ic.summary.masked # can I loop over headers? for h in ic.headers(): assert isinstance(h, fits.Header) # Does ImageFileCollection summary contain values from table? assert nonsense in ic.summary['imagetyp'] def test_initializing_from_table_file_that_does_not_exist( self, triage_setup, tmpdir): log = tmpdir.join('tmp.log') self._setup_logger(log.strpath) # Do we get a warning if we try reading a file that doesn't exist, # but where we can initialize from a directory? with catch_warnings() as w: ic = ImageFileCollection( location=triage_setup.test_dir, info_file='iufadsdhfasdifre') assert len(w) == 1 assert "'info_file' argument is deprecated" in str(w[0].message) with open(log.strpath) as f: warnings = f.readlines() assert (len(warnings) == 1) is_in = ['unable to open table file' in w for w in warnings] assert all(is_in) # Do we raise an error if the table name is bad AND the location # is None? 
with pytest.raises(IOError): # Because the location is None we get a Warning about "no files in # the collection". with catch_warnings() as w: ImageFileCollection(location=None, info_file='iufadsdhfasdifre') assert len(w) == 2 assert "'info_file' argument is deprecated" in str(w[0].message) assert str(w[1].message) == "no FITS files in the collection." # Do we raise an error if the table name is bad AND # the location is given but is bad? with pytest.raises(OSError): with catch_warnings() as w: ic = ImageFileCollection(location='dasifjoaurun', info_file='iufadsdhfasdifre') assert len(w) == 1 assert "'info_file' argument is deprecated" in str(w[0].message) def test_no_fits_files_in_collection(self): with catch_warnings(AstropyUserWarning) as warning_lines: # FIXME: What exactly does this assert? assert "no fits files in the collection." def test_initialization_with_no_keywords(self, triage_setup): # This test is primarily historical -- the old default for # keywords was an empty list (it is now the wildcard '*'). ic = ImageFileCollection(location=triage_setup.test_dir, keywords=[]) # iteration below failed before bugfix... execs = 0 for h in ic.headers(): execs += 1 assert not execs def check_all_keywords_in_collection(self, image_collection): lower_case_columns = [c.lower() for c in image_collection.summary.colnames] for h in image_collection.headers(): for k in h: assert k.lower() in lower_case_columns def test_tabulate_all_keywords(self, triage_setup): ic = ImageFileCollection(location=triage_setup.test_dir, keywords='*') self.check_all_keywords_in_collection(ic) def test_summary_table_is_always_masked(self, triage_setup): # First, try grabbing all of the keywords ic = ImageFileCollection(location=triage_setup.test_dir, keywords='*') assert ic.summary.masked # Now, try keywords that every file will have ic.keywords = ['bitpix'] assert ic.summary.masked # What about keywords that include some that will surely be missing? 
ic.keywords = ['bitpix', 'dsafui'] assert ic.summary.masked def test_case_of_keywords_respected(self, triage_setup): keywords_in = ['BitPix', 'instrume', 'NAXIS'] ic = ImageFileCollection(location=triage_setup.test_dir, keywords=keywords_in) for key in keywords_in: assert key in ic.summary.colnames def test_grabbing_all_keywords_and_specific_keywords(self, triage_setup): keyword_not_in_headers = 'OIdn89!@' ic = ImageFileCollection(triage_setup.test_dir, keywords=['*', keyword_not_in_headers]) assert keyword_not_in_headers in ic.summary.colnames self.check_all_keywords_in_collection(ic) def test_grabbing_all_keywords_excludes_empty_key(self, triage_setup): # This test needs a file with a blank keyword in it to ensure # that case is handled correctly. blank_keyword = fits.PrimaryHDU() blank_keyword.data = np.zeros((100, 100)) blank_keyword.header[''] = 'blank' blank_keyword.writeto(os.path.join(triage_setup.test_dir, 'blank.fits')) ic = ImageFileCollection(triage_setup.test_dir, keywords='*') assert 'col0' not in ic.summary.colnames def test_header_with_long_history_roundtrips_to_disk(self, triage_setup): # I tried combing several history comments into one table entry with # '\n'.join(history), which resulted in a table that couldn't # round trip to disk because on read the newline character was # interpreted as...a new line! This test is a check against future # foolishness. 
from astropy.table import Table img = np.uint16(np.arange(100)) long_history = fits.PrimaryHDU(img) long_history.header['imagetyp'] = 'BIAS' long_history.header['history'] = 'Something happened' long_history.header['history'] = 'Then something else happened' long_history.header['history'] = 'And then something odd happened' path_history = os.path.join(triage_setup.test_dir, 'long_history.fit') long_history.writeto(path_history) ic = ImageFileCollection(triage_setup.test_dir, keywords='*') ic.summary.write('test_table.txt', format='ascii.csv') table_disk = Table.read('test_table.txt', format='ascii.csv') assert len(table_disk) == len(ic.summary) @pytest.mark.skipif("os.environ.get('APPVEYOR') or os.sys.platform == 'win32'", reason="fails on Windows because file " "overwriting fails") def test_refresh_method_sees_added_keywords(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords='*') # Add a keyword I know isn't already in the header to each file. not_in_header = 'BARKARK' for h in ic.headers(overwrite=True): h[not_in_header] = True assert not_in_header not in ic.summary.colnames ic.refresh() # After refreshing the odd keyword should be present. assert not_in_header.lower() in ic.summary.colnames def test_refresh_method_sees_added_files(self, triage_setup): ic = ImageFileCollection(triage_setup.test_dir, keywords='*') # Compressed files don't get copied. Not sure why... 
original_len = len(ic.summary) - triage_setup.n_test['compressed'] # Generate additional files in this directory for h in ic.headers(save_with_name="_foo"): pass ic.refresh() new_len = len(ic.summary) - triage_setup.n_test['compressed'] assert new_len == 2 * original_len def test_keyword_order_is_preserved(self, triage_setup): keywords = ['imagetyp', 'exposure', 'filter'] ic = ImageFileCollection(triage_setup.test_dir, keywords=keywords) assert ic.keywords == ['file'] + keywords def test_sorting(self, triage_setup): collection = ImageFileCollection( location=triage_setup.test_dir, keywords=['imagetyp', 'filter', 'object']) all_elements = [] for hdu, fname in collection.hdus(return_fname=True): all_elements.append((str(hdu.header), fname)) # Now sort collection.sort(keys=['imagetyp', 'object']) # and check it's all still right for hdu, fname in collection.hdus(return_fname=True): assert((str(hdu.header), fname) in all_elements) for i in range(len(collection.summary)): assert(collection.summary['file'][i] == collection.files[i]) @pytest.mark.skipif( _ASTROPY_LT_1_3, reason="It seems to fail with a TypeError there but because of " "different reasons (something to do with NumPy).") def test_sorting_without_key_fails(self, triage_setup): ic = ImageFileCollection(location=triage_setup.test_dir) with pytest.raises(ValueError): ic.sort(keys=None) def test_duplicate_keywords(self, triage_setup): # Make sure duplicated keywords don't make the imagefilecollection # fail. 
hdu = fits.PrimaryHDU() hdu.data = np.zeros((5, 5)) hdu.header['stupid'] = 'fun' hdu.header.append(('stupid', 'nofun')) hdu.writeto(os.path.join(triage_setup.test_dir, 'duplicated.fits')) with catch_warnings(UserWarning) as w: ic = ImageFileCollection(triage_setup.test_dir, keywords='*') assert len(w) == 1 assert 'stupid' in str(w[0].message) assert 'stupid' in ic.summary.colnames assert 'fun' in ic.summary['stupid'] assert 'nofun' not in ic.summary['stupid'] @pytest.mark.skipif( "sys.platform.startswith('win') and six.PY2", reason="os.path.samefile isn't available on windows (python < 3.2).") def test_ccds_generator_in_different_directory(self, triage_setup, tmpdir): """ Regression test for https://github.com/astropy/ccdproc/issues/421 in which the ccds generator fails if the current working directory is not the location of the ImageFileCollection. """ coll = ImageFileCollection(triage_setup.test_dir) # The temporary directory below should be different that the collection # location. os.chdir(tmpdir.strpath) # Let's make sure it is. assert not os.path.samefile(os.getcwd(), coll.location) # This generated an IOError before the issue was fixed. for _ in coll.ccds(ccd_kwargs={'unit': 'adu'}): pass def test_ccds_generator_does_not_support_overwrite(self, triage_setup): """ CCDData objects have several attributes that make it hard to reliably support overwriting. For example in what extension should mask, uncertainty be written? Also CCDData doesn't explicitly support in-place operations so it's to easy to create a new CCDData object inadvertantly and all modifications might be lost. 
""" ic = ImageFileCollection(triage_setup.test_dir) with pytest.raises(NotImplementedError): ic.ccds(overwrite=True) with pytest.raises(NotImplementedError): ic.ccds(clobber=True) def test_glob_matching(self, triage_setup): # We'll create two files with strange names to test glob # includes / excludes one = fits.PrimaryHDU() one.data = np.zeros((5, 5)) one.header[''] = 'whatever' one.writeto(os.path.join(triage_setup.test_dir, 'SPAM_stuff.fits')) one.writeto(os.path.join(triage_setup.test_dir, 'SPAM_other_stuff.fits')) coll = ImageFileCollection(triage_setup.test_dir, glob_include='SPAM*') assert len(coll.files) == 2 coll = ImageFileCollection(triage_setup.test_dir, glob_include='SPAM*', glob_exclude='*other*') assert len(coll.files) == 1 # the glob attributes are readonly, so setting them raises an Exception. with pytest.raises(AttributeError): coll.glob_exclude = '*stuff*' with pytest.raises(AttributeError): coll.glob_include = '*stuff*' ccdproc-1.3.0.post1/ccdproc/tests/test_cosmicray.py0000664000175000017500000001412713207605210024037 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np from numpy.testing import assert_allclose import pytest from astropy.utils import NumpyRNGContext from astropy.nddata import StdDevUncertainty from ..core import (cosmicray_lacosmic, cosmicray_median, background_deviation_box, background_deviation_filter) DATA_SCALE = 5.3 NCRAYS = 30 def add_cosmicrays(data, scale, threshold, ncrays=NCRAYS): size = data.shape[0] with NumpyRNGContext(125): crrays = np.random.randint(0, size, size=(ncrays, 2)) # use (threshold + 1) below to make sure cosmic ray is well above the # threshold no matter what the random number generator returns crflux = (10 * scale * np.random.random(NCRAYS) + (threshold + 5) * scale) for i in range(ncrays): y, x = crrays[i] data.data[y, x] = crflux[i] 
@pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_lacosmic(ccd_data): threshold = 5 add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) noise = DATA_SCALE * np.ones_like(ccd_data.data) data, crarr = cosmicray_lacosmic(ccd_data.data, sigclip=5) # check the number of cosmic rays detected # currently commented out while checking on issues # in astroscrappy # assert crarr.sum() == NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_lacosmic_ccddata(ccd_data): threshold = 5 add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) noise = DATA_SCALE * np.ones_like(ccd_data.data) ccd_data.uncertainty = noise nccd_data = cosmicray_lacosmic(ccd_data, sigclip=5) # check the number of cosmic rays detected # currently commented out while checking on issues # in astroscrappy # assert nccd_data.mask.sum() == NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_lacosmic_check_data(ccd_data): with pytest.raises(TypeError): noise = DATA_SCALE * np.ones_like(ccd_data.data) cosmicray_lacosmic(10, noise) @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_check_data(): with pytest.raises(TypeError): ndata, crarr = cosmicray_median(10, thresh=5, mbox=11, error_image=DATA_SCALE) @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median(ccd_data): threshold = 5 add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) ndata, crarr = cosmicray_median(ccd_data.data, thresh=5, mbox=11, error_image=DATA_SCALE) # check the number of cosmic rays detected assert crarr.sum() == NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_ccddata(ccd_data): threshold = 5 add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) ccd_data.uncertainty = ccd_data.data*0.0+DATA_SCALE nccd = cosmicray_median(ccd_data, thresh=5, mbox=11, error_image=None) # check the number of cosmic rays detected assert nccd.mask.sum() == NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_masked(ccd_data): threshold = 5 
add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) data = np.ma.masked_array(ccd_data.data, (ccd_data.data > -1e6)) ndata, crarr = cosmicray_median(data, thresh=5, mbox=11, error_image=DATA_SCALE) # check the number of cosmic rays detected assert crarr.sum() == NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_background_None(ccd_data): threshold = 5 add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) data, crarr = cosmicray_median(ccd_data.data, thresh=5, mbox=11, error_image=None) # check the number of cosmic rays detected assert crarr.sum() == NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_gbox(ccd_data): scale = DATA_SCALE # yuck. Maybe use pytest.parametrize? threshold = 5 add_cosmicrays(ccd_data, scale, threshold, ncrays=NCRAYS) error = ccd_data.data*0.0+DATA_SCALE data, crarr = cosmicray_median(ccd_data.data, error_image=error, thresh=5, mbox=11, rbox=0, gbox=5) data = np.ma.masked_array(data, crarr) assert crarr.sum() > NCRAYS assert abs(data.std() - scale) < 0.1 @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_rbox(ccd_data): scale = DATA_SCALE # yuck. Maybe use pytest.parametrize? 
threshold = 5 add_cosmicrays(ccd_data, scale, threshold, ncrays=NCRAYS) error = ccd_data.data*0.0+DATA_SCALE data, crarr = cosmicray_median(ccd_data.data, error_image=error, thresh=5, mbox=11, rbox=21, gbox=5) assert data[crarr].mean() < ccd_data.data[crarr].mean() assert crarr.sum() > NCRAYS @pytest.mark.data_scale(DATA_SCALE) def test_cosmicray_median_background_deviation(ccd_data): with pytest.raises(TypeError): cosmicray_median(ccd_data.data, thresh=5, mbox=11, error_image='blank') def test_background_deviation_box(): with NumpyRNGContext(123): scale = 5.3 cd = np.random.normal(loc=0, size=(100, 100), scale=scale) bd = background_deviation_box(cd, 25) assert abs(bd.mean() - scale) < 0.10 def test_background_deviation_box_fail(): with NumpyRNGContext(123): scale = 5.3 cd = np.random.normal(loc=0, size=(100, 100), scale=scale) with pytest.raises(ValueError): background_deviation_box(cd, 0.5) def test_background_deviation_filter(): with NumpyRNGContext(123): scale = 5.3 cd = np.random.normal(loc=0, size=(100, 100), scale=scale) bd = background_deviation_filter(cd, 25) assert abs(bd.mean() - scale) < 0.10 def test_background_deviation_filter_fail(): with NumpyRNGContext(123): scale = 5.3 cd = np.random.normal(loc=0, size=(100, 100), scale=scale) with pytest.raises(ValueError): background_deviation_filter(cd, 0.5) ccdproc-1.3.0.post1/ccdproc/tests/test_combiner.py0000664000175000017500000005565513207605210023657 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np import astropy.units as u from astropy.stats import median_absolute_deviation as mad import pytest from astropy.utils.data import get_pkg_data_filename from astropy.wcs import WCS from ..ccddata import CCDData from ..combiner import Combiner, combine #test that the Combiner raises error if empty def test_combiner_empty(): with pytest.raises(TypeError): 
Combiner() # empty initializer should fail #test that the Combiner raises error if empty if ccd_list is None def test_combiner_init_with_none(): with pytest.raises(TypeError): Combiner(None) # empty initializer should fail #test that Combiner throws an error if input #objects are not ccddata objects def test_ccddata_combiner_objects(ccd_data): ccd_list = [ccd_data, ccd_data, None] with pytest.raises(TypeError): Combiner(ccd_list) # different objects should fail #test that Combiner throws an error if input #objects do not have the same size def test_ccddata_combiner_size(ccd_data): ccd_large = CCDData(np.zeros((200, 100)), unit=u.adu) ccd_list = [ccd_data, ccd_data, ccd_large] with pytest.raises(TypeError): Combiner(ccd_list) # arrays of different sizes should fail #test that Combiner throws an error if input #objects do not have the same units def test_ccddata_combiner_units(ccd_data): ccd_large = CCDData(np.zeros((100, 100)), unit=u.second) ccd_list = [ccd_data, ccd_data, ccd_large] with pytest.raises(TypeError): Combiner(ccd_list) #test if mask and data array are created def test_combiner_create(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) assert c.data_arr.shape == (3, 100, 100) assert c.data_arr.mask.shape == (3, 100, 100) #test if dtype matches the value that is passed def test_combiner_dtype(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list, dtype=np.float32) assert c.data_arr.dtype == np.float32 avg = c.average_combine() # dtype of average should match input dtype assert avg.dtype == c.dtype med = c.median_combine() # dtype of median should match dtype of input assert med.dtype == c.dtype result_sum = c.sum_combine() # dtype of sum should match dtype of input assert result_sum.dtype == c.dtype #test mask is created from ccd.data def test_combiner_mask(): data = np.zeros((10, 10)) data[5, 5] = 1 mask = (data == 0) ccd = CCDData(data, unit=u.adu, mask=mask) ccd_list = [ccd, ccd, ccd] c = 
Combiner(ccd_list) assert c.data_arr.shape == (3, 10, 10) assert c.data_arr.mask.shape == (3, 10, 10) assert not c.data_arr.mask[0, 5, 5] def test_weights(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) with pytest.raises(TypeError): c.weights = 1 def test_weights_shape(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) with pytest.raises(ValueError): c.weights = ccd_data.data #test the min-max rejection def test_combiner_minmax(): ccd_list = [CCDData(np.zeros((10, 10)), unit=u.adu), CCDData(np.zeros((10, 10)) - 1000, unit=u.adu), CCDData(np.zeros((10, 10)) + 1000, unit=u.adu)] c = Combiner(ccd_list) c.minmax_clipping(min_clip=-500, max_clip=500) ccd = c.median_combine() assert ccd.data.mean() == 0 def test_combiner_minmax_max(): ccd_list = [CCDData(np.zeros((10, 10)), unit=u.adu), CCDData(np.zeros((10, 10)) - 1000, unit=u.adu), CCDData(np.zeros((10, 10)) + 1000, unit=u.adu)] c = Combiner(ccd_list) c.minmax_clipping(min_clip=None, max_clip=500) assert c.data_arr[2].mask.all() def test_combiner_minmax_min(): ccd_list = [CCDData(np.zeros((10, 10)), unit=u.adu), CCDData(np.zeros((10, 10)) - 1000, unit=u.adu), CCDData(np.zeros((10, 10)) + 1000, unit=u.adu)] c = Combiner(ccd_list) c.minmax_clipping(min_clip=-500, max_clip=None) assert c.data_arr[1].mask.all() def test_combiner_sigmaclip_high(): ccd_list = [CCDData(np.zeros((10, 10)), unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 10, unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 1000, unit=u.adu)] c = Combiner(ccd_list) # using mad for more robust statistics vs. 
std c.sigma_clipping(high_thresh=3, low_thresh=None, func=np.ma.median, dev_func=mad) assert c.data_arr[5].mask.all() def test_combiner_sigmaclip_single_pix(): ccd_list = [CCDData(np.zeros((10, 10)), unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 10, unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 10, unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu)] c = Combiner(ccd_list) # add a single pixel in another array to check that # that one gets rejected c.data_arr[0, 5, 5] = 0 c.data_arr[1, 5, 5] = -5 c.data_arr[2, 5, 5] = 5 c.data_arr[3, 5, 5] = -5 c.data_arr[4, 5, 5] = 25 c.sigma_clipping(high_thresh=3, low_thresh=None, func=np.ma.median, dev_func=mad) assert c.data_arr.mask[4, 5, 5] def test_combiner_sigmaclip_low(): ccd_list = [CCDData(np.zeros((10, 10)), unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 10, unit=u.adu), CCDData(np.zeros((10, 10)) - 10, unit=u.adu), CCDData(np.zeros((10, 10)) + 10, unit=u.adu), CCDData(np.zeros((10, 10)) - 1000, unit=u.adu)] c = Combiner(ccd_list) # using mad for more robust statistics vs. 
std c.sigma_clipping(high_thresh=None, low_thresh=3, func=np.ma.median, dev_func=mad) assert c.data_arr[5].mask.all() #test that the median combination works and returns a ccddata object def test_combiner_median(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) ccd = c.median_combine() assert isinstance(ccd, CCDData) assert ccd.shape == (100, 100) assert ccd.unit == u.adu assert ccd.meta['NCOMBINE'] == len(ccd_list) #test that the average combination works and returns a ccddata object def test_combiner_average(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) ccd = c.average_combine() assert isinstance(ccd, CCDData) assert ccd.shape == (100, 100) assert ccd.unit == u.adu assert ccd.meta['NCOMBINE'] == len(ccd_list) #test that the sum combination works and returns a ccddata object def test_combiner_sum(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) ccd = c.sum_combine() assert isinstance(ccd, CCDData) assert ccd.shape == (100, 100) assert ccd.unit == u.adu assert ccd.meta['NCOMBINE'] == len(ccd_list) #test data combined with mask is created correctly def test_combiner_mask_average(): data = np.zeros((10, 10)) data[5, 5] = 1 mask = (data == 0) ccd = CCDData(data, unit=u.adu, mask=mask) ccd_list = [ccd, ccd, ccd] c = Combiner(ccd_list) ccd = c.average_combine() assert ccd.data[0, 0] == 0 assert ccd.data[5, 5] == 1 assert ccd.mask[0, 0] assert not ccd.mask[5, 5] def test_combiner_with_scaling(ccd_data): # The factors below are not particularly important; just avoid anything # whose average is 1. 
ccd_data_lower = ccd_data.multiply(3) ccd_data_higher = ccd_data.multiply(0.9) combiner = Combiner([ccd_data, ccd_data_higher, ccd_data_lower]) # scale each array to the mean of the first image scale_by_mean = lambda x: ccd_data.data.mean()/np.ma.average(x) combiner.scaling = scale_by_mean avg_ccd = combiner.average_combine() # Does the mean of the scaled arrays match the value to which it was # scaled? np.testing.assert_almost_equal(avg_ccd.data.mean(), ccd_data.data.mean()) assert avg_ccd.shape == ccd_data.shape median_ccd = combiner.median_combine() # Does median also scale to the correct value? np.testing.assert_almost_equal(np.median(median_ccd.data), np.median(ccd_data.data)) # Set the scaling manually... combiner.scaling = [scale_by_mean(combiner.data_arr[i]) for i in range(3)] avg_ccd = combiner.average_combine() np.testing.assert_almost_equal(avg_ccd.data.mean(), ccd_data.data.mean()) assert avg_ccd.shape == ccd_data.shape def test_combiner_scaling_fails(ccd_data): combiner = Combiner([ccd_data, ccd_data.copy()]) # Should fail unless scaling is set to a function or list-like with pytest.raises(TypeError): combiner.scaling = 5 #test data combined with mask is created correctly def test_combiner_mask_median(): data = np.zeros((10, 10)) data[5, 5] = 1 mask = (data == 0) ccd = CCDData(data, unit=u.adu, mask=mask) ccd_list = [ccd, ccd, ccd] c = Combiner(ccd_list) ccd = c.median_combine() assert ccd.data[0, 0] == 0 assert ccd.data[5, 5] == 1 assert ccd.mask[0, 0] assert not ccd.mask[5, 5] #test data combined with mask is created correctly def test_combiner_mask_sum(): data = np.zeros((10, 10)) data[5, 5] = 1 mask = (data == 0) ccd = CCDData(data, unit=u.adu, mask=mask) ccd_list = [ccd, ccd, ccd] c = Combiner(ccd_list) ccd = c.sum_combine() assert ccd.data[0, 0] == 0 assert ccd.data[5, 5] == 3 assert ccd.mask[0, 0] assert not ccd.mask[5, 5] #test combiner convenience function reads fits file and combine as expected def test_combine_average_fitsimages(): fitsfile 
= get_pkg_data_filename('data/a8280271.fits') ccd = CCDData.read(fitsfile, unit=u.adu) ccd_list = [ccd]*3 c = Combiner(ccd_list) ccd_by_combiner = c.average_combine() fitsfilename_list = [fitsfile]*3 avgccd = combine(fitsfilename_list, output_file=None, method='average', unit=u.adu) # averaging same fits images should give back same fits image np.testing.assert_array_almost_equal(avgccd.data, ccd_by_combiner.data) def test_combine_numpyndarray(): """ Test of numpy ndarray implementation: #493 Test the average combine using ``Combiner`` and ``combine`` with input ``img_list`` in the format of ``numpy.ndarray``. """ fitsfile = get_pkg_data_filename('data/a8280271.fits') ccd = CCDData.read(fitsfile, unit=u.adu) ccd_list = [ccd]*3 c = Combiner(ccd_list) ccd_by_combiner = c.average_combine() fitsfilename_list = np.array([fitsfile]*3) avgccd = combine(fitsfilename_list, output_file=None, method='average', unit=u.adu) # averaging same fits images should give back same fits image np.testing.assert_array_almost_equal(avgccd.data, ccd_by_combiner.data) def test_combiner_result_dtype(): """Regression test: #391 The result should have the appropriate dtype not the dtype of the first input.""" ccd = CCDData(np.ones((3, 3), dtype=np.uint16), unit='adu') res = combine([ccd, ccd.multiply(2)]) # The default dtype of Combiner is float64 assert res.data.dtype == np.float64 ref = np.ones((3, 3)) * 1.5 np.testing.assert_array_almost_equal(res.data, ref) res = combine([ccd, ccd.multiply(2), ccd.multiply(3)], dtype=int) # The result dtype should be integer: assert res.data.dtype == np.int_ ref = np.ones((3, 3)) * 2 np.testing.assert_array_almost_equal(res.data, ref) #test combiner convenience function works with list of ccddata objects def test_combine_average_ccddata(): fitsfile = get_pkg_data_filename('data/a8280271.fits') ccd = CCDData.read(fitsfile, unit=u.adu) ccd_list = [ccd]*3 c = Combiner(ccd_list) ccd_by_combiner = c.average_combine() avgccd = combine(ccd_list,output_file=None, 
method='average', unit=u.adu) # averaging same ccdData should give back same images np.testing.assert_array_almost_equal(avgccd.data, ccd_by_combiner.data) #test combiner convenience function reads fits file and # and combine as expected when asked to run in limited memory def test_combine_limitedmem_fitsimages(): fitsfile = get_pkg_data_filename('data/a8280271.fits') ccd = CCDData.read(fitsfile, unit=u.adu) ccd_list = [ccd]*5 c = Combiner(ccd_list) ccd_by_combiner = c.average_combine() fitsfilename_list = [fitsfile]*5 avgccd = combine(fitsfilename_list,output_file=None, method='average', mem_limit=1e6, unit=u.adu) # averaging same ccdData should give back same images np.testing.assert_array_almost_equal(avgccd.data, ccd_by_combiner.data) #test combiner convenience function reads fits file and # and combine as expected when asked to run in limited memory with scaling def test_combine_limitedmem_scale_fitsimages(): fitsfile = get_pkg_data_filename('data/a8280271.fits') ccd = CCDData.read(fitsfile, unit=u.adu) ccd_list = [ccd]*5 c = Combiner(ccd_list) # scale each array to the mean of the first image scale_by_mean = lambda x: ccd.data.mean()/np.ma.average(x) c.scaling = scale_by_mean ccd_by_combiner = c.average_combine() fitsfilename_list = [fitsfile]*5 avgccd = combine(fitsfilename_list,output_file=None, method='average', mem_limit=1e6, scale=scale_by_mean, unit=u.adu) np.testing.assert_array_almost_equal(avgccd.data, ccd_by_combiner.data, decimal=4) #test the optional uncertainty function in average_combine def test_average_combine_uncertainty(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) ccd = c.average_combine(uncertainty_func=np.sum) uncert_ref = np.sum(c.data_arr, 0) / np.sqrt(3) np.testing.assert_array_equal(ccd.uncertainty.array, uncert_ref) # Compare this also to the "combine" call ccd2 = combine(ccd_list, method='average', combine_uncertainty_function=np.sum) np.testing.assert_array_equal(ccd.data, ccd2.data) 
np.testing.assert_array_equal(ccd.uncertainty.array, ccd2.uncertainty.array) #test the optional uncertainty function in median_combine def test_median_combine_uncertainty(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) ccd = c.median_combine(uncertainty_func=np.sum) uncert_ref = np.sum(c.data_arr, 0) / np.sqrt(3) np.testing.assert_array_equal(ccd.uncertainty.array, uncert_ref) # Compare this also to the "combine" call ccd2 = combine(ccd_list, method='median', combine_uncertainty_function=np.sum) np.testing.assert_array_equal(ccd.data, ccd2.data) np.testing.assert_array_equal(ccd.uncertainty.array, ccd2.uncertainty.array) #test the optional uncertainty function in sum_combine def test_sum_combine_uncertainty(ccd_data): ccd_list = [ccd_data, ccd_data, ccd_data] c = Combiner(ccd_list) ccd = c.sum_combine(uncertainty_func=np.sum) uncert_ref = np.sum(c.data_arr, 0) * np.sqrt(3) np.testing.assert_almost_equal(ccd.uncertainty.array, uncert_ref) # Compare this also to the "combine" call ccd2 = combine(ccd_list, method='sum', combine_uncertainty_function=np.sum) np.testing.assert_array_equal(ccd.data, ccd2.data) np.testing.assert_array_equal(ccd.uncertainty.array, ccd2.uncertainty.array) # test resulting uncertainty is corrected for the number of images def test_combiner_uncertainty_average(): ccd_list = [CCDData(np.ones((10, 10)), unit=u.adu), CCDData(np.ones((10, 10))*2, unit=u.adu)] c = Combiner(ccd_list) ccd = c.average_combine() # Just the standard deviation of ccd data. ref_uncertainty = np.ones((10, 10)) / 2 # Correction because we combined two images. 
ref_uncertainty /= np.sqrt(2) np.testing.assert_array_almost_equal(ccd.uncertainty.array, ref_uncertainty) # test resulting uncertainty is corrected for the number of images (with mask) def test_combiner_uncertainty_average_mask(): mask = np.zeros((10, 10), dtype=np.bool_) mask[5, 5] = True ccd_with_mask = CCDData(np.ones((10, 10)), unit=u.adu, mask=mask) ccd_list = [ccd_with_mask, CCDData(np.ones((10, 10))*2, unit=u.adu), CCDData(np.ones((10, 10))*3, unit=u.adu)] c = Combiner(ccd_list) ccd = c.average_combine() # Just the standard deviation of ccd data. ref_uncertainty = np.ones((10, 10)) * np.std([1, 2, 3]) # Correction because we combined two images. ref_uncertainty /= np.sqrt(3) ref_uncertainty[5, 5] = np.std([2, 3]) / np.sqrt(2) np.testing.assert_array_almost_equal(ccd.uncertainty.array, ref_uncertainty) # test resulting uncertainty is corrected for the number of images (with mask) def test_combiner_uncertainty_sum_mask(): mask = np.zeros((10, 10), dtype=np.bool_) mask[5, 5] = True ccd_with_mask = CCDData(np.ones((10, 10)), unit=u.adu, mask=mask) ccd_list = [ccd_with_mask, CCDData(np.ones((10, 10))*2, unit=u.adu), CCDData(np.ones((10, 10))*3, unit=u.adu)] c = Combiner(ccd_list) ccd = c.sum_combine() # Just the standard deviation of ccd data. 
ref_uncertainty = np.ones((10, 10)) * np.std([1, 2, 3]) ref_uncertainty *= np.sqrt(3) ref_uncertainty[5, 5] = np.std([2, 3]) * np.sqrt(2) np.testing.assert_array_almost_equal(ccd.uncertainty.array, ref_uncertainty) def test_combiner_3d(): data1 = CCDData(3 * np.ones((5,5,5)), unit=u.adu) data2 = CCDData(2 * np.ones((5,5,5)), unit=u.adu) data3 = CCDData(4 * np.ones((5,5,5)), unit=u.adu) ccd_list = [data1, data2, data3] c = Combiner(ccd_list) assert c.data_arr.shape == (3, 5, 5, 5) assert c.data_arr.mask.shape == (3, 5, 5, 5) ccd = c.average_combine() assert ccd.shape == (5, 5, 5) np.testing.assert_array_almost_equal(ccd.data, data1, decimal=4) def test_3d_combiner_with_scaling(ccd_data): # The factors below are not particularly important; just avoid anything # whose average is 1. ccd_data = CCDData(np.ones((5,5,5)), unit=u.adu) ccd_data_lower = CCDData(3 * np.ones((5,5,5)), unit=u.adu) ccd_data_higher = CCDData(0.9 * np.ones((5,5,5)), unit=u.adu) combiner = Combiner([ccd_data, ccd_data_higher, ccd_data_lower]) # scale each array to the mean of the first image scale_by_mean = lambda x: ccd_data.data.mean()/np.ma.average(x) combiner.scaling = scale_by_mean avg_ccd = combiner.average_combine() # Does the mean of the scaled arrays match the value to which it was # scaled? np.testing.assert_almost_equal(avg_ccd.data.mean(), ccd_data.data.mean()) assert avg_ccd.shape == ccd_data.shape median_ccd = combiner.median_combine() # Does median also scale to the correct value? np.testing.assert_almost_equal(np.median(median_ccd.data), np.median(ccd_data.data)) # Set the scaling manually... 
combiner.scaling = [scale_by_mean(combiner.data_arr[i]) for i in range(3)] avg_ccd = combiner.average_combine() np.testing.assert_almost_equal(avg_ccd.data.mean(), ccd_data.data.mean()) assert avg_ccd.shape == ccd_data.shape def test_clip_extrema_3d(): ccdlist = [CCDData(np.ones((3, 3, 3))*90., unit="adu"), CCDData(np.ones((3, 3, 3))*20., unit="adu"), CCDData(np.ones((3, 3, 3))*10., unit="adu"), CCDData(np.ones((3, 3, 3))*40., unit="adu"), CCDData(np.ones((3, 3, 3))*25., unit="adu"), CCDData(np.ones((3, 3, 3))*35., unit="adu"), ] c = Combiner(ccdlist) c.clip_extrema(nlow=1, nhigh=1) result = c.average_combine() expected = CCDData(np.ones((3, 3, 3)) * 30, unit="adu") np.testing.assert_array_equal(result, expected) @pytest.mark.parametrize('comb_func', ['average_combine', 'median_combine', 'sum_combine']) def test_writeable_after_combine(ccd_data, tmpdir, comb_func): tmp_file = tmpdir.join('tmp.fits') from ..combiner import Combiner combined = Combiner([ccd_data for _ in range(3)]) ccd2 = getattr(combined, comb_func)() # This should not fail because the resulting uncertainty has a mask ccd2.write(tmp_file.strpath) def test_clip_extrema(): ccdlist = [CCDData(np.ones((3, 5))*90., unit="adu"), CCDData(np.ones((3, 5))*20., unit="adu"), CCDData(np.ones((3, 5))*10., unit="adu"), CCDData(np.ones((3, 5))*40., unit="adu"), CCDData(np.ones((3, 5))*25., unit="adu"), CCDData(np.ones((3, 5))*35., unit="adu"), ] ccdlist[0].data[0,1] = 3.1 ccdlist[1].data[1,2] = 100.1 ccdlist[1].data[2,0] = 100.1 c = Combiner(ccdlist) c.clip_extrema(nlow=1, nhigh=1) result = c.average_combine() expected = [[30.0, 22.5, 30.0, 30.0, 30.0], [30.0, 30.0, 47.5, 30.0, 30.0], [47.5, 30.0, 30.0, 30.0, 30.0]] np.testing.assert_array_equal(result, expected) def test_clip_extrema_via_combine(): ccdlist = [CCDData(np.ones((3, 5))*90., unit="adu"), CCDData(np.ones((3, 5))*20., unit="adu"), CCDData(np.ones((3, 5))*10., unit="adu"), CCDData(np.ones((3, 5))*40., unit="adu"), CCDData(np.ones((3, 5))*25., 
unit="adu"), CCDData(np.ones((3, 5))*35., unit="adu"), ] ccdlist[0].data[0,1] = 3.1 ccdlist[1].data[1,2] = 100.1 ccdlist[1].data[2,0] = 100.1 result = combine(ccdlist, clip_extrema=True, nlow=1, nhigh=1,) expected = [[30.0, 22.5, 30.0, 30.0, 30.0], [30.0, 30.0, 47.5, 30.0, 30.0], [47.5, 30.0, 30.0, 30.0, 30.0]] np.testing.assert_array_equal(result, expected) def test_clip_extrema_with_other_rejection(): ccdlist = [CCDData(np.ones((3, 5))*90., unit="adu"), CCDData(np.ones((3, 5))*20., unit="adu"), CCDData(np.ones((3, 5))*10., unit="adu"), CCDData(np.ones((3, 5))*40., unit="adu"), CCDData(np.ones((3, 5))*25., unit="adu"), CCDData(np.ones((3, 5))*35., unit="adu"), ] ccdlist[0].data[0,1] = 3.1 ccdlist[1].data[1,2] = 100.1 ccdlist[1].data[2,0] = 100.1 c = Combiner(ccdlist) ## Reject ccdlist[1].data[1,2] by other means c.data_arr.mask[1,1,2] = True ## Reject ccdlist[1].data[1,2] by other means c.data_arr.mask[3,0,0] = True c.clip_extrema(nlow=1, nhigh=1) result = c.average_combine() expected = [[ 80./3., 22.5, 30. , 30., 30.], [ 30. , 30. , 47.5, 30., 30.], [ 47.5, 30. , 30. , 30., 30.]] np.testing.assert_array_equal(result, expected) ccdproc-1.3.0.post1/ccdproc/tests/__init__.py0000664000175000017500000000017113207605210022540 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This packages contains affiliated package tests. """ ccdproc-1.3.0.post1/ccdproc/tests/test_ccdmask.py0000664000175000017500000002536413207605210023460 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) from numpy.testing import assert_array_equal import numpy as np import pytest from ..core import ccdmask from ..ccddata import CCDData def test_ccdmask_no_ccddata(): # Fails when a simple list is given. 
with pytest.raises(ValueError): ccdmask([[0, 0, 0], [0, 0, 0], [0, 0, 0]]) def test_ccdmask_not_2d(): # Fails when a CCDData has less than 2 dimensions with pytest.raises(ValueError): ccdmask(CCDData(np.ones(3), unit='adu')) # Fails when scalar with pytest.raises(ValueError): ccdmask(CCDData(np.array(10), unit='adu')) # Fails when more than 2d with pytest.raises(ValueError): ccdmask(CCDData(np.ones((3, 3, 3)), unit='adu')) def test_ccdmask_pixels(): flat1 = CCDData(np.array([[ 20044, 19829, 19936, 20162, 19948, 19965, 19919, 20004, 19951, 20002, 19926, 20151, 19886, 20014, 19928, 20025, 19921, 19996, 19912, 20017, 19969, 20103, 20161, 20110, 19977, 19922, 20004, 19802, 20079, 19981, 20083, 19871], [20068, 20204, 20085, 20027, 20103, 19866, 20089, 19914, 20160, 19884, 19956, 20095, 20004, 20075, 19899, 20016, 19995, 20178, 19963, 20030, 20055, 20005, 20073, 19969, 19958, 20040, 19979, 19938, 19986, 19957, 20172, 20054], [20099, 20180, 19912, 20050, 19930, 19930, 20036, 20006, 19833, 19984, 19879, 19815, 20105, 20011, 19949, 20062, 19837, 20070, 20047, 19855, 19956, 19928, 19878, 20102, 19940, 20001, 20082, 20080, 20019, 19991, 19919, 20121], [20014, 20262, 19953, 20077, 19928, 20271, 19962, 20048, 20011, 20054, 20112, 19931, 20125, 19899, 19993, 19939, 19916, 19998, 19921, 19949, 20246, 20160, 19881, 19863, 19874, 19979, 19989, 19901, 19850, 19931, 20001, 20167], [20131, 19991, 20073, 19945, 19980, 20021, 19938, 19964, 20002, 20177, 19888, 19901, 19919, 19977, 20280, 20035, 20045, 19849, 20169, 20074, 20113, 19993, 19965, 20026, 20018, 19966, 20023, 19965, 19962, 20082, 20027, 20145], [20106, 20025, 19846, 19865, 19913, 20046, 19998, 20037, 19986, 20048, 20005, 19790, 20011, 19985, 19959, 19882, 20085, 19978, 19881, 19960, 20111, 19936, 19983, 19863, 19819, 19896, 19968, 20134, 19824, 19990, 20146, 19886], [20162, 19997, 19966, 20110, 19822, 19923, 20029, 20129, 19936, 19882, 20077, 20112, 20040, 20051, 20177, 19763, 20097, 19898, 19832, 20061, 19919, 20056, 20010, 
19929, 20010, 19995, 20124, 19965, 19922, 19860, 20021, 19989], [20088, 20104, 19956, 19959, 20018, 19948, 19836, 20107, 19920, 20117, 19882, 20039, 20206, 20067, 19784, 20087, 20117, 19990, 20242, 19861, 19923, 19779, 20024, 20024, 19981, 19915, 20017, 20053, 19932, 20179, 20062, 19908], [19993, 20047, 20008, 20172, 19977, 20054, 19980, 19952, 20138, 19940, 19995, 20029, 19888, 20191, 19958, 20007, 19938, 19959, 19933, 20139, 20069, 19905, 20101, 20086, 19904, 19807, 20131, 20048, 19927, 19905, 19939, 20030], [20040, 20051, 19997, 20013, 19942, 20130, 19983, 19603, 19934, 19944, 19961, 19979, 20164, 19855, 20157, 20010, 20020, 19902, 20134, 19971, 20228, 19967, 19879, 20022, 19915, 20063, 19768, 19976, 19860, 20041, 19955, 19984], [19807, 20066, 19986, 19999, 19975, 20115, 19998, 20056, 20059, 20016, 19970, 19964, 20053, 19975, 19985, 19973, 20041, 19918, 19875, 19997, 19954, 19777, 20117, 20248, 20034, 20019, 20018, 20058, 20027, 20121, 19909, 20094], [19890, 20018, 20032, 20058, 19909, 19906, 19812, 20206, 19908, 19767, 20127, 20015, 19959, 20026, 20021, 19964, 19824, 19934, 20147, 19984, 20026, 20168, 19992, 20175, 20040, 20208, 20077, 19897, 20037, 19996, 19998, 20019], [19966, 19897, 20062, 19914, 19780, 20004, 20029, 20140, 20057, 20134, 20125, 19973, 19894, 19929, 19876, 20135, 19981, 20057, 20015, 20113, 20107, 20115, 19924, 19987, 19926, 19885, 20013, 20058, 19950, 20155, 19825, 20092], [19889, 20046, 20113, 19991, 19829, 20180, 19949, 20011, 20014, 20123, 19980, 19770, 20086, 20041, 19957, 19949, 20026, 19918, 19777, 20062, 19862, 20085, 20090, 20122, 19692, 19937, 19897, 20018, 19935, 20037, 19946, 19998], [20001, 19940, 19994, 19835, 19959, 19895, 20017, 20002, 20007, 19851, 19900, 20044, 20354, 19814, 19869, 20148, 20001, 20143, 19778, 20146, 19975, 19859, 20008, 20041, 19937, 20072, 20203, 19778, 20027, 20075, 19877, 19999], [19753, 19866, 20037, 20149, 20020, 20071, 19955, 20164, 19837, 19967, 19959, 20163, 20003, 20127, 20065, 20118, 20104, 19839, 
20124, 20057, 19943, 20023, 20138, 19996, 19910, 20048, 20070, 19833, 19913, 20012, 19897, 19983]]), unit='adu') flat2 = CCDData(np.array([[ 20129, 20027, 19945, 20085, 19951, 20015, 20102, 19957, 20100, 19865, 19878, 20111, 20047, 19882, 19929, 20079, 19937, 19999, 20109, 19929, 19985, 19970, 19941, 19868, 20191, 20142, 19948, 20079, 19975, 19949, 19972, 20053], [20075, 19980, 20035, 20014, 19865, 20058, 20091, 20030, 19931, 19806, 19990, 19902, 19895, 19789, 20079, 20048, 20040, 19968, 20049, 19946, 19982, 19865, 19766, 19903, 20025, 19916, 19904, 20128, 19865, 20103, 19864, 19832], [20008, 19989, 20032, 19891, 20063, 20061, 20179, 19920, 19960, 19655, 19897, 19943, 20015, 20123, 20009, 19940, 19876, 19964, 20097, 19814, 20086, 20096, 20030, 20140, 19903, 19858, 19978, 19817, 20107, 19893, 19988, 19956], [20105, 19873, 20003, 19671, 19993, 19981, 20234, 19976, 20079, 19882, 19982, 19959, 19882, 20103, 20008, 19960, 20084, 20025, 19864, 19969, 19945, 19979, 19937, 19965, 19981, 19957, 19906, 19959, 19839, 19679, 19988, 20154], [20053, 20152, 19858, 20134, 19867, 20027, 20024, 19884, 20015, 19904, 19992, 20137, 19981, 20147, 19814, 20035, 19992, 19921, 20007, 20103, 19920, 19889, 20182, 19964, 19859, 20016, 20011, 20203, 19761, 19954, 20151, 19973], [20029, 19863, 20217, 19819, 19984, 19950, 19914, 20028, 19980, 20033, 20016, 19796, 19901, 20027, 20078, 20136, 19995, 19915, 20014, 19920, 19996, 20216, 19939, 19967, 19949, 20023, 20024, 19949, 19949, 19902, 19980, 19895], [19962, 19872, 19926, 20047, 20136, 19944, 20151, 19956, 19958, 20054, 19942, 20010, 19972, 19936, 20062, 20259, 20230, 19927, 20004, 19963, 20095, 19866, 19942, 19958, 20149, 19956, 20000, 19979, 19949, 19892, 20249, 20050], [20019, 19999, 19954, 20095, 20045, 20002, 19761, 20187, 20113, 20048, 20117, 20002, 19938, 19968, 19993, 19995, 20094, 19913, 19963, 19813, 20040, 19950, 19992, 19958, 20043, 19925, 20036, 19930, 20057, 20055, 20040, 19937], [19958, 19984, 19842, 19990, 19985, 19958, 20070, 
19850, 20026, 20047, 20081, 20094, 20048, 20048, 19917, 19893, 19766, 19765, 20109, 20067, 19905, 19870, 19832, 20019, 19868, 20075, 20132, 19916, 19944, 19840, 20140, 20117], [19995, 20122, 19998, 20039, 20125, 19879, 19911, 20010, 19944, 19994, 19903, 20057, 20021, 20139, 19972, 20026, 19922, 20132, 19976, 20025, 19948, 20038, 19807, 19809, 20145, 20003, 20090, 19848, 19884, 19936, 19997, 19944], [19839, 19990, 20005, 19826, 20070, 19987, 20015, 19835, 20083, 19908, 19910, 20218, 19960, 19937, 19987, 19808, 19893, 19929, 20004, 20055, 19973, 19794, 20242, 20082, 20110, 20058, 19876, 20042, 20064, 19966, 20041, 20015], [20048, 20203, 19855, 20011, 19888, 19926, 19973, 19893, 19986, 20152, 20030, 19880, 20012, 19848, 19959, 20002, 20027, 19935, 19975, 19905, 19932, 20190, 20188, 19903, 20012, 19943, 19954, 19891, 19947, 19939, 19974, 19808], [20102, 20041, 20013, 20097, 20101, 19859, 20011, 20144, 19920, 19880, 20134, 19963, 19980, 20090, 20027, 19822, 20051, 19903, 19784, 19845, 20014, 19974, 20043, 20141, 19968, 20055, 20066, 20045, 20182, 20104, 20008, 19999], [19932, 20023, 20042, 19894, 20070, 20015, 20172, 20024, 19988, 20181, 20180, 20023, 19978, 19989, 19976, 19870, 20152, 20003, 19984, 19903, 19904, 19940, 19990, 19922, 19911, 19976, 19841, 19946, 20273, 20085, 20142, 20122], [19959, 20071, 20020, 20037, 20024, 19967, 20044, 20009, 19997, 20045, 19995, 19831, 20035, 19976, 20049, 19958, 20021, 19887, 19961, 19928, 19805, 20173, 19928, 19939, 19826, 20096, 20078, 20100, 19935, 19942, 19969, 19941], [19876, 20056, 20071, 19886, 19979, 20174, 19978, 20037, 19933, 20184, 19948, 20034, 19896, 19905, 20138, 19870, 19936, 20085, 19971, 20063, 19936, 19941, 19928, 19937, 19970, 19931, 20036, 19965, 19855, 19949, 19965, 19821]]), unit='adu') target_mask = np.zeros(flat1.shape, dtype=np.bool) # No bad pixels in this scenario ratio = flat1.divide(flat2) mask = ccdmask(ratio, ncsig=9, nlsig=11) assert mask.shape == ratio.shape assert_array_equal(mask, target_mask) # 
Check again with different ncsig and nlsig ratio = flat1.divide(flat2) mask = ccdmask(ratio, ncsig=11, nlsig=15) assert mask.shape == ratio.shape assert_array_equal(mask, target_mask) # Add single bad pixel flat1.data[14][3] = 65535 flat2.data[14][3] = 1 ratio = flat1.divide(flat2) mask = ccdmask(ratio, ncsig=11, nlsig=15) target_mask[14][3] = True assert_array_equal(mask, target_mask) # Add single bad column flat1.data[:, 7] = 65535 flat2.data[:, 7] = 1 ratio = flat1.divide(flat2) target_mask[:, 7] = True mask = ccdmask(ratio, ncsig=11, nlsig=15) assert_array_equal(mask, target_mask) mask = ccdmask(ratio, ncsig=11, nlsig=15, byblocks=True) assert_array_equal(mask, target_mask) mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=True) assert_array_equal(mask, target_mask) mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=True, byblocks=True) assert_array_equal(mask, target_mask) # Add bad column with gaps flat1.data[0:8, 2] = 65535 flat1.data[11:, 2] = 65535 flat2.data[0:8, 2] = 1 flat2.data[11:, 2] = 1 ratio = flat1.divide(flat2) mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=False) target_mask[0:8, 2] = True target_mask[11:, 2] = True assert_array_equal(mask, target_mask) mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=True) target_mask[:, 2] = True assert_array_equal(mask, target_mask) ccdproc-1.3.0.post1/ccdproc/tests/pytest_fixtures.py0000664000175000017500000001107113207605210024263 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import gzip from tempfile import mkdtemp import os from shutil import rmtree import numpy as np import pytest from astropy import units as u from astropy.utils import NumpyRNGContext from astropy.io import fits from ..ccddata import CCDData # If additional pytest markers are defined the key in the dictionary below # should be the name of the marker. 
DEFAULTS = {
    'seed': 123,
    'data_size': 100,
    'data_scale': 1.0,
    'data_mean': 0.0
}

DEFAULT_SEED = 123
DEFAULT_DATA_SIZE = 100
DEFAULT_DATA_SCALE = 1.0


def value_from_markers(key, request):
    """Return the value of marker ``key`` on the test, or its default."""
    try:
        return request.keywords[key].args[0]
    except KeyError:
        return DEFAULTS[key]


@pytest.fixture
def ccd_data(request):
    """
    Return a CCDData object with units of ADU.

    The size of the data array is 100x100 but can be changed using the marker
    @pytest.mark.data_size(N) on the test function, where N should be the
    desired dimension.

    Data values are initialized to random numbers drawn from a normal
    distribution with mean of 0 and scale 1.

    The scale can be changed with the marker @pytest.mark.data_scale(s) on
    the test function, where s is the desired scale.

    The mean can be changed with the marker @pytest.mark.data_mean(m) on the
    test function, where m is the desired mean.
    """
    size = value_from_markers('data_size', request)
    scale = value_from_markers('data_scale', request)
    mean = value_from_markers('data_mean', request)

    with NumpyRNGContext(DEFAULTS['seed']):
        data = np.random.normal(loc=mean, size=[size, size], scale=scale)

    fake_meta = {'my_key': 42, 'your_key': 'not 42'}
    ccd = CCDData(data, unit=u.adu)
    ccd.header = fake_meta
    return ccd


@pytest.fixture
def triage_setup(request):

    n_test = {'files': 0, 'need_object': 0,
              'need_filter': 0, 'bias': 0,
              'compressed': 0, 'light': 0,
              'need_pointing': 0}

    test_dir = ''

    for key in n_test.keys():
        n_test[key] = 0

    test_dir = mkdtemp()
    original_dir = os.getcwd()
    os.chdir(test_dir)
    img = np.uint16(np.arange(100))

    no_filter_no_object = fits.PrimaryHDU(img)
    no_filter_no_object.header['imagetyp'] = 'light'.upper()
    no_filter_no_object.writeto('no_filter_no_object_light.fit')
    n_test['files'] += 1
    n_test['need_object'] += 1
    n_test['need_filter'] += 1
    n_test['light'] += 1
    n_test['need_pointing'] += 1

    no_filter_no_object.header['imagetyp'] = 'bias'.upper()
    no_filter_no_object.writeto('no_filter_no_object_bias.fit')
    n_test['files'] += 1
    n_test['bias'] += 1
    # Light and bias frames that carry a filter but no object/pointing info.
    filter_no_object = fits.PrimaryHDU(img)
    filter_no_object.header['imagetyp'] = 'light'.upper()
    filter_no_object.header['filter'] = 'R'
    filter_no_object.writeto('filter_no_object_light.fit')
    n_test['files'] += 1
    n_test['need_object'] += 1
    n_test['light'] += 1
    n_test['need_pointing'] += 1

    filter_no_object.header['imagetyp'] = 'bias'.upper()
    filter_no_object.writeto('filter_no_object_bias.fit')
    n_test['files'] += 1
    n_test['bias'] += 1

    # Light frame with both filter and object pointing keywords.
    filter_object = fits.PrimaryHDU(img)
    filter_object.header['imagetyp'] = 'light'.upper()
    filter_object.header['filter'] = 'R'
    filter_object.header['OBJCTRA'] = '00:00:00'
    filter_object.header['OBJCTDEC'] = '00:00:00'
    filter_object.writeto('filter_object_light.fit')
    n_test['files'] += 1
    n_test['light'] += 1
    n_test['need_object'] += 1

    # Also store a gzip-compressed copy of that light frame.
    with open('filter_object_light.fit', 'rb') as f_in:
        with gzip.open('filter_object_light.fit.gz', 'wb') as f_out:
            f_out.write(f_in.read())
    n_test['files'] += 1
    n_test['compressed'] += 1
    n_test['light'] += 1
    n_test['need_object'] += 1

    # Duplicate the pointing under plain RA/Dec keywords as well.
    filter_object.header['RA'] = filter_object.header['OBJCTRA']
    filter_object.header['Dec'] = filter_object.header['OBJCTDEC']
    filter_object.writeto('filter_object_RA_keyword_light.fit')
    n_test['files'] += 1
    n_test['light'] += 1
    n_test['need_object'] += 1

    def teardown():
        # Reset the counters and remove the scratch directory.
        for key in n_test.keys():
            n_test[key] = 0
        try:
            rmtree(test_dir)
        except OSError:
            # If we cannot clean up just keep going.
pass os.chdir(original_dir) request.addfinalizer(teardown) class Result(object): def __init__(self, n, directory): self.n_test = n self.test_dir = directory return Result(n_test, test_dir) ccdproc-1.3.0.post1/ccdproc/tests/data/0000775000175000017500000000000013207623133021345 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/ccdproc/tests/data/sip-wcs.fit0000664000175000017500000005500013207605210023432 0ustar mseifertmseifert00000000000000SIMPLE = T / Created by ImageJ FITS_Writer BITPIX = 16 / number of bits per data pixel NAXIS = 2 / number of data axes NAXIS1 = 100 / length of data axis 1 NAXIS2 = 50 / length of data axis 2 INSTRUME= 'Apogee Alta' / instrument or camera used DATE-OBS= '2011-09-01T02:09:05' / YYYY-MM-DDThh:mm:ss observation start, UT EXPTIME = 120.00000000000000 /Exposure time in seconds EXPOSURE= 120.00000000000000 /Exposure time in seconds SET-TEMP= -18.899999618530273 /CCD temperature setpoint in C CCD-TEMP= -19.015454250000001 /CCD temperature at start of exposure in C XPIXSZ = 9.0000000000000000 /Pixel Width in microns (after binning) YPIXSZ = 9.0000000000000000 /Pixel Height in microns (after binning) XBINNING= 1 /Binning factor in width YBINNING= 1 /Binning factor in height XORGSUBF= 0 /Subframe X position in binned pixels YORGSUBF= 0 /Subframe Y position in binned pixels FILTER = 'B ' / Filter used when taking image IMAGETYP= 'LIGHT ' / Type of image FOCALLEN= 0.00000000000000000 /Focal length of telescope in mm APTDIA = 0.00000000000000000 /Aperture diameter of telescope in mm APTAREA = 0.00000000000000000 /Aperture area of telescope in mm^2 SWCREATE= 'MaxIm DL Version 4.10' /Name of software that created the image SBSTDVER= 'SBFITSEXT Version 1.0' /Version of SBFITSEXT standard in effect SWOWNER = 'Linda Winkler' / Licensed owner of software PURGED = T / Have bad keywords been removed? 
LATITUDE= '+46:52:00.408' / [degrees] Observatory latitude SITELAT = '+46:52:00.408' / [degrees] Observatory latitude LONGITUD= '-96:27:11.8008' / [degrees east] Observatory longitude SITELONG= '-96:27:11.8008' / [degrees east] Observatory longitude ALTITUDE= 311.7999999995668 / [meters] Observatory altitude LST = '18:22:52.8880' / Local Sidereal Time at start of observation JD-OBS = 2455805.589641204 / Julian Date at start of observation MJD-OBS = 55805.089641204 / Modified Julian date at start of observation OBSERVER= 'Matt Craig' BUNIT = 'adu ' HISTORY +++++ BEGIN patch_headers history on 2013-11-07 13:54:02.872029 +++++ HISTORY patch_headers.py modified this file on 2013-11-07 13:54:02.872029 HISTORY Changed IMAGETYP from Light Frame to LIGHT HISTORY Updated keyword LATITUDE to value +46:52:00.41 HISTORY Updated keyword SITELAT to value +46:52:00.41 HISTORY Updated keyword LONGITUD to value -96:27:11.80 HISTORY Updated keyword SITELONG to value -96:27:11.80 HISTORY Updated keyword ALTITUDE to value 311.8 HISTORY Updated keyword LST to value 18:22:53.19 HISTORY Updated keyword JD-OBS to value 2455805.58964 HISTORY Updated keyword MJD-OBS to value 55805.0896412 HISTORY ********* FILE NOT PATCHED *********~~Stopped patching header of /Users/HISTORY m HISTORY attcraig/ast390/raw/2011/2011-08-31/sa110sf2-001std_b.fit because of ValHISTORY ueError: No RA is present. 
HISTORY ----- END patch_headers history on 2013-11-07 13:54:02.872029 ----- HISTORY Updated keyword OBSERVER to value Matt Craig HISTORY +++++ BEGIN patch_headers history on 2016-11-21 20:10:41.595989 +++++ HISTORY patch_headers.py modified this file on 2016-11-21 20:10:41.595989 HISTORY Updated keyword LATITUDE to value +46:52:00.408 HISTORY Updated keyword SITELAT to value +46:52:00.408 HISTORY Updated keyword LONGITUD to value -96:27:11.8008 HISTORY Updated keyword SITELONG to value -96:27:11.8008 HISTORY Updated keyword ALTITUDE to value 311.8 HISTORY Updated keyword LST to value 18:22:52.8880 HISTORY Updated keyword JD-OBS to value 2455805.58964 HISTORY Updated keyword MJD-OBS to value 55805.0896412 HISTORY Set image data unit to adu HISTORY ********* FILE NOT PATCHED *********Stopped patching header of sa110sf2-HISTORY 001std_b.fit because of ValueError: No RA is present. HISTORY ----- END patch_headers history on 2016-11-21 20:10:41.595989 ----- HISTORY +++++ BEGIN patch_headers history on 2016-11-22 13:44:33.242638 +++++ HISTORY patch_headers.py modified this file on 2016-11-22 13:44:33.242638 HISTORY Updated keyword LATITUDE to value +46:52:00.408 HISTORY Updated keyword SITELAT to value +46:52:00.408 HISTORY Updated keyword LONGITUD to value -96:27:11.8008 HISTORY Updated keyword SITELONG to value -96:27:11.8008 HISTORY Updated keyword ALTITUDE to value 311.8 HISTORY Updated keyword LST to value 18:22:52.8880 HISTORY Updated keyword JD-OBS to value 2455805.58964 HISTORY Updated keyword MJD-OBS to value 55805.0896412 HISTORY Set image data unit to adu HISTORY ********* FILE NOT PATCHED *********Stopped patching header of sa110sf2-HISTORY 001std_b.fit because of ValueError: No RA is present. 
HISTORY ----- END patch_headers history on 2016-11-22 13:44:33.242638 ----- HISTORY WCS created by AIJ link to Astronomy.net website HISTORY WCS created on 2017-04-16T21:01:55.162 WCSAXES = 2 / no comment CTYPE1 = 'RA---TAN-SIP' / TAN (gnomic) projection + SIP distortions CTYPE2 = 'DEC--TAN-SIP' / TAN (gnomic) projection + SIP distortions CUNIT1 = 'deg ' / X pixel scale units CUNIT2 = 'deg ' / Y pixel scale units EQUINOX = 2000.0 / Equatorial coordinates definition (yr) LONPOLE = 180.0 / no comment LATPOLE = 0.112838900008 / no comment CRVAL1 = 280.544106813 / RA of reference point CRVAL2 = 0.112838900008 / DEC of reference point CRPIX1 = 37.5 / X reference pixel CRPIX2 = 25.0 / Y reference pixel CD1_1 = 0.000152978060309 / Transformation matrix CD1_2 = 2.52560835888E-05 / no comment CD2_1 = -2.48148467064E-05 / no comment CD2_2 = 0.000152891927788 / no comment IMAGEW = 3073 / Image width, in pixels. IMAGEH = 2048 / Image height, in pixels. A_ORDER = 2 / Polynomial order, axis 1 A_0_2 = -1.15762991047E-06 / no comment A_1_1 = 4.31199937889E-07 / no comment A_2_0 = 1.63167357146E-07 / no comment B_ORDER = 2 / Polynomial order, axis 2 B_0_2 = 4.9476013938E-06 / no comment B_1_1 = -6.08988358344E-07 / no comment B_2_0 = -9.96142066538E-07 / no comment AP_ORDER= 2 / Inv polynomial order, axis 1 AP_0_1 = -6.1308893291E-06 / no comment AP_0_2 = 1.1576402513E-06 / no comment AP_1_0 = 6.8330870317E-07 / no comment AP_1_1 = -4.31205686397E-07 / no comment AP_2_0 = -1.63168312753E-07 / no comment BP_ORDER= 2 / Inv polynomial order, axis 2 BP_0_1 = 2.45641194548E-05 / no comment BP_0_2 = -4.94764841932E-06 / no comment BP_1_0 = -2.18798222057E-06 / no comment BP_1_1 = 6.09013751394E-07 / no comment BP_2_0 = 9.96148449409999E-07 / no comment ANNOTATE= '269.5,1756.51,30,1,0,1,1' / HD 172652 ANNOTATE= '1681.64,1324.56,30,1,0,1,1' / HD 172829 PC1_1 = 0.000152978060309 PC1_2 = 2.52560835888E-05 PC2_1 = -2.48148467064E-05 PC2_2 = 0.000152891927788 CDELT1 = 1.0 CDELT2 = 1.0 RADESYS 
= 'FK5 ' BSCALE = 1 BZERO = 32768 END x\~n[Nzyiuxpwtuw}p[g}nSu]Œipddrkrcymecmetqel~ia/jm||yrepks]Xkj(}rrursg"fqoz{dtvt}r{tt~ihe`|zp`xdtyeuXc^Ys|tny|s3v{~|Z{qv{kzpX~s^dwdYvvi{2nx~lt|l`W{djYb&v||W]q|\2~di|}x{hjsbn~$sju{|~}ydvc}jnzxe{uzolmlqhzZyywlFbm~mz}n{n[yrkv}|rpvzhlw\|{yjzmu~p`^Wyie6{kmd\Vuclzmjoiu}rxX9ZRqrlbpu+uii|]zo{v[rgqz{V{{d]fryvsH(5\vpwpowzSvqsZÌVV|_pkwe{Mqgtv_hߌerfSumqyndLomrl^az^aeX]yznbd}S~zx}Q{rv}xrmvefhm~szja~As`ZU~\|gcutvqs}oizivis}{sgO}dceqz|frtscxfrsxnk}hPsqLnwU`qn~i.bh]|{qlu{zrZk{[qpoaYpx|uh}vHax~dzyk!glvyduexqrtr~npxsr||yps]jntmu\z/|kj|cj}cj^chxjOvpzrsay}}t_|.w،t~w}qlpa%l~e}qyelhkolo*}mtoy{&iyzknpps]fczl}cea{܌vhR8t0bpklu#`lezgobdr`vnuqozwofy{u}nfvnkvgzqxɌci4mOx~nts ~xiYtwpnXvxvhgb%Iivy{z|||r[n{j}|wtmlx]ŒTv̌mjV~orw{splAz{~pkkwt^gg}ozuis_tz@/kpYz}ZxpqRi|qklnkbxbzxdmltitr`}xYpVjbsfOiorx_?4ypsq{tvdlr~omNjli]nj]cyjzhuΌhstwesyroxjrX~{r`{d]|zhloh{obknxw p}~y)}{soyol]vUzrntnbTmh}xx~~le|qvvq{DDrxqsiHh_nmtu}􌑌mhek\|Yzj\itS@lGtfrj}#tsjgk}mwpwkpIwvs}pz{fxmyl͌pwiijf^rswpx팎NXpZsus|oogt~x^Yrv0rohsLxLz\L~xtytrsfU~sqyubʌrzUˌ|r-uu_u^WZp}`r}utoŒpyu}ns|vzspg{ob_q}hmzrsu~|vt\zYlUaV}ly}T|RhsxinbVdyf_qNquZy}|_eigyvQx$xxf_tIuq^l~~k\Rx|r\kSvwyl.^snzuvTut}qyqvtGsxge:wLn|r[TWosdf-wr{mspOeyxq~vЌrx}cSnq}vv~ljvqf/oGpzbz}thnj{Owxomoym}jm|~Mz@vvgworur|uJ(g~tviwvxXt^zj9zsx^hqgsw_l@eflydss{pXvs`xyyHccdproc-1.3.0.post1/ccdproc/tests/data/a8280271.fits0000664000175000017500000211070013207605210023225 0ustar mseifertmseifert00000000000000SIMPLE = T / BITPIX = 16 / NAXIS = 2 / NAXIS1 = 536 / columns NAXIS2 = 520 / rows OBSERVAT= 'SAAO ' / observatory source TELESCOP= 'SAAO 1.0m' / OBSERVER= 'crawford ' / observer INSTRUME= 'STE3 CCD' / DATE-OBS= '2013-07-13' / UT date OBJECT = 'rf0420 ' / IMAGETYP= 'object ' / IRAF image type FILTERS = 48 / Filters 10*A+B RA = ' 22:04:08' / ra DEC = '-00:55:31' / dec RA_OBS = 331.0334 / ra in degrees DEC_OBS = -0.9253 / dec in degrees EPOCH = 2000.0 / Equinox of RA,Dec UT = '00:57:33' / UT at Exposure start ST = '21:45:06' / Siderial time at Start 
exp MJD-OBS = 56486.03997 / JD-2400000.5 at Start exp EXPTIME = 150.040 / integration time in secs SECZ = 1.176 / Air mass at start exp TRIMSEC = '[ 17: 528, 1: 520]' / Useful part of data BIASSEC = '[ 4: 13, 1: 520]' / Overscan region BSCALE = 1.0 / DATA=BSCALE*INT+BZERO BZERO = 32768 / GAIN = 1.9 / e-/ADU RDNOISE = 5.0 / e-(rms) read noise CCD-TEMP= 180.2 / CCD Cu block temperature COMMENT = END ƀӀԀрԀـрրրҀ׀׀ր׀ҁ$&/%+%,$/$#$'7 )4 &&/)+5 $'%, *&(%/#'+! '*8/&,-&04)&*1!+$'##%!7"*'(-%4) $-5#71&''&4%#&' ),'!5(,%'&','%)"-.( -*$&1-" '!'5"#"(($+-5'';('*,1,,.*8#3)'%!/4+1;(/ /$(#'%.&%!%!2%*!$1())' %5/%,2")% &'7!0*!''&!)/-%*%%.+%.('.*)+#,$*)2.,-*+) ,&#%%#>.0-#"//)&-+/$-(**/+*$!)*523,)",&4!!/+'0')()++/ $&3&1-9%#'#1-.!/ &*)%"13#621/%-3($%$!($) +1-"$(0,,*$ &0,5:6*;2(0$$/,(( %1'3**0$4"$1+('(1(*(",+)*-1&8 $+*.$,)(.5*&$-+2*-#',%#'#).'()0&'*"**2. 089&/#1$"-%.!/)#0#-2؀Ԁ܀ـ؀Ҁـ׀ՀƀڀՀ݀ހրԀ׀ـڀՀրڀրہ;9.-65$32.53-8<08()&15.75/+#/$%2.4"89+2:'853)3-/>+"&3.*-*--(!(!9/.2,*),*,!-)719+/56@#*5,*3(2,5/+/%.-10%(,0-&*-44/$%2,0&).*+22$#%-3(0.1C4-*+1%%*%/87"+5'7,;*-$/&5:%(1)*)423512*/,'),70+*&9+,62,72/'1B>+7/3867++:50+-*4.$!/6!&01:28+37%',6;1).)9+*.157+'2531.)+1++&(5%$)/23.+&4# /;66-4.0)//*:3')*,92/5;''75.+-/20%%0/',4*3*$-7+3&.-'56/*3-:(,-41)435/+3!0+)?+46;(@6,2+):660.,)17&B'8%3470%0*23+4/-(/8 ?7+9/6253./4*:-;)37,A/%17*7A ++=(2"29,69'*=57,52$23'6(11.69754,)8=(.0505'=2=73.'365-91/1'6:/.3)&75/356;5,76.ــ܀ր؀݀Ӏۀڀʀ׀؀ڀրــۀۀ؀Ԁـ߀րց0*466+,7)62-005.7(&1-*C&#,*;+(/%*5.3-7:2.+3*,=*'0$)19(.-%7?+6+6952..52;,.2-+(7334,>0=/'5."0%"%828.0/03/-9:9.43(A7%'9:/238+444.-6)'+)$+-*01;55/2/9)"$,63-*7:8/-/+6(9'0;1',+5*&%-0+-,061$66/'+,1,62-7870!726,+/*)-:1$/6-543/55/-58,>+.(1/)853"*20283//0)*.:<+/943(8/11),ـ݀ـڀـ؀Ӏ܀ۀȀڀۀــԀڀ߀Ӏ׀ـۀۀ؀ׁ(03(,73<31':30''8,5;7;#3.@9+(79$5%$3''!)/02;3+3-#'/2/63/+"3 ,/17;1,163*.365)40210639/.2/29*1).)())0#@8$'%2, 4,+:7.4;/43*2--.,-+2;**4+1)7&63'1.7+77('(3.,#1,211@/(-66:44'48%/25*;-*224.5*(.46*,27-../2*+3F9.65-92'5 
>09):/7>251+/+4*4-3$1/7,(#-(3..2/-/8/-=3/,$+@-#-8/.=2$0$%.+2*,10"'*'-*6)49;58$*%'&(.0:!:',7'1#-$*2'/9(7)()01,5'0<.'/ *3;#-(#1#*.(00+($11,.%$$4--5A&0(.74)0%--2*(<'+%558922-42/72E880$//=:7;5728'ր׀ۀ݀րڀـԀۀʀ݀ـۀڀրҀـڀڀ؀Ԁ׀׀܁5?4.$",51,'41.75!//7+55!1450+9-21' -)546#,&.0//-&>&:.90,19#1($%:,585,3.-,/+%16090 -45,"!7'+(1-.'1-*3.-671/&)&/05'&+$3&+7-2(+%)61(&;:#-2-41$-+(-$26+/60!-69,,2"/(4/+.5%-;/(2/3.+6+(.*/=)52"0*42#.**32@);#,+/9!($(6%4=,18)&'%2/%3*12:2.73.*3/83/*78*:*A!(-.4=50;%3(20.,(-3-7/5/153*C71,-/''*$<0&62?7*%4.4D218&)+,2,/.52-<+8+(..,/4*-8/.1+48.#3+/5:9-/<+/)2519,3.5&++":!045!*@5<"$.7.3+34!0@<'6-73,,+-8,)ڀހڀـՀـԀـՀǀ݀ـ׀׀׀ӀڀՀԀԀ׀ր׀ׁ&/6(-:/6/3/835+2**5'*/>2$950:>)%''6'1*'1-,3/ %,#>017/+&.11,/$**07+("&-*#/+%(30'-(>#$+5-/+,$22(6+32+%,/,7%3,)+'0/$=,4#(/,1.5-,+5)%'81/ 27C#4)(& ))%3&/+:$4-&//2.)3/,:)5//5220)1,/)-+51/..'26*!#*%<-1'-6#-*6,1<49*(17+%546;3%/,0%/38,99$-*(-/%81)/9>1&4%$3+(/2)+-1.#581."*2--$-5(*8+))/*%,7++'-%#<,+))9"*,<67&;3+*(*+*(/;,0514,:130=,)(4-+,/7%"+$1-)74017')<+6%*,)94#//(22,+04)!&(1.).-;.9(+)./1671041,,*,3"9--112;%5//1(-;')153.+/1+3-0'+&7,"/6'%2++156" 1(6*('/3(*4)7)(,330"-325!)&5;33,/)14-&%760/#,71%-*-2&&+,,-','5>րۀԀ؀ۀ׀؀܀րʀԀ׀րـӀրۀӀ׀ր؀ހր+34.,)3$+(/"++;00$0(".,&/,@$2/981+%,.+!218,&1(2(21-,13%(9'+! 
'3#')2;)(+-+-+9*-7&.%7/81553')1++051101;+)-7/$+30'-+%!.)#/.,(%-)'2#.5'$*-&42!-*6,5*3/$2-015'9(.91+.;52/*%-91/25.(6&)735('/&&/+")%#&/33'(B4*+0,$:,"6%)50)/-,'027-)F%;-"00).2&0.:310--),(5%)$2(5)4+18)(#232.(,1&+3).93)'160%:--/+(08%-)*4//!1,+(;.$=9:+1$&#+4)15*(!+;@1+-/!5*#<&0"#.4%2//4'+4+,82',-*(9/*611(/''6%#%/4/'*$0)&3()!7$/ *9#,*1,/(--626*$6',.-/*1":,,"55!###!)922)/%(/67;.,7*/)/-)-31641-7+-$7"-@1/071*(+)1' 4)-0)3/3.24:4%36*(3&-81=1/5"0׀рـՀ׀ڀڀــ̀ـۀ׀ـ݀ۀ؀׀ӀҀր׀܀ف'?&$(+75-0134*$,//:3%/+.#+!5,61!++.*23)&F(-50(=-.30*79$-2)=,*8)3%%39+)>49*,&)!0%-45#/31/42')2,302--,)(*644/03'/#$+&!)-/07'$/$/83,(,.)-/?6@+&+%"42$%(0*%5$.+""5%6'*6)+-?3,)(012+7 !1(+-("47822,;3.)3%.2,'/+//-41-.6+%%1.*3$4'54+2-'#//7/4<)!42C/ &5$%%+05/2)%&59*-()/($15',&,(*2$%7-2111"-8-!!53-+.)?(5/$*;--0-2-!&',5--7.528(+"04/!743/901'4/983/1!#%*0*01/641*0'&7+/=**1++'5!-(#:(3(+24;)/446#6)/5)/71!8-,,#,4.%-&/-,#7/-&(5($0*2#'2)/%1$/*0(---4)1/2%$4.-+&*$524-((&/:)+'/+&+900).< 41,:"/10>223-.23+2.:+1+;܀ր߀ڀ؀׀׀րڀŀրՀڀڀـՀՀڀ׀ր܀ـ׀ց.4)>, 3')6*-.,(1775*&)13)90$01 /"#/,*&,1%)),/*()+5,02441781,,9)#265)+72(6&//-004/)%"802-3/--. )975.,03#&.%.6/+5+4-',571.70',&5,&2+/2.&/(*-!0"8.#2, 4 <00/;1$),*/ )230:..Y/+%-#0**/+74&7/('4).*'*/ */2-?*5$"/$21/17-*=$ +00,0)467)3&'B340/0*(*!-;,4.*).+/*21(+2',*)227).1@*<+*/-,)2/-%/=/!$+.'/21*12&,1*1'&&:+,3,,23 ,729&!;%#9*2*+ 7'()%. 
:2#.9((,+9%-(((-014/2$9!(4-)+%4/+.*,%3,6-2+')>+03%.$5%&%)..=2(4--2431),&)+/*/948',1."*"4*-3'-/3*&$ ='#41+,,,9,-6!12&%1.6$'%1'25+(1,/ )2!-3*4:13/,42.'-422:/+&14-)-24=ـڀ׀ـ׀׀׀׀ՀƀԀۀڀ؀ՀրҀӀ׀րՀڀڀց733)703/&3" .84+0//,&6*/!/+:2")-*-.,/!)2$0,A+3:.-/)"1/2(/-+$$13/#/)8&6%#+34/",./2/85#3,?73*-=)0+0),%;+2&,)3'>3,/23.5/&03 8+5 0/%'2)/(9.#&/./.4"092*%/%5/(8*7$-(0!.%5/1.%C&=0+#2+-*(1;(!(%3+'*.#2,(,/(2%49&',*72*2/721,"*7--6&32'=22+* 0-,"7+38*%'"&213./0((48'58(1*0)!45'8-<3/*'0%+'1-/&#-0*3(&-4-&,&0>!&)1?3,$0$)'2(*%243%("#/41&'.0&1%$"7+4)/$"9-+%+0//..1?&2G,(6./E6-0-09,<4+-0$+7&-&6,'4+#+(/,,-$'2)+>&<55+,.3(21.(1*/0=-%'4,.')-1$,!60'),4$359$*5#(,))"++,6'5+&0",2*-%&8,-(.,2'.(*2;->)46$1%3#-.&-/"<12/Ѐ؀րЀ݀؀րրҀŀրـ؀ۀ؀׀ـ׀ր؀׀рـӁ17///+*+&+.)57&441-0(B1)/'#5( /8<+.(074;82%,50$':6('/4--/+-#%# 13.$:0(()2-*+021(+=(-+/3!';*&"3"- #/"3/&,#8%/1.!%/-'))31))-+/.)''0 1/%,*1/-5*4#8(8.%"2,-'1+/$0*$.+%.//'34!/4.4($0)!1*36-!)(-6,;74**50%&+01&.)57(-052,=//&.6;%!0%8)&!--/0)-,,"7)#/-/,102-*#'2'!/.5'/4%/+'',.2&..1+/$".,.) )-, ',2)@/94+$/4))0(+&-.,%3//6%0%2(%)-(/'+%>.D:,1#*++405,)*35+0.)-77$%20/:))1+*(03.$3):>9#2*$624)-!)+'-+354$*,(/+*0#')(/.2./�%)-.&552&)/+.,&+68/2(13*"5'(+*+(*=/471+! 1+("#(+)2,B/13%803!(&*.+.(.1,7('!*()?*+3<".%2"7,,*2ـ؀ՀـՀۀӀҀрǀӀ݀׀؀؀Ӏ؀݀րրЀڀՀځ4.*2+-"-/5*'90-.')'(5+111*4(6-+,'!%%4%-/.#/:10+.0)+)76*,2 //&?)-5.&-2$*'4//4#$#/()0%31/)2'E/0&$+'"",$%74'.%.+$ .'&1+)8/#.$.-'9105-.#7)!00-.5))&%#(-.5!)*)(+:/%'*+6-+&2/$!/1/%/,.,-&-.5%-3@#17'4!,-..(#*#$>-7)"+')%-(.!#&1=(*14)/3,5, &'3'"%5,.-#&-,/+%%5(1)/..8%9(+".5$7%+/$+,#&3*#%1($)&=*(.87#*(15--+-22+/9!&%25(1&'51#0/.0(+$/!34(-2/3,'(/4.40&)+/(/,0+-%,416$;5'3 0),<-(550::%"-79*2$',142/7/721,-=4%+1936,06499/.1'$/70?0-3/21*278&4713391,,5Հـ܀؀ۀԀրӀ׀΀ԀۀրՀڀրـրӀ׀ԀրՀՁ !0/41"/)116)%;1(.*!04.",*-%2/6+12. 
)&+3*/(7965:8+**1'/*,3"1/.-!-'(9(<$9&,0&5'#2.1.,:&6)6+)0%!,-2- 5+#-+))".(#.!-"-!5-*'+06+$+5#*706%7%-6/0/02"),84+8!'<533'21;3=6-44./&2016(2$'3*1#$2/:!1%&) 0-/%**-114(6'%/!1.):%-+/&(1(:7*#*+/<,'.:($.0+.($*#1.1$=9///,+%)+%-+)+&./ )-&//(,1$;,'4/"32)#-0 ,4.+5/0%-+57,'(//95(*-242!7"'21(,5'*-)5#*+33+?"%-00!4&, .(1(//-0;2.&- $.&%'%&4&;-)54!1+05-3/##(/-+-'"97/(29029;/*1.%(4.&*&0,0'2/%000*%/K1&%.)'9-73-42 08+24%"/'-&"#)+-.+0!3)()-3#4,<+$!@8-$:08.))*//+2$#*&(135;?CրӀڀՀՀҀ׀ـՀƀ׀Ӏ؀ـ׀Հ׀Ҁـڀ׀Ԁрׁ%;$5',41/%3$',21+&+.04:#7#4(7143$)1**32&-4=/*/6(#%0-.%.(+:,.8#)$1&9>/'$#!2*,%*2(9$.9"'+"5+(1!)*.19&)*'3,)%'042 +&65&%4.6*-.5)6,.3,7&-5+-7()2*0013/.30.--%+3246,)+'(3&/0)2='2%*.3+.+0&.4+1)./0/0++0&006/4 ;/#&;.;%-02'1)2,,(35-&#&,0.,*-514&' 6-,;+%5*'/%*+)1'50-,#01&9/?&5'-)&)(! %;(">!-)%/55)(/%--417 &1'(-/+'/--*2"-')02/++/1,%1$61)*2&.5+*.60)&//95&&,-+%4--"./1!//-/$'1084+),.-(+74.+)&%7'5&%28*1.)(3%+,6/12&1-6"+//*# 5''2+4))=17( +0.34(+&.*").3:)-3,, 6++&&")-+&")/'$)-63072+*503$22404(&(0+,(1*/3+12+%76-.׀ՀـԀ׀׀Ӏ׀׀Ȁ؀؀р׀ڀՀڀՀ؀ӀրԀـف/03'9(6233);#-1"/$#,(/3)"2# '&.#&$))(*82$)1-(%/6('%4+/90(&,,&+(/;4 /4)45%,5/*+!* )*!,(%*?#&+)&"*3(/'9(/*4'/*48)+.-*5/%'+/0/$&524))79(/+--08-120*172%)).(+);8%"4/(/%,)#++*"#*34+-&+2>(*1:,34'.'&;"'91%903.*<'"251.+/'4&1*4+/)3&)4+&3)63,(;'24/1,-)/)).27*(4&1#0,/705&%2&&/507+"0(/$54,(41(.32+,($.)+(.&) /!&6-1"/(73* 7-50+)2-*.8725/#'$(),*(!,(("*)''(';!4))-!'(-=:/%+09(,((:$15%34*! 52'9(7)#--'.((.2-)-( ,1=27#6'1/.311-12>,$-7*&,42-.-"/%3-&3592-+)0*-204,1'%,&."&+/3.3-3057.5)0+'$'0'-1.!$4-'%'$.'29,85׀׀Հ׀؀ـր׀ՀǀҀՀ׀ӀӀԀՀՀ׀܀׀؀Ӏف&%461/:)?11*(!')). 
(4-1#*+'$(7'$('*).),4(*'&'+.20/-(, 0; +,--"66$+,%,3+45'"313**,+,920+,'/-#4%-,.&.**/A%!')$9)%(4()1D*)/$)1-:0(.,07'%/11,9%+:",7.225"*)05-+*'1*",-.''*-3%.*4-,&6'46+#,'!1#,$1./-/6##*%1+1/*1"/1./37/'462)4)5/3:3%*/""2!.'+1&0./9-*4..?65.$'*/-6%7+/*,/9$'2,/-,%1#*4*'<./#&(82&2$.3/5%*%#32&(&$)*,->$>-)-+&,.#+)/%001('71*9&($)/9,02+1%'**&(4/2#&4.)**'!+",)@$/1+59&,&$*+.%$&").4.2$).)6.4.:'-69-!)-+."-!344#1-#4+1#+2%.2-0+(+'3"&,6/;//.-4$.<)*2(-*6/- 1)2+-'%3"&,+,7!9-1-4#1(2&&'3(0/%/7ՀԀ܀ӀրــՀ׀ȀڀՀҀڀրۀրԀ׀؀ـ׀րՁ,&,1#*2-((:-(7--%5$,02.&5'"(!.28&/33"'(%65 '+2"8%3/(,'+77!!/-051,8-*0-., #+(-5(%*35$*0"4*%+%1-23%&3$)4/&*),+41.'0/1&+3/306(&2(-17/")31)(3(2/3**/$1&0/4 //0(+#*!$0,-#)031)1.!)%($#.#2415&)%1/$/''.$"/,%%-11>4'*&*'+%+.37,%3''/)8&%/43*7(,.$4-,%2,6/0.0%8.%0-1&+%C(%./#%*/,65)&(/4.(+$5(21. -%=)'/,32"&9$)'"3'&.((%3&&&!4"75)&24./4(3&5)0')"5%-)3/#/+-(/>8+-!*+/''.3(Հ׀րԀЀӀ؀܀ۀπ׀Հ؀Ԁ׀րր׀؀ր߀рӀԁ:2(/*!/++."(+-4':'-$)/6* 3-$&5#*#.40+4"<(*0#$21-'4.%)&2$)5>!6!070(5$3%&*,',-0)5!7-/./#'010;+!*1+.0#2,+*(*)2/8"9-+3;5-/**)"/.0'/,5,(5*72131$8*+.3/':-"02++((,-%+(+-3//2-'5/),#/ +,$;#!)/-3(-7 %5#'*3$&+/71#,50)+%/#"/.+2*+A(20,/,*"/4&./,((.5:/+%-,+/5<"6113#&'$&58,5+0!(,+ -#1 .(,((* :/#4,,.('/ ///)'$)#+4/'&+",'3 ='+((.).01' D'(!$$+/+,;(0.*/ :!-/3.371$5-1+(3''-06#,1830262%6''2'04.6)$0*.#"+'8*+&05&&.5416)1$(*#*/-:*/2-7!(,.+&*/3(2*5/-+5/2)0.!/0&&1.%0#3/++&*/*-01'103*67./#-AG'02(8/5.&0381'"2+/-/)؀ـЀԀՀԀ؀׀Ҁɀ׀׀Ԁ׀ۀ؀рԀրՀրրԀԁ/-%-,((+28+#96"%&*,,45$-(%.8#..036-7*0(0+/,//)00'"'$-%/!.,#'20-30*-2 ;")':-.#/+6*4!*2%,-#"3.%-9/4(4:!+)#(9)-(0 0'-%!(, )",/3.:(',('32(-(2,0$!&*.):555,3%13$455##4- *)/3&)*.-6).1<8%# )3%)&.(6+:/1 #)+'-+/%,/4$.9$+%:.))0.*)(#40/%.,7,%*0+"(3'(%#&/1/510#)3'0524)-2"*5('&123/*2)/%/,6:33) '(/+-)2,-(')7&1,-51*1-%7*=*1.#,40;-*/',,802*'7/$/'&(, %1&(=-)*-#)/'.+4,&'3.+(#$#7((@)/16-+&,!)(-//*-+-B'*$%*)10:4)+)#*$*+(//4$&-+8;$(6!+&*.1-377*).*-.-(!(7#6..**1.+1&$94+9"3?+5%.5#$(2,%//30#//0/*+1-3+#06/'2.',/(8Ӏۀ܀Հрـ׀׀ՀƀҀ׀׀ـԀրրـҀ׀؀׀ԀՁ,:(-73(*/--0- 
8)0.5.'$%.'-'7+"/"$%1%%)#1)-")10/$*((*-5,?-5)-3**)(($18#.+-0'-+93/,6"#$0#&7/8''+,)/().4 0.3.-*!&-7*'5#&6,620*32":// $)&%4/,6.(/D,(65, 22( 20+-/5+734(),,%'3&61.5'22#%/,)25'-% +.14)%-%32* $+$.,-*&%#//251-%#.!%%70"43),8*-6(/-5-'0(6("0.1$.58-08-(3:+,429#6'-%7#/=+74?'*#.3/972$-'9)**3,8!0*#%-6,*#/# +%(&"1../2-%%.52,*-*.- 1 /424-2*'',2/8+,',(/#,1.*+'5.-/%'&*/,36).&%,4&62(#%/00'2/&2//7'5+4*++.=,)1313,<2&21.&+&'(-*='13+,+"))2)6+1(2+%//1)),2'5# ),);(5!!2,/256/'(0-"B0(7+'/+3234#B-؀Ѐ׀܀ـԀՀڀՀʀҀ؀ـԀۀ׀ր׀׀؀܀׀ۀԁ&,6"/.-/$/&.),+/0'%,97'4.,*!,&;&%+0)('*4)+)3).));.0%!16 %1 .4/1&,*,(/#3,'"!/H9'+:&2,/&+$*/1/&3!2(),3')25/+'&.70(/,2!,&$/-2,/424"%#,2/41,7++9')-%'1!42-1739@+-.&,-50#+,.450,4=,!#*8'*7-6.&8%!(1&.+0.'60.)/=)/--"7'(%!/)(!'' ($.+0()3#2.(0.5+;/*+&(.&#-2'&+1-('%)#-)))4$)!<1/34)2(%(95$(03)+#(.#/6,6,(21,312'+&5.6+('18-,1248)+6#/*.39"&-&*,5-",="%7/1/))#-)7-:!,-(* .1/%07,66/*4$4902045'22)7 ,$!!2&8!0?&+>)%73+,")-"*.)25.&,>$#32/7/5+.*'")--%,."/%)/.-&40,2//2-*(&"4% 57'1$+70'$30+063*",85.*=,,7416-1ـրԀӀԀՀҀـр€؀ՀЀЀۀـ׀ԀԀԀ׀ҀրӁ91"%;,,&&.4.(04,-/#,/91&78*%+-8+.%+0!"*+%3#'6+%)#4#/712+$&!2"&-3-8!'302 9!+$0 -#&--%2,9-.5(,4 .0)+31*02/342(/0!)9-3/'6.(97: )3'15&("()+/6=/7+.8.%251,1(**,82./778/''9-&),-:-)1+(%'9),/.%."/3!11'2/)'(&/*&(3.(.*30&6(,3)0.8(#%+69+'@/ 2(,%6&"*,.$),(!'060&.-(+8)42,#1#1)'')5!.&1-"+.+./')-0/+1&(&.&%*4(03:)*$'-((4'+'+4'*3(#*)(*,--+&6,4))6),3<;7/#250%11"%*'/&,)/+ 14-6'-*+:/)."+%.-5+4,"60+-6+&--'3&!)1&,!3.+;-*//,!>9$ >42,46,79-(..3*./- 3')''*00.+%%65%5300-4$+/*6($&*6+.+2 /7-)&"*+/#7&<14"1'3*+5-2&7;#2*%?׀׀׀؀ր׀Հ׀Ӏǀ؀ـــ؀ހӀ؀܀ۀ׀ӀҀف&/*23+)25%1;%0///6%102&(.++.&:+&&1(7%/%)&3) &,6(,)4&1,2!/".-0#2./"0%=*<5!8'-,0752+1(5/+435!&3.70/)1,!2%//;"(,-+/&!0$#,+/-5!463/*6+(.%3+);.%,&)/+@+6958'*20<*"%!9/;/*-3((,"23&%=(%;+,4)#/51#*$)A%/*600,%/0$*#1'712.%(//,3,%*0'62%5#'0.*(9--=+3%.3001!':%%/*//'#'(23,4-//.#$#&$'&- *0%++-%(,73**<1<#7/--$(11-("0',,*194/23)%0&4*%,+&%5),.-*1&5(4%4.46%-5/%($)1"714.'$8/'!'-#"34+!(".+7/(% 
<+B3*(+7$$1/123/2-+8*8'/?/1(-3$)0*&/)'//-'',#89(,",4-+-3)'((,=*(0-&77.4--/671275)%;?24/!*3%/502,*0/"1+)+3'8/39,6'?1рրՀڀـ؀ՀՀڀɀՀрրҀڀՀ׀ڀӀ܀рրրׁ#1.3+!#$9,&.:!/0%,' /8)5,),75".7#,''#'&,51.2$(.2.87''0#/($;0(%0 7##(+4,1&+/)321 ,$2 $+++.*35$/,46)"&0662%+//2!/%*-/2'4"64-3+1)4&-+'5=.5/$2/-+++-'8@-J31.,?35:9)1&/31A5+)0)?$"*88./+-!%%('%)-*,5.=$ 0270%(/*0'//9+(5-/-#,&(-1()%)!8.3!3.+4#3.$)28;':.&+/;#6 3)!:$%1)&0,,540*-.$+2!)*1-&;1.133.%)/4$D*//!3&24"5'2./"')&.%#8/6+1'!&/.)% 6/9&&'/$. #%/#(':&*&2+!&4$.1300"")-%13!2420J,;2)*3 ,))*./(!!'#,$/$18/-2-*,$.)5,'&%&)/,:%3&11/%+-"$/-1$(>#/0.4)&')*(%(7(-+(0--,<-&61"7.:!-'.:)/-1+*(-'/*0/"6;,.-"/&$262,؀Հ׀ր؀ԀррҀÀـрրրҀրڀ؀ԀրՀӀۀց24-&-#(!56./+))(5+;#'475*'.-)."#(-$1*31/44)#/#>39&/5"6((3))#4$+%&4;-#/7*%+42'2'7!925-&%*-&.,*/+(&,&'7-//)*7-/++,C.&&; 1."(*)(/*30"7,0")(:)59?/<@>=:@E6)3'@+%3><.%*13'2#"..$0+!1-(//// %--.+0,+*(/. -))+ ='58(',+)'3-;.3%/'1''1.;"/4%5;-* '30&04'1/-2'02)/,&0'7&@,.#)'./*.'*,*"-)1*#"'$')(-"!+/'/.%1((0/0"9%/7(0/$/&/'#)/$12"&3'-1**//,5#-/0+#.151*' *+(165/%/*')&.&1-2*'1-')2"('98*(,$-0:493))7/(.3&1/7+/2':*1..4<#708+'" 2-&0*6,9,.*5#%1)23+6)1%*26/.)+1.0!.2,#/,+".*,/,K%(3-;30(*&,&11)+56*/-5#'$*#+)ՀՀԀՀԀՀ؀рՀǀԀրӀրڀ؀Ԁ݀Հ׀؀ӀԀҁ5)9"42#.0%-,,2/3&4(#.*/*&'3-%1.*2!,1.+'(*8&%)/+($)$/'2)".4':!*6+,*')(115.+2211"-3$-3127,.'6&"3;2'//$(,4-A+-''*64'('7-5*'5&4(#.;,7)-,#.,,1854.84;8;193866L81+8/2&10!*1()34!)*)'$+24%-3, ..>-%$-#*,$..""$*')45"&-12$(,+/-1)$25-,-3&)+0+45)#9-&,'(33+29-!7"./.+&-.#)/,30-(2&-''(/*%-3/',%07*35%&3,-+7--80*6)#5 - (,/*$#+$3,)&0&*/+3&%+($'3*$+$&'+%$'.&(3&4&'0$5-+4&2,&11(*'""/1**0.2,(1(31/. 
&-+)/&.&;)<383**/,..)8-+%"!<2'*.<2.+"5$*-(+)9%(7:+/9*"-@.2+.)1&(3;)254'0+-.95915A/+#-7,.-/18&''$0--*.112-:ӀـԀՀ׀րрـрǀ׀܀ۀـ؀׀Հ؀րՀ؀ՀӀс./($'+4#2-2.$--/56-*,.,63-)&.1#3(1$-$03,#)<)-).2 7(-##-%5-%1)#,,+/)("+"/5.%)509*+*-<)'4!!".)7(2/4#!%1,';+%-++/35.47,8(11*+,$9.*1:/1:8+2-<74+;.=@WG7F:=93/24&.2&("1*!4+18.:"(*----!"0*6.95&2%**%&&"06"%13'-&3'5,.//3,"//((66*'&4)+'+'24"2+&%%7(*$'33,+'&0'$2#3(.'-&5-')+-7*05241 $+.23.&2)/+>*+/&")+#/)0/-$*3 ,)(54!!#1.11/0 %&+'%.%+5$*2)&1)2".6*+(05&09B208%,+23!+-*+0--((%5(0*+ ,1B,-/.5'+'3/&1,3:+&&$+06%1*,7,$'&)7((.(,#+).+0$*8 *-90'/ -/%4 +)!4=(.-:*4*0('2@2+-&+-!-+$.&5.&2+$1!:.%3%/,2,%3/'/'4/40*'95ր׀ۀ؀րՀՀԀۀˀڀԀ܀ӀЀـր׀ـҀڀڀրҁ,25@#&,'0!,&82+%/12%"%!6*2'35,%:01//2)(#++'8-.4/"/*0*!*0/ )/4'$'1-&-,%3*/$3)1'-&&3--0+./'%+00'1)%&.2/=.:-."7.0*/&5 21)!+89//5+1/-'0&1'<49>@:JMOFN9JFIM48F:B)%)27&=+--.2(*2%.-+4:!%)+*/2$-,1+/.,8*+#7!"-+,(#&/9%-(5%2)197''(:.%+.2-2%&1*2637$.&*,%#&+)+(@+1 7&*30''3(/*!1-&--+(&/%)2,+2'+24)3)3/%,3%./7$))5)0(1'(..)3-2?/3"+'-07/(,/$:#2=/+/343*>,/%03'#/.&&+1(7-/+) *8'2)/*4$.."(.4/3.*(*41'313++0.+=.*+&2-+.+!1-240/)0!$'-)$/ %*$,1!+4.*)/+@."'%.&0,"!/104%#/(,)-/+&.7$*.(-)*"&9.%0,):4'15+#;#0($#&9!000=8ـՀԀ׀׀׀ր؀׀ǀ׀ڀրԀ؀Ԁ׀րրՀҀԀڀӁ/#0"<2$4-.%*85"2%+$,311-,0:6/.3%)",,)%!/.(5&/6/'4!&1 "/#0-*4)*+*&*'6).01%&6%4%;+03'2+$.:*<$'049+#1/*'+0!161%-&1%0.'95.4'/ 0/-3-:% 3C/GF?;E[P^Sgk[Q]TGCC9/99780.-2 /&%(01,/"9;;3+/)* 0&*++0613)4'+"-'./(2$6'%0(/++#&7%0)'",=0+*-#)//&21)"''%* ;9,#- 0 -,1!%2%-98)(313/.)1((-5'*-(*('$1.3+3 !%9#%-,4()-0$1,0+ )),6.),*!%'& 3:)2(&'-$E@6*''#4*<(412+8',);'7*,*,$2%..&,+'#);*'((7+1*E+$.00($1 5.2+$0'12.177&(8'4/+(4,,2..+-.0$2./+59((9.1%2&.6,&#*0#1*+*5.?4-"51'#13$/&$,9/6;0(385(',$ *42&(-,6:-+-6-+25=2ۀ׀׀Հۀۀ؀ԀЀÀՀրπЀ؀Հۀր׀ԀҀՀЀׁ/,(++,23-$&+'12#63,)*1%'/.#659.430'(1,' /**75)47-0-01&"!#$),#(.*)285&(/"-(%(2)+1'30!-$+,*%"+*-",'+!1/#?$0-03+$%&,',/<%).*&'4',. 
1*31*3.*389IKQYeXktowzlTeYE;=4G,229:2?9%-/-+*(F/7)3*)@1/)*6#+#/"',-3/'' "+,(*6,+0&3$/)#*0/72.!'8)106+3!(/"/&1."'-8,/(!%-*'(1'-,-#*'2.813'3+%*(#(-75,+"%//*)''*7%**.+.1 1+2/,61,()1''#2*,/2/2#($('%1+()4(+)-+6)) 3(*&231!,*+-!)$5#2(2)'*/%%)-#)02*,,+, 01*."!* 11)'(6*2$'//%7-6445'<-4.7*%/.&'*.*1-'&5.&&./ -,7..(+5+.&:.+.))-!%8/-)/,.232*7*%#(**7(*&2"0.70,8/0/8*240/*9*83+6#6.:2*"*Ҁ؀ՀـՀՀՀ؀ЀƀрӀրۀـӀՀրրـԀրրց63.$/*%4$)+#2-*'333(659,8#4-9./%1%%3*')83#!).%5! &17,13+' '(:'/'%'0(,1*,1$)608/())*122$$'.-1*(#(3*6(-054!(-+$*#%:/*&)"%"4+3/3,-*)'.5$=1?HFX]o݁܁߁ԁp_^S?;A763;$1/>+)&/+ 4'$+%++,(* 53+$4-"*0--"+$%+-&,*:)5-)2%%#."#+.+702)*&'/ %')15'1,*3"#*90)4*7,8,54743)$./,+"-/**1-$-.+%1+%!',1.4-++2650%.'"#0('"2+.8)/1/#) *+%!,)$:*79'+#+)0.'*'$02)/+.30-)#,'3&3 &1+#(7'-6/,+'/",;016.')083&,$2"-2,012*)8"$#:59*,1+)+3<850)0'-2+0711,1,"7)#%3)!)%/% 30'5$:6"/;3+(./,)/).42#1'*,/'5//%($+$%,()8/'.?+2$9,2(-/03ӀЀՀـ׀Հ؀܀ՀǀـԀՀрՀҀـՀЀπڀԀՀЁ0%8#<#$09$1",(.5*/)62&0*102%4(*0&"+;9/(/4'+9(374"*%4,.3"# 3(.)7-%,42-+4+"0() +)7;#,!3%&,30*/)0(04/"1/';&0&-%-*'*/+!1*%..+0:0#%.:9,//%5=/\kSlx/O;ہfW\M=A729/:,'+')%3+806) )(/2&1-):'0#..8+,,,/+16()+4*,+*).)01+&&0/.*0J-&'-*+&)3&%03 016/C&,4?/.-(#(5#+3.+)+.+# (-.(,('&)30+/01./!7* .3=1#*#+,-1-*1/+-).&'/#%)..()2(.(-1/"/3+6+'$6/5$**31"2)(# -3,+$%** +(-.03+<(%'!:,3#."1" -($,4 &8,=--1/-6!3(!%1&-$5-&+90:4#.':!+)+!1$,)9&.)8)9""4*%."(+"7%++-+10',.#4)++&-4%5/,(.$3&.-'-;++;+0.'.111'*+/-5-4,232349׀׀΀Ӏـր׀ڀҀ̀րڀՀ̀рۀЀ׀ր؀؀ր׀ف4701$+-":!5*5/$4':):/- '+2>1.254*$-&,1&5= /84/(271,%#!%)1.)0$+/%145,'57",--%191**5+5&-/44) **41+230)-5$6$/0++!/5/-3((: #,"%5.").-,'1=5BCMYrłUx邻pÁ]_NH:5612%//6'A1/3=,%'/.#?*+.'0*,7 /%&1/#8#,+1$)B -* .*/(,1,/1=),;34-%%..7,"($)*),&$0,3"!,,"$(.2('>!'643/$;/11>22-%)3'&-%+""5+$+#2.2.0!)2%()3$!'*(1+12))9-3""95&4+2'3'&-0/'0&9 ''++*%0)5%3**$*)' )%+1()',&)0#9#-%2-074-A+&'4#1!)$(.2.'9-/#&'145%#,(37'1.45%610:,,,3'##541/2"1+,0!#>/0&2+3'.5,%1*38).,&$1*)/&'-1.862&5++%+)#'&2;3,9&%*2+1*#-45%9,4&--,/32؀؀ـ׀ԀՀԀ׀ـŀՀր׀ـՀӀ׀؀ր׀ӀӀ؀ԁ2-&-4/',-)0/!'"$3882&(1-2 )'.5'"$*+..+/4..:43!3)'0"45-%2- 
(3#*).#-++0+%3!1+ +57++,-.(!1)0''!52-<64)84&**%%*."0"6+++*))+71(.4-6-0(321)+-(=9OQWyPF3o遗qaJ5225?/>-1' 2)073*0/46+:1%)'20(6/('(+0-3$+/#(,;+'114-'+../2'-)4+5" 02&1"?--+(,!'6)'4')%%',++,(.$5)&!144('838-*)+;+()--**90&)03'*.7,&'4,,()4.$2'",/.%/&&)72/11%".$$+2F0#/#+'"9$8#$')&-2#=".)2%2/-,2,&2!'+%+/(!7(&#&(';/)3!9)&$%2/.1.,*5/((.%/+*!:&86'6*%90/++1-(*(%!+7,'3,5/!(,$8-%:#+ *$).1,(./-7&0)13#$-:&4,'05G#;&&&(-'1+*/('042$!/+-(&8рۀπۀԀӀ؀ӀۀƀҀրԀЀ߀׀рҀڀրՀՀ؀Ձ%0%'.+.74)042%*'"-60"+$!..%),8''2(.!+'/26,>/($3+1('+%-,/*153(/4%3+-//4 +!:(4(8'#/**0.*5-@"3A'%1/!3.( )--3$#/&) /)/1'$!5++$/14'#0+/;31-ND9S}ÂnۃsӄYMち}^PBFD17*)5$'+61"-+):)01.2171!./#+-++&-84+45+/81512+,.034%31.-/%)*!+#$()*55/"#*-,/+'(5(/()+((-1$#3-+'+/-"/&+1&$1*:%2"&(4/'6%%-)/$3%&4&(,+&&:"/,%0))/2'-/.!%,50+$8/-'6$$'".*+$%982)!0'9+"(9%(3 1*()1(10(7)./%.+10+,*#2&%.*)'$ %2,/,#(+4)>0-++7:95,%4"()544,.',(2)1(.1'/*/*3)1,+**'$2@*6)&&.1&"$"*!9,2#$/)(,/34 %49/,4+778'4-42839'&+*4##4&'.24---ԀրӀՀ׀ӀրـӀŀ׀׀ҀԀр׀րրڀՀր׀ՀՁ&694*$060/-)(/>47(/!425//4/.'!-1',)7,'.93)#1%26,,*'2/)4'(-'4--,)'.+7'#/%()($((##3*58/1&046-)--58+).$)-/0&)"&6)6)+(18&/-6')20=774,RWiqԂ'cÅQ}JxqEQ?9218323'$)'/&/93:/A,2$!.111.,),3-(*9#5(15*2//(+/3,*"1#)(6.1/.#4'%$-+2601/*7-+*%*..0@,+/$&-.':4(.'7*&5(4.).0/!,,&8/1),*-/.(./'#*%.4,-$)%4)+")*--6$1*5.*+)2!000%-'-4'3*,"; /)."0.)"#'+ 16.--*5*%-/''")"(;!91,/3.-4//,',-C;7+-+-2**=(&-##6/01,,&*3%/8*503$(/*-0./60+='00'/)/'.),/ %"/30+/11&-&6%+/*82.1)-0/0:*)),;((*3'*()(%<.5&7,4/86/,׀ڀ׀׀׀րҀ؀׀ǀӀԀԀ؀ՀՀ܀Ӏрڀ؀؀ՀӁ-#18:.:%-.165,#-,0:+8$5..5)4&-($5*,1 3!9)(-3+4%/+(/)'-3#%<")')"1*+)7.&(01)('22)6)-#-6*(-)52-3)&,71%38&./01+;''-1&80)/73$.9*0%+'31+8::96IZ_͂ ӄi񃁂G偶ykTEC6EB76>13:1(/:08.%1$#0*63)4/'8#.51'/2&05!("1$.=-+('02&-$&621)#,.!.4(4.#+/0"-1-&"*,-*=$-0/%++(+-(2")./,7'1*)4/'$/4-6--//./&6 7#7($+<2'00*-2.1.&0++0*>((,01:,1.%-/4265')*$%5+25$(2/'"2,"+5"3+$&3',,/1)*%&3"(20( $/'5.,7)1+1#%6#*++22!$63+-&&3+1"*!( 2-''-.5-/,3#-95,).5"5""+',5.)'34$-*#66./#(0*7'/7%*#5&:5*17&%13%0,,.12/1#6%-2*"*+3%4+ +/7(%5#+ـӀЀՀՀ׀؀ـԀɀπـ׀ـ؀Հրր׀Ԁր׀ڀف0*'#-085('# 
.*-/-#1+/57)-98$02+"7+6""'*('50&3$)40%*%3016-0&2.,&-.*3$5 )&/3(0+2*'&"27,2#-1%&+-#3-0 3-(0&:/($? % ++2/2533:.B%#/-6"87(ABDPahҁq5-@S=끯bF;A,8=723./6(-3//$2(7+ *110/'%6)7+&"%(.--+,'-/$!6/+%4+'&4%4!1%&//,-*/#+8!-,4/'*&'1&67'%39&0%3$2(!)(5'0'0,-+)#13!0$)/2-*4(+21)(!5$$/8(6#&+,$1.)0*32)-.&$)(-1-#=!/&,3/(%121&(4)*3"8+4,-,050-7;73%/"0/-**,'"("-*5.-%#"*!3/(/.&1)%,*-3(31(6.+*524!.)-3(2'%35,:&&+&)&5,&5!*+-/&).3(*;#-)7,*,* -$-%%3/,-&,/$+*&00(%/2*)"*3-+/)134'$-77),+42/6*)'+؀ҀـЀ؀ـՀр׀ȀۀրЀ܀ր݀ـۀՀЀڀր؀ׁ6#/)1)<'5/0*,-7.1/94"2$("(%)+3-..>"'4;->%.(&%*:)**+#35+,331(.1&9'%!+++-2%.''-.-7+/+5* 6-&'$)%#%:"'.0-%(3'4,+8.''5365,,#;/4!+;+*&-@5274AKMZÁCx˃v}mP΂k XbA3C;11;A#5#A,85*4-(&3*12' !.()$2,+$(5/#!32+29//(58,(/3.5.%$+8750&22&+1'!-+5,//3/$/%#+79*#(9+#,2%5.---,)%%+0)0,"(1$7.0'545,(!5B0%+1)((#2//"$'4(2)"+(%#&":,(3"4@/:''(9.-3/!/&*,.+#$-0'(%8+-.$%#0/#/1. /4+))#-6!,5'8075+*/.;*#5;4*":3/3$;+)305+*%/+0*"/2)-1',.&5)))!'4&'55)2<"'/"91$-/1*521702&%/01+3>35)*50;5.5:&.)746(#,+2/<*'0/-1008,+4/7"؀؀؀݀؀؀׀׀ӀŀӀ؀ۀڀӀҀր܀րրՀڀ׀Ӂ,70($0<(C3-6/36"/<7)(.()**-/1*5"7=,.)*(%'.+)(&-/) -$5/"9+.:,,#+,$/,'."/+.7#0,%0%4,/7*-8/.1$0&'0%*,+)&22%+,4()16+2,&%%..//)**4/)6#?5(+<7+OK>V_wʁ/|Ƃカ`,ځsNO:A>=:A8043.))"-21;1/;.3)5#%!6,/)#,004/%%($+)!0+'%43!&&7*.'"'5//=6&+$,-),!3",)&%-*$/2."$7*/,&,125$.2'322/+4("('+02#,/,'"!(1 051%)0(/.*+" ,"1'(*0/ /:'+/''. 
)%*/3!*1$%+1+*0/57',-*(3#,2+"3&@0&3/21,$8$)0/*.)'6(90#51/11%('(/,1+)&"0-+2-'%/22"+38-!().,#2502++1>(=4&2'(&/10)(35&'/+*!!+740)5,42/++5%+/*(2)''74.4*-&2/$/'*69:2"0352)/*.0 )ۀـڀӀ؀ӀـڀԀˀڀՀڀՀ׀؀րՀـӀҀրրԁ$+-*)%#&-+66&/*'' ).(**/0*$).,3*&6)#//-!,- /,%-*/81*2:.+3#.20432+-,* (1*'80*1.*650<&///1+.#3+-2(11-1,%'"&-,+)(#&#15+5.&4,3-$,""6I=//16=F@dn@FEe\5فw^KS5I/E7154('4=+),!068.)/)(%)2)+66*+.&%0''4&+090&%'1&4..2&(30479/4%.:30#*374(,2")/2*+#%5/ -+#/-%'*$/2)/,)/%*%)-2($+/1&+/)+/00-3)'0/ 19-,/*/551(=*&-/)*$+2/1"/"4("*%3#+5+%)5&0,--#*%)/'2)0%5'(A2-,)40'/&5.,!,/*0!(/&5,'1-%*)(.#2''* ://!$'1--,(%7,8(1-#,/'#)0*"0)-#6(3/:*+%6*6, %/&)0!2'//'$-(2&35813&,#$*01+,*)&'1/&,#5+5+&**",./+')3G".1!822"1-5׀ՀҀ؀؀׀րڀ׀ƀ݀ЀҀրҀ܀ՀЀ؀րπՀրԁ3&*4*.1)-'/'$ 2%0(6&3*//$:6-96-.1.%06$+'%(,7'"(5&%-A3260<3+ /+')/6$6-(14;*%&&*+8)3)&,.!+17-'%/!,./7%-.:)-)5/4++0/4268*/3-26,-*0397*()1,:=HMTdvŁԁȁuvJJGCD0*-%())?&$*(*#-(.1/"2&'+%&0''/3$&3%.4!)($&/#(*9)!.0%*4+6,.2+$))2&+׀ӀրۀԀ׀ـӀՀȀـҀӀӀ׀׀Հ׀րՀ׀ր܀ց132&0:6,2'$;*00&/"0-#$*#)')&2.4+&,#'".**,*+.%0'*45-%/%5,/--&+/+0"(4#*/84.42-'5./'&-#1'-&7)+*+*5(///03+,%3',,0--7*(+*'!.6="5+(;),:1..';B<61/-27!%9053"7-3#)37"(+--26$0/#*$% #-&!-,-) 5-5/#(%(#$6,(&*."&"$1#!*57)60(14*5&,50)6+//7;.2/+308()/3)-,*,./1,* 5#/!01147",,*038'"11+.5*'+.9-2,;+(6&2',,18(1(&53'!%&2+#'.!7*%"4.*1*+!/2#01++/$'$3-37/%5&%909.*22.,-#-!3$.*'+/10.*3*''--6)93%6*..&-/0#,2-/',/%&2+-)#2=.%$(");4$($(6/ -'"06(1//."+-$.)-*%0 :60)"$*9+(@53%6##/4$,060.//.47ҀՀӀӀҀր׀Հ׀Ȁـ؀׀ـԀՀӀ؀րր׀րրҁ"11*(241,*!6%70*"2*4'1237$217+622!8";(%50/()*+2*"10, +*$+33#/2)**/*A)(!0#41/(%*.,,:$312%42=26:)))%4%1';#%5,0+,2)+)52'%+#-$.2*5%(0*0%(77;5->BFR[SXjjzmpkQV59;/A&-574+>50/+27&4((,*&7,%0()%-+&%<5/+67*-+5*%/7&,!<7)".+/'22./()$1#114)()%+'1'#+&3*,1-,&,'#-+$%.3,#*&,30#;/*(/%='&,+!.. *56"1)$1'5'*"'5"!,#.%3+12-+*,7%+1'&1'')('-./3%*(&)*'#17*&85'42#+)0$*6%*)"0%-)-/))#&)+$,0/!)9.!)6.-%,#+( $/0. 
$0+,/*)6*#*')$&-#$)-0&/*#)/709,24$+*38,91/ *")4&&)-*!4.17*8)300)5-16#.)3'73/'(+35;4*63-3)0.1.4;'(577Ԁ׀Հ؀ڀҀҀՀ׀ɀ؀ڀրڀ׀׀Ԁ؀Ԁրր؀ӀӁ0!*1.&0&-%'-$/1*1&64*)//2)(-'):(*+":54&/#10())"33%"%.0*#((,-3*2)0($-1(/"&7&5,$2-*2*&8%&.863)#)*3#//"622*14:(730,>7)-'),,*+9(3#+'".$') ).;9BKIZT]cgmfi[`\C^LJ<41=3)4&-*5>%5/#456621+'5(9-'.13"1!(/ ..!(/,1 -"./)3% (+1')'-/9'%-,305&-?5$"#,:$#2//=-0$ "-#6'(.(&.;/$*)!&/(+/(#%)('(.,4&00)7/:(,$/5-"% 4.1&##5..7'13./*',1%'()&0'!$7$)&%(+;1):03+4$38*00/.++*/("'<./0',/5-&(4247+$,-&%3(&-*)/8 ,-5!'0&))2+/1,%'43$. +8(-)3$'+-+..3&'*-+54)./0(+)(((1$%2)./#14)7226B/&7+1.83*2*%3/1)'4180%*+)/73/>-)25/9-&.51,,Ԁ؀ҀӀր؀ԀՀҀȀۀ׀ـրڀ׀ҀڀӀ؀Ҁ׀Ӏԁ2'-6/290)2/+,(-%54(#. ''+)/*2.$.,7(042!713&.'6$1*-1$.'1+',0,-/1"'1&7% /90(%!*52,9)3!&7-&%.''.000-',/81+-+*%).=)-(,'(/33.1)+02-/5(/42198BFFC=DXcSWbiS@XIHC5F163:.+)70*,84))&/$$)7.7*('71*,%5%0'51+011".',1$11*)5),5415,5)50**"+D')"<)(5/*.-),.(2%.;30)&291,.0-*&+#4,33:#/-(+5)4(3+'6'<1+%$25$:2,-18&(5)+.-5+5#14#70/27'30(9(+)-)%(3(0'$)*-1&,)2--,2,$*-:"306+/&%($-)3!('"5+/ 1'!)9-0$+:(*)3),5+2411&+(%+6(3.'%2($(**)(#:'(,/$0 42'<,'&%),&489-69;/<.-)22/)>3;//.".12:$0-&4-1&)73,/7./)%)8'(D+&/87:-8:/2րՀՀ܀ӀՀрՀӀƀ׀ҀԀՀـԀҀԀӀڀӀՀӀց..#-)#++(%*5%-41.(,1,>.+.,-/('3+!754+!$-+4#)$#$)'B4)--0-(/%3-"7/"%%--..-0,,7,+()(//2+:0"$+/&"/'7-/*()--6,0/*%-0'7)"/)>/603'-3.2'(13163*>54ONRU9L9EE=6C20%)40-4+!#,#.+&)'3#-+,5,3+0(.0-"'-6+.+<*.533'0'#%2%:&-!';>#+(1'*+++,340:/6*.#+/:5,!<)'+* %,5,//4%)':+,4*,>0%$0#-*--0#7(+)+(0 3=,$3.1),,3(4)),*!//,/'')(=)7)/,-5*.( 05,0'3.!/(65;1,*#%1&53&!.) 
1+%)(($1-/'(1%130#/( '("5#),"!'"/&33-(1$5.$2&/$/."-")):/..',)%>0(4'01-+/.-&/0'3$85(1+/-20-/*0;3#&:2$& / $-7/1&--7*/,2')+ ,!)8#0%*+#*.؀ՀӀӀ׀Ҁۀր׀ɀۀـՀԀՀՀӀր׀׀׀ӀՀׁ7-+!),$,,;;(-+-5+*.(&7,4#,43/0**#*!/7(,"-)4/+!-4-,*+=6$%!(".!'*(/0,'*=3(-(414%*.)!-'00-24 *(-3/:"5+.",'$*/(&).&$'7*,#0+2.(<.33&*/.,)0#-)6=.$-@;:5ID=FBK[<=D./=1).:$+( )+&2',(*03&'+.'5,%3'--7!$.'+-7)"("4*'.*,$*-+90&0*(!(2%571,/232'&+/-/)1(8 "/9.5$+2'-)7 /B(910, )05>,77,*,(+/-609%!,)$)&%%.03-7+,:&(#.**2(6$#!&2+!3*!1')%('(,'6%".$(23',34,4)20))24!$00-'1%(1%#2)/).&'>$1+//&')#/"))#7-4$3)/%1-!&4%77)((0#$/.'+(.(:1'(3 &)+5,//6'2=-2/(.8$$,-.4,./6:1#'-&2)/'++$((05)/'"'-6*+1(0-('2/!-1/?,#,5% 6(',,#+Ԁ׀Ԁр؀ҀӀրӀȀԀ׀؀ڀՀրրրԀڀӀӀԀՁ3+2)2+')!*!1515+/%+3-2-&1030-28//6 '+5')08#+$/7-+--#(+++0%5( 7/)3+2).".2%7%#''.//(*+/*,"*36%/28*.+).' (,"4'*'&-%0'&*#$0&-+#)1%4?0*@+8D764<93+D@79A'I;E24AC9+/<844&./'(7*/&*)1'#--/)(12"'-%-2+)(/&&)-9$(&(0!30-+?53)-4!'-&,.%,2/)2#$*&*+(7,='&-3(9/-%#((45$=!, '7-'4!(=)(9'-+.3-3-,.'/("+%/"*'-'3--22%.% .2-,.-$2-+2/-)8/,%,--'&*0!)*,$ % !*'/(:!0#//:/7+"10' %%'4+//77%!$4(,1+-**6+"+4!$.&&8,'*$-"&/%$2/,/21)*(#.8,5,/,&"9-9() +0)*'"--0<0+,7-*/&1/!/0$*-/!"&)$0.,0+ ()/B$'+-4: ,51.1 7-2+$5'1#*2׀ԀՀ׀׀؀׀րۀĀрրπۀՀЀӀ׀ڀրڀՀԀ؁,$./2.4'05(($+-&)1.*-./)63!(0)"*/6'7/16*9.;*<%"-/%0) &84,+$7%$2* ',(.$5"9&8!&*)'21**/+"*,4$1.+.,69.1')4'&'*..*4&7-%-1-+%/*)38:#6&1"2#-:*0.29<0<,8=7981I562@47)-+/&/26+3;38*(()*'-7+3#*(955*/&8)-@/#(&+*/6 -#,/!+2/'741"/+63-(+.3)(**//&"3&3.<..'+'&,3-(5.&3-4,1"#*0).6+41"*')*(//#*1!++/*-4%-$7'6)!)0*.,-)*#/$6*-03- )(!'-65%+#*()&,.0/&-'&,%(017!"64#*'(2-3>$'+30,&/<-'#64,03#*.$.*0+3-)*5,)#+1"2.,/,15/!4*1-.5%),4)43*+;3"&"".5.5 0//%+) 4"1+6*/146*)5*-./7-$..,2)4**95"0"*".$1<1/##1,+'(.6)/'.!07+8,(׀ԀۀЀـۀ׀ԀۀĀ׀ՀрӀ؀؀ــ݀ـՀրՀЁ#%*.&(1.* &(+0.-@12+$,&<+)(.7,-&!+'')-0.2-#5-0#)-)(#-2!&A@*>+0*0,!%&'1%6#�)'22*.&/+)/,/829%3&!/1)!;*.3,1.17%"*$,04.$+76"'&2#2((.&C*--,%,*:));5/*>7@2.=<$38-+%%/22*/&,7.$/''5+2/&%7 ()."+/=.*&",+)-/3-!(* *.7)% 6.1/%.5"2%*.&%+.,1%32.!0 
1(3,/"5.+/+36*-*/).,+07/,/40*-,!(/6%('"#(3*+&""/37.,+0).-2&/21.7)/$..#$2(/3)!(/+2)(%$1/!.(+3%-/'.8/(&+.+%1.%-415(+/$)*3&+2;-2#&%127#,1")3.= %5/,882$.-1/$(3&/;3&"':-&@&2)5%#+,-.#/%,0*53'++-&.+'*!.3&1+-#(5,+%1%/*(3+,5% '720'&)+2*0,1=!'*3.3*5/.775)24.+=6&45ـԀ܀؀ԀԀՀՀՀǀՀ׀ۀــՀҀրӀրրԀڀ؁+-96/-+94#=/,'(&!)(,.!-$2042&-011//20/)060'.1*(+0&'*-+,031;*%//)A%'2*5#%%%6-2#'&(%*3(3&(*21&-/1/*&.0-09$//6.3 3!-7#227&@321&'4#.6--9)4)/.78205<136;/$2264-%<8%<349--(:15*3=&18(0+-/+*"/)&&-)=+-9)-%6%%)(-& ($$.,/(,3-&19.-(1!1((%%74+,(2+06'$():7!&71&2)3-3.,(.40.*+7$#-+#*(/65)!5+4++5/-"&) )224/,6)((1/401*B)3-5,','(7$+-4%-&,-% .&.)/'/"2-$,'*+131%2+)2+3%<!/.)%28/6/)&45-.) !/4.4/#%/. '&,*3"*(2//5,3-52(('/'4$'))8,' !2,/1)2.(%3($)!%057--(-(/)!23+"%!(+.2;(,/2,*@3#*#82 .##8*,("C-)/3-+)#-/%1$(.51%4ـЀӀ؀Ѐ܀׀Հ׀ʀ׀׀Հ݀Ӏр؀׀׀рӀՀڀց0 ),:1*')(2)%/+))285%'/>1,+&$*$6/2+2"./-*($",,2'%.'3)05.*&3<-1-(.&8+7**8*<-$0/)":%'4$4+$,2&-1%052A/;/()+*,,(&+$8>(%)1%*)-203-/!+.;4)!""()-%,*%B22(//=0-((0 38*(;.,+*" 2!)4.7-(#%2! 6-('&16,'1"4+.-#(153)#'3/0;.7+%-2+1$3'-/-!),+' ;/. !( &'0(.*- 318 ,86%3"*#*+23-(6;"2 -"+5-.1',&%/32.+,+:/+-+3*0))'&,-)%!'3)(1:5-1. ".') 7.,$.0 -$.3077&'(0&%'*'/+742%0)/1,1:4'0'1"..3($8*22$)1$/#(%3!(.7/&!32!3'!%./(.23*-3*0,(%-,+!67,%(-1*2,$!(*+,"83(9. (&$+&(=/=!0/%9)"+0!9+&6-/+/.#%# 7*82/135&453<')(؀Ԁрۀ׀րӀ؀ӀÀҀԀӀۀڀـҀ؀׀ՀӀ܀Ӏс'&0.21)1,*&15&&-4-))-)-4%-+'%!)+#*# 20',*/3%)41*(*1!1&0-(2+"*''(0'0*.!-%"-%9."+*,.)*1!)' 55"''A 2521!./-(9&/3+,&87.647.*!21+%.22))1%+0%/'$8/"2+)'>),+"0*96,#,!)&().&0)*%3 .5**3+$-8%-&($5,.49!09%&'/4%+"'"*$/.'$85$/#0$!.0*-*2+ $01,)//2,"50/+*-%/1/6*5)($2.41(5') 735'30 %'113,#2#+3(+.311,12.0",&3. 
/2+41,&%%%,%10)9.3+2)34(*(/(1$-/,4% *(!+/,%:$4()$-#+*5*43/9/74+)0+!(,/.%/).11#!/).0%2 *1/4')7++'1% ,+9-47/46,6.*3-$:93+/!/4#8.7-6/0/1;:+#$<%%,1+'/+'&/4/70%1׀րրՀ׀րҀ׀ՀȀ׀ԀӀ׀ـ׀ӀӀ׀րրӀրӁ:#;2+,$((&4-)**'8;1)+3",0%2'+&))06'*01//'.'+1--'2!4'5*21(*1)#412*'/./44)%/'3'$3'(<4)/00-0%%7**:* ,*##-#+$-+&%*,#5*+'19,))+&/,.!97)3'%')797*5(/($/<69/::,,0*("+&7-*-)12/42$!01',0(&,0/(!.,'!&6*%0!,1/*4%/3)02**,1'?,/0+,7$,$ .,3%+,13(-/05,2,2#$)-/*')&-/%. &9):)58,=/2)'+*/;-0$/8!';..:+.---$-30"-,%&6+4:0+2+' 5(-:0-+-(-'((475*+3/33.3*)'/�//9,+(+3.,#!),'$7()# +32,%,,4(@"! 7*,+"+$"6/3#')*3'!2/,"(-0.&-&*.%)'+-3/6( )%'1+-87/&,+)-'.$'+%***/2/+'-01;$15./+1,*'1')*)."-6)$0'(;+/9"&(9(%53*=3#ۀۀ׀؀Հ݀Հ׀րǀԀڀ׀ӀՀۀ׀׀ـ׀ՀՀ؀ׁ7!(.7'50%;&)$%+(,-,6++1')&9!"#'#20#81++6#+%*#;$)"*0,"')*/). (+$-**0!&&)#'19#3)-)04-'4(2),,%$,!0-))'.:&,)(03/, %1)&+/-1+0'+*+6&+1*1//3&+4%*-?*1):7)+(&*%(-(2*+'-3,' /"#&./3,1*$&#%*09&$+,4"/7*'-(.-+61.## &2+',(4(//,(/(#/&*2-47.5)+',+'&&1*$23/3/1"!/"(+-(#0)24, )/.+1&,&8"*, +()$%1/ ,(4:/!1$%=:;$/$&*/-<16*0%)%/7/%&/4)(&)6)++((&,.1&/''2,:1*#/+%$*'/*!*23#,! $0'!&2+!/&(9 '.#.&-"#,()"-$()370&)'('0-*+-+&11,%1%&31-.0#()/)2*(.,<1/()2))0!...-6,432'+%05'1&**1+*!0$2'.%1*1+"237+9'&97(/7663,)>)ՀӀԀ׀ՀӀـۀՀĀՀۀڀҀӀՀ׀рրրրՀՀҁ(.+3<7//'05(.3--@%$#5$3''."!.2*$0))"/)*$,"<23(0+,/(./2)2$0,1#1*,)2)#%.!,+0."-'(;2+*+&!'/#&0+1( &2#-9,-5//73+$.1(.+5':9+&7-'7#.*3.2(+, )&*'+*3-+;&+2&:2/8'-24+.$5&12 :0$+2+4)>!%,".9/+*+'!)3-6)&1&02!'('+/68!19!-0*-2)+&.+,-56).#-/<*4824%314,6)1/%%+(#"1!*''+)/.6*1%/!'*/.360'"'/'0('+)B$,2403/+,661*0#$/5'-+3.:1%"*4,%(#/(;12,+*'/)3!"15/8$*)'*/17+0),(''.863482)+&'7#1'*(!!.,--!2(+&2-)*"326--(5,5!'1*.8),&:*-2%$),67-*"#)2,"(0(&/,7+0'"*0115*! 
)&)-*-&6*/*6+0#&'+.1/((;%351*6;=+,7.-*24'../2,."+547'/!5Ԁ݀ۀԀ׀׀ҀӀԀ€؀ӀҀӀ݀؀׀؀؀׀ـԀ؀ҁ'$53$'2(,/,(7)1(%.%-%83+-&%1/.)3#/(-$.15+)'+(()$/.)672-0//,!21.*/:((<./)(&;*,%1$'/"*+")4+'69//.+3%&&19*4($/-2(&"'.,4)''&.*)*'2-+,1"@024%) 6',-01'(+8).,#'%01+&1'67+*0B'1:)&40$&9$-+/(;$3,.'.%/123)=,+$%/,-1*354*$-B&#)0+9/'!%/4,.,#-*/8--1'-15/.-!.2-+'5"($**0%+2+5-8ՀՀӀԀ؀ҀԀррǀڀҀҀրӀ׀րـԀրԀՀ؀ԁ)%+)-,+!0#%#-2,%*(1&8.+7&1 66),-7/")"2 $/$.+-'"!.( !.1 ,,5$%-1'5'3!5.(2&!3,1,%/%(.7)0.1#(+%48/+2.)1./)"5%+-+&,,#,0%2,(,:/3(,+0/'12"-/8)0/%-10!-(*$6 /0#*7+5'*)**'#6)#! +(-13+3#+/11-(*+"4!+%-,!':.;+*."!*)-/,"*+,)( 13%#39/3#''%'+/+*,2+!1$51=),02,9+(0/()1$8*50/*',//631.$!3!3.+0%(,29'.*73,/'0,2!*2(*12"".)1%(2'-()/&4%!,(&,79'$/:3),$*#(3'((,,4*2)+,+"+)"#+70)/+4#+-*"%!*&*/#(%0,!+50(7"0+/"('")((2#(,-+!1'=0*,-8(%.(25(055+#5%&+**/., //,3*60+.-(+43%52-$=/00/'-+%&835>#.3%"7-9.)>$*831.'&1',.րрӀԀр׀Ԁ׀рǀ؀Ӏ؀؀؀ڀڀԀҀрրրҀց/*3(*,#,7/'*'9!!26!',,+$ .,7$%7$'*"#!"./%&%%$/(2+!$%'43#3//)2%!/#)1*+94*&)(-/&*B)+3+1)#04%"15++(4#1+/*0/$-8+*,!237*#,(&''&'$05+)),$3'753)'$).3$-,4-,(()))9+72!/);")3(*+6-'8!/)*-2:0".0.%(+(/(&11-(*',7)5# 124(/**--(95/3',&736(*9()%&13% 9(/+)*&#%3&,'$-;3+,,#/+!))&)+1"48.'*"+,4,*2*.38;*802/*-# )*+7)*/./%6-/(*#"")(*(4*'&1)#6 %*(..&4-3,&-)+>02-/& -)%*)-$2)./71)/3!)+.7%(6"1.+/ (2/3$)2-*!71'(-,+""1-53)-;.**.(=)"2"+2+.+$'=5*1"#'.0.*.+$!$',"-15+<4+,'&+&&-(/$/*$0-/,.2)2./5-*4&%--4/7:./0,41/=4&$ӀрԀ܀Հ׀ՀրՀȀՀՀրԀڀр׀܀݀рր׀Ѐԁ)"01,/5,.7)".--4/2& "./%&%>*)16+)"2/,42-0++-(&7'/2%$4$&&!+)/% !4'(72--47"*7'+//5,'667%',) '311&1+3/.$+-+*+*-,!-1%1-('37(;6.-('+,8(.&(*0(.9/").:)("0+%4/5$)*$760''/&$'%'$9/ ),03&2+//---4..*-,%%+&/%)71!/7'+()$)*1%(.1# +'$&*7%/'+3$1'6$0#30,:-4:93""221#93)31+*/!3/-8-!.(*!('/,0(-#$%-2,.-'*678"62-05)*-"(%%&/-.(+" !612$%,,(0+*%)$+%'1!5+%&0,3-1$"+&45" $/* (-11-'+/+7!+1'1%0-.)(,#>%)-$/*(*)'('%,&$,%;*033'0.),'$('')#$" /('+/$/')0-$&)'26-6'/-2/(4#&210--2(.'1)$/0390+70.&4!+"-.,!*(!3+-2%/(-&3530%#40ԀррـՀրՀ׀ԀɀҀـՀ؀ـӀ׀׀ր׀׀ڀրՁ'!@*6'+'-3."/4'3-0;(('4(5/+.-%+003-$*%22,)1+)&-.$-1)2.1)4+'&.*2 
$2((92-*(@-."1-$#/"1/0)$(*0//24+%/42#(79+.*,!;-.0!2(+,("-3%-1&$/!:/(('7(/0*%5&&#)!&-5?+/*2(07272"/2%0.1(<'##*($,-(/&))(' '%"+43+-*17#'-)*--*-4"#-*2)(&/+&-3%+5)4()5 )561*1'+"3$+6..''(- *!*-+3030-.2.))"#/ *,;0,%31.5-. .6(92.)43#";,,(#.9*+2'0(-(/-*'%%*=,2)-/3'!* '.+..#.5('2)$-07.'''1,2)1*/'1*(/,%/)3 % )+'-+0!0*6((/)'".#!(004+'&3'//",,%10*!-*'"2*%020/51&* '+$,1*#,'7-'1$*+/20#,+%,)'<"17-<+1(+(6/'(3&&-/86-5,.'A!$&6&+/.'-9ՀـԀڀԀԀԀ׀Ӏŀ׀؀ـπր׀ـ׀ՀۀՀ؀ӀՁ1*3%..3/'#!58/6+ 24@,)+/$+8*+/+9)-'.%&5$://...2.)4-2%):'&+"*%,/3$ &"=02*6(-%&840&%- "/"&)+3144$+(!$-%4?*!--'1/3*1$5&)66%'.('--56/#".(/-,**9%%* /'+-8$+()"#2&0/30+28.) '/+0>=.(//7+ B2(3-2.,#('%/$''1/+7/),'4$1./-'A!%*,;9', 4&)367/7)/-/$-/).."+0+""**&<60$2 +!0#++'3*./41)0*2*'#%'$5%61*(!=*:/*0 ;$&3,03.$/(/380($0%)('2#12(9.'.**(",&6.+).'*5+"*;66 7--%'&+710(0+7$/+/0"'-%'&& /9*,116"40*//%)55,/!,.'3+3.'%(2/))+)'&&'*-,*'14!7%3/,=101/,.90(,95+1%%)+6/B<24&.5&*1/-0;'$-702%*1)'0.9'؀ـۀހـ؀ـ׀րʀӀՀӀ؀؀Ԁ؀Ӏ؀ЀـӀڀف/.$%-0)- .53@+,)*44/"5"$*.)%2-./1+)+1.*12-6(#&), *('72,.*(-&&,$.-4%++)"*."/'3*,%6%5*-2%)'32;11'2($7&-73+($#''1A+2-(**,#*//,!36&$B30/)43-.#-0 '<-/<#$72+2++00,'0:)*%6./7-+5//(,%'"*+)/%&(:*.-* -1(4(8%)+0*)*/,#./2/,&/+,8/*&65)+")3%)3$+$-8,*#%'4-,!( '#50&3'901531(/"+3>'(&&-3-!!.-'2*"-1+&3+!#92.'*,*0'#%()3.-1#'.5'2&5+,,#0*0)"/(30&-)'033&)25&"&)+#.#0'(/+1&)8)$85$1'$)*%("9*%-..+$(0(./+#*/'(,,.28,'(1:.-,6 ".#%/&%07+7(.0/#'8-3(5%.4,5-1*7+%)0=/)+81)'!?,+46%6))8-3.+:./4-!1,&/.4(!3*-)/!A%,)< -$?ڀӀԀ׀׀؀ՀԀӀˀՀՀՀ׀׀ҀрՀӀрӀ׀Ҁׁ2..000+0+373(,4*61.51426/.+3# -!&#/6730,*/6%12%2%)0)("!152 &)1+30%%+2+/1$+(.!{*,+(,(&'(/""/-&&)2+01#"10&*-,'+-/$,#69!-&14/3'/%+3%"!1,/7*$;&+(%23&(/ 453$ +)((6;,5/*4+48/1+3)$.%#//056*&)8)!0!$9'A,,,(-8+0(-1+*%&%+)3)#1/'#/%)&/&+1-&''3/-!.#":;% (7/0.,%)+*'02+/32'%'&$34-''+0/.(+ ,.;.2$/5#&+.9-'<.5'680*5!!22++%(-,+/%./+!%*-8'+/'&0&,;$*'/ /&',!(+4/4%4)),:)/)0*0*)2#34&&/&@2''"1'#,#-'"7-*0"&%.,7$(&&,$0..(3:"/1-#&(4".+5*0)58,--,:0&$-)$2065).3/7&)$*&6%0#3!436, ).("!2)2+)/,/1*2..>'&-.5*%-+& 
4!,16-Ԁ؀ـрـՀӀրՀʀӀ׀ڀ܀ڀڀՀՀЀπــԀׁ13*&0+$,':$(,+2!)++/).5$%?('(%7*+!30/-!13+%,''/-'0&!*0#(7*)00*(0+'#*++%&/+6"%:2O +#*$*,4.%)12. /'&7)4/.11%)+&$(*-%%$4))0('10(1.5))( (-/8*"-*$0'(%.''(.++-9+'/(,.-1-().13(.01(2*//6))(25*%/0+36455*+/")"+*#,*#*'9':,22+6-"!&+":')-+5++#$*)-->4.-+#03!$/( $*+$3!+135%+6,1#++3712 8)2!!&5+8/!'-3!3*)..%,*4'75'67&(#1&>+2+.(4.("-7!:78(".)-+&&&):5-'/1))**.2./*, *(22( .>+.$2&0(0,#"/-0/9/7?#*1+,+$+4%-5)5/&-+ 4211( 6*&+,+0?()+/0''64-,('*%,:")%.3%4!-, /36*&1+9;/('*216?%(-/8$1,/*/7+,-$#%,@,/'&7)* (ـ؀׀Ҁր׀ـҀՀǀҀӀՀрՀӀրۀ׀ӀրՀրҁ8$-.-')&+5$))3),2*0&1"1#+0.)(1%/+,*'52-4-84.).0&$%%/+3.&'///29'4(%/*0* #$(*-%&*:,,&*0$1'- .))&#.)&+5 3&&)-2''++9&$1)+()6/4.(.6$4(''(%4*0")-08;*"#.4'+%%!!%& 1).)*&/&.%40%.*#-,%&,%)!13$2-$**=*')(,+*8-$6 '1(8#&#**/'-&*$'/4/4-*2+.)#*&$(6$./*1->*0%#$)'';++&3$.3-/ ,1),3()+&*47"+6,-5-2&(%:'/1,**!3*,?)$997@-(&/'+((-&.).47-%. %$(.8/"2',.'$"+):),2+!/'5-('(/5+&16 )./+/#,.-+(/110'/3&2/$',+.%0(+$&232%)<'()4+&2"+'*$.3&4>'*%&-2'%2+/;003"'#)3.//(&.#,'*/)'1'%1-19!<0$9-+%0,)"5-/)(3 19$+2512)&249,6%6(-.*ڀӀ׀Ѐ׀ЀԀ׀Հˀр؀ۀՀՀЀՀԀـրրՀҀׁ8$:/# *%-.0.0//,'8.+,07#!1 7*+-/)*,55/+&$(*00,'3!-$#%'(2+&, $,.,',11273+/(%'/0"*.338-242!'51**.)'/,%("0/3'+'<'#0%8+#<'+)5%1/.1#+41+%)+,1-$650/-')2-($*%5*:($-!4&*0''.)$2%+$5# .7!-!/+-4 /$)*+.+!)+'-*,<)(-36/!,*#)/.-85&0+*'-8-%->;93*-)5)3 91)+(-.)1#2/,,*-+)"+4-5;7&%)(=&%:&1!-'/")/02/-'& A(-4+&!(.#'%-72'::3/#2+*%3-2(%#''4),&(-&<4.'+:/&$062*2"%,!/2#/1%"'-&03,-)/2%+.-++%*4-!% (,%4+')!,4;)%'&/9,'5,46*,,)'--(0!)4+(),"2&0*!?.*@3/'(-!/9*'=2/3-+/&:/"'&',0.1!/'!22;67*5(/*)=+:/,.%/$(789$3-)(6/='#ـրՀրـՀՀՀ؀ƀՀۀԀрـՀ׀ۀր؀Ҁ׀рՁ&5.#%0&+++5/435(,/1/--7+-#0%%3&..$:(/2,1%15:#+')!,-,;3) 31/3(6--%#-6(0*10-%1 ,"10-6.1(29"(1(*8+(/+%5.+5(7"5')++!$*4)4/*2(/8::+#**(.103&&+3-)4+*0 $6/)-.)+05283+.-,8+#)')444)''>">!&)/$:.845 (1(%!3-!--"7--)#..$,)0(+3)12',).-0%&() 7#)25//!/#+.4()/+,+&*-/*52!.)'%+%'0+ *7%0"(5-+15-'0#,57./++''-6''%-0;+%6/6,3%4+*3)׀ր׀Հ݀Ԁـ׀ڀ€׀ԀрҀۀրրӀ׀؀ՀԀՀց(3&4-+2+1'.4!8-*//-'%4!+.--$+$).92*,*,3 
)"&7.20-,)/*29(5%5-8):,(-+3'0(-*5-67&6,//+0-$(50#3#"!!*+17&.#('#,+$0%(*,*4-/..36'8-*)//#&*&&.#,%-2-1)/"./)+7)0.&.'"0&&.1//8'-$1$;&.:2-2,6%%2.-(/1/2&*,-,,(,1(00/)'*",.6.-())*-6(($)&+'.%'&+'3+(,**!'7&&+$&"+-1)/-6274<,#9*3/-(2%*#'1 "%.'86,/%-0)'0"6!-&/()'#$((+)%+&/* (02.++0(*!%3272()6)* 70#$0/,,#)%#"%38+(!$+#<.'3+ 2*-'<'*2#'&'/440*%2/-$#*7-'-7/--3)4%.-#(0&/,%*6(',@&4.1/&0000.+&"-84/"*-)%++0()0$)%0#0$%+7 6($5<44&4!"*#',*5,47;5A+)+2)6&+//.,,01/0׀ـ׀Ӏ؀ր׀ӀԀ€؀ــӀՀրڀӀ׀ӀҀ؀Ӏہ>;"4'-/0/7,753%=&)&'/)0#5#;2/'100)+#/1&27(,8".2:*.+/(**-/+'4"!2/$/33&'),$%)7$*%%-00()+/&1+3+#% 4/02.*2!8,).%'/3).;%$.-6.%,#1,.-!)&9).)%/#3% 12%/ 3#%).0-/(),&#%").+/*%+34*%,$!$.+/!*,847"+%,*,04,'4..%.*!.?-&.3-*'+/.1#+(*7&'"+#9'#:+*1+$71/.-*)"0)-,#%$%:$4,,>3/0)%,',',.(3./(7)(&*'&4 #+-*4)"*--(*72$>6<)3,!2-*+,-*-2/*.,9(.2.,36-)-("#10.-'12-&*61)-*("&/-6.*!8-5&.25-,/%8+-,*$23 )#(*/))0&2-'0)7%#(,2*02%3&1))&-.%*(),/1.5%+.3('8)1.5'.*978-,.6-01*$'5-.***>8* $.)-///1)8)+()03",-1/6&613*6-/׀ӀۀԀրҀՀԀՀƀ؀׀ڀՀڀԀЀԀ׀׀Ԁ؀܀Ձ81*!2*3#/73/45/-/*5-3'.$-7 6:'+7643+1(6*+08()+(/.7,*17*.*-89-&")-&*-%.1"%1-#*2/0%%:-* '.*2-/5 "#$;(4(&.0 +'7.(=/5+#',3:,/)-/.1+($4)/2.(,,,,/9'*)*+.& /%-.+*,/'21*'-0:.-/$28',3%6,)/''# ''4-3+***8#)5'3,(!'1:)") 6/$ ")25/-&0*.2$(&+,/)1&'>(0,2$+&34)0)4+)9 #3(;)9(.#/'2*+)-.*2.//#($*'-&%0).$0-6*%&-13.02*0'0'%*0*0)**%/:,160,'/3..1.)6//&0<+7(12''/ 5'+#-%-,+2#*7.0%+.3(/07,#&!.%'#-(.6!&),)"9'1'0(,$.)))3 2"""/'))((,)"0,4-*(%'-'*3/!;. '1*)+.,/-,9.17.+(*4'2/,'$.5%7#%346-41-'3%1.)-&('3*71#(4+$,.+܀؀Ҁ׀Հ׀ր׀Հ׀׀ҀـрӀЀр׀Ѐۀـ؀ҁ5)/E/0,1!+;"#4#2&)(202/6#"1+%!+/,).25%0.-*%41)","!)*-(';0'.&'*7)5+#:$-!21+9-*9%&3($/6)25)"4+3)/1!" 
0+8."'"0*+/,* 3/201$/.0&@#-%2&%1121(097.8*8$-402/+.+65-'-.$3./5))>.'2#4 1/+' '"'5/'1%/,'%0',/5$3++/+.$!&.)#8# 4"+).0$-5&"(,+6!!+*#0-?+&.83)'&'+).,( 3,"54(,/*,+/41%5')'&+!&,-/1(/3/!',/-(!$/!)3"&/'(#)+*&5&*3-%),6/+)*)'%/75++' /2)'2(3#("/,%,5.)2.-2"(4&1*//.,3$51,)/!"/.+!+&5*)#/%4&'2") &.'!+'&7,&."$.&&'776&6/$&($031-,',*-=-7?1 75)+-43/1(11*'$-##(*).'/*(9,"&/205>/(%61$5&-,!,-1+%('/0,+34'׀׀ҀӀҀҀڀـ׀ǀـ܀ր؀׀ԀՀԀ׀ՀՀڀۀց7&6$2.(4/5+)9,'?393+99*/* 077&4.(:'5*3)+4.!4. -(*020#'1*!6$++#'!>136<(*-# /4*+/6/ '41.$%12%*&)((8/*-"1+-'3,/#&0)$2%'(2%#1-!%+&.,.%9-+63'5'"-).'%<60+*'*720)'2+3171+-.$/($))#*83#)1-31((.&-&-'&0++-&)+-0+$!**+)52 /+1;$-00,0*(/--'//$#0(-")%1+0$:7 ,'8(1(%6!*5*" &,+7$(513*-,*-%% '02#+)1&%22*#1$0,*3'%%.%2#&!3#'-"$.)#3<5)/1%1*.+6'+"(%*44+-368""'$(061 2$','"0.%*4/3!#+),4#8.32+$6$1&#*$%4%2/-/#1!08'/5124&2),)5,$,)#$56*),0&,"+*.",5--/(,5-''/2.%.,+* //,D&-////)08(-",'/)43..+30*143%/.!3$)(&+,C6!%"4-ۀՀ׀ҀՀڀӀԀ؀ȀрրЀـ׀׀рրԀՀــրҁ('-/61(,74*1"023,2/()0',%(17+#4037+31,;:'%&-7/380,'"+&-& #;'1-##0+--*;-)%1/.%)*5(')321.4*")(&8$7*)*$%0,-'#%30/"*6&-'03&+$!(90./,./$,#'0*(#!/"2'/,16")&'207 5)(*0(-$'.%%1,,)2<*9)+#6,,,8%40#2$ (3))-4*'2")(.15+(+%+,='+'.+",(794+-3'$&90-)-/4**+*-','.'2'%48!24',1!11)(.#/4/-/3)*3'.,&-6;5%.-#3$0%55/,'1,9#*$"+!++.#(),3"1,&'*5-/"&'/-+4 )($!#(,1/(!,5&0!(,1/20"/*/&.-%;,38*7+035&+&-01/$.')103++.;3'!.,&,3/*'!104$+2"/.-!6-''4,.03$'*7%- 2/+%''$*%'-8&"0(*))2&6&#'A.-"'.//6:+)4300&$3--04:+#22")/?3,;ԀрՀՀ׀Ԁ׀׀ՀƀՀՀ׀ӀՀڀՀՀ؀ԀҀـՀԁ#3'@1?:.-+3/1:'!#/2',!,#(,#0.+7(.4)/,,0/"(.)+%'(*12/$ (#0/%(/%!*7,6$7."68',*.'&&.00''2!&D672,(6 0/2+!2#*+/#!)07%6/&%16).($+63/.$"#(534*/)1,'-.)/2"*"'/.!2.22*3%.'(1'(((#*2,-0#+&/!#6&*'/)/$2 - !"5-*1(%)+, +!+(310,/*3/6#3'2/-,*57-(!>,38.+93:,,+#64)*!(21$;/+3-',$3!.)./###1.0&/'$%*0-#7'1*+,%)'92,2'%'2.-)--!-*01"+,/#1$0---.+1.-3$+01 *'7&*'-7'%(3-%6(/0)547(*,,/G%//'-$$* (4":&1) "&&00*/%*-& /(",.$//0+.%%'5,)-%.+,.,#5-!#/$$!-1. 
7)6"%3//+#(*-3,#)/91,1+&$/-:.4*%+/+/5:-+(0(6/3*&1'334(؀Ӏڀڀ؀؀ۀՀҀȀ׀ԀԀҀ׀ڀӀӀՀՀՀ؀܀ׁ&%3(2.--75&5.+7$)*( *%)4)#14(401,3->#*5+/",4,('0#6(:5-0('*,$*#!2+5*%7#'3/+-1**,($!1-(-$4*'6*2/$42/)3*5)&1+*$1'(,',2$"+/$2,)+%(1%5"&-0/,+5(+%-"(&5*/&11//!(6'*81-/&&&'/$);,3 863+6+4347/-*38-3=8"5,$3(-5*''%%$(,2+0-;1+9*++(&+!((1%!"#)6/)764(+.*"'+437*,%B*/<2;''(+(+.2*23')5** #!'/&*/,2-;&"1/'++-)2*')#"766#'3#'++3-'1+:3614>+,1-0!&,/,);#29250--/7%;(+.',&**,(*2'''+%-*'+&0'/%'3.2(&$"0"-3.3665*!-(4+/(8('5'8,.42)(+)(,7)**/-*)2*+3'/5(#2,*'05.!-!)9' *-17355.+7*073,*)+015!/'76'')'+621'(!2$+2+*8Ԁրր݀ӀӀۀـڀɀՀԀрՀـ؀׀ӀՀԀ׀ӀՀԁ)<')/0..(-25-,*%+(&('.4.,$*1 <.6,!&4%%(1(/4*&++3%"# ,%83#:2)$*3!1-21,/,'-/(>!003'5/-1(,'%,6--#()'$/&,/*'*&/5+ *04.&#:!"-%0/1*"%2(,'.<*(4&/&2.(**3'$1--2%'&%+<5-&'//(/-2-36%$+(4-,)-0'%#2.%222/03(*+-/&%.(5+,'/'%-"1!/.*3%6/+*3+5.),$,%&'5((/!++$22&%)7/)' /+0%'&"(/!-'(11),).8'./(-%/#,&6'/+$51(-,)(5,1/,(*%.8$1%/*.4$$+*!**)/.97 (.(!$%2 '/;8#+*!#/!/,'!81()%//&07+*/"4*)0+2$%37(-&0&'5!'+/9)3/##5/)15 .: ,&"%&&26*).%/!'(%)+,.+*!8 2/+-$/)+-/3,-'.%'(/34/& 5%$-5)+1:%<-"*+6&/6-.,%1%051/(1-*&/+9%('4/׀׀ԀՀـ׀ՀӀԀƀր׀ۀӀـԀ؀ҀҀڀހրـׁ:*.0'$6'06 *5)..4 15''0')0--1)"'' %1/1*1-).0-," 1)2/+$/,1!&(6)(%+.+-(-807,37!5+2#0'*24+ 2#+.-0&&+2*,&,-)0!%%(3*+))3,&--)0++*6*/;9&*/* (152(,+0)&-*4;/9.(0,=.0)6*($+652,2-(0'*,'1 )!6.'= +4#1&#('$+2*-/%-6!/6:2(3@'>,*&,033!5/80''&+,0'*1)4)&-4*64$..+0+$.)1/1!#%+5'*0 01'/./+*)&7(2').(4)0.2.;#+5%A<<'..1/'))*1"#*F3(/0,6)4,%-2.%(*%6/'#+(($&0-%+81,($*$)*/'"#1&-'&)"*/-7)25$*('*./%$(/8/(2.+)!220.12#'03")/+%(*(0-"%":"+-1#3).1/*"+((.// *( )1//;33'1306.+.9..2$1'*+.*(5-1':-*,&8>7.7.2$2(((1).-2(/1)'0-*(ڀۀ׀ӀՀՀՀր׀ǀրπ؀Ԁ׀ՀӀӀ؀Ѐ׀Ҁـҁ",4.*#8-#$13,'91&1/1*(.%)/-'((2(24*.#,098/+*4& (+5(')$-5/(71/)+/%((#2)*3212/2+0+'-$..*&&#/#(/,2!+<$+2()0+(%/2.'0%4,320,( 3+!03.*2<'0*-42/'$/(4/'5%$(.&)112-35 2+4*:3=4>#$')-+((+%),/#$/$#.25:05.0#-9)126$/(6*(*-#(1.,73)#61+4%3+1<9"7(#%-&&1%&&3.%042;)2'(/)*49!#(2*3/1.(."*(,1+"9,++*/',4!302-/)1-.(+11.*+#9)* )/*3% .+4/+,-":)4.*80((3++/ ,5)!%*.!(-)4'*.'/+,$,)31))1 +3++1**(0!&&(* 
&5-1&*'1+$,&$3,$)1+&#(#&03//)01:(18-)9+/())++%(%&*+).4)-",3/'6#14<<6$&7+/*+#4A9+-"1-3'&/,-*(/%/?2-0E%06++0'&ۀـԀۀӀӀ؀րԀŀՀԀـԀӀ׀Ӏ؀ـހրՀр؁.)68',,73(6-,(9)402+1&+-)/(#-+)'*-*",,013(8&5,)9)8($'2!3.'/&*-'%#3,*3'#5*,$/++-(4-3-%$)62)+0+.0"6+=,*0/%%*'32./0,)!7-(6"!,(01$.#+/($',,0--76"0.3/)-6/..%,5%2'&.'-&,$'!02  */))<,$)#*',/%+/0'-/0,%7',3-3 ', )*2$%/''-)/),)1%8)))(2/#!'*) #)5*.)'321-1$+5 --(2) 8+& ' %2.+"$ 6 %+%90+#-&*$-0%-+*)@/.9'-*-('-/9%'5-**%(4+-*./@/,(+40//(3 7.)2+-.0(9:2/B0+*0$6/7/*5+#%031!,82-(4-61-/%,*&62):-!729(;08+1#%)(*2(/'2627&!&;,(4<#"2%-+1,/11'!3)7 4'3,)/()0''(!';)19'''%- ,$3%&&1.5'14<*'.3+7;+,,'95-0/)'7,%C/ۀـԀ׀ՀԀ׀րՀǀڀҀҀԀ؀րۀ׀Ӏ؀ԀـՀہ""(,-)/$'3)/-#.-%-13$5$275(0'-/*)23,,7215''@&/3,-5-:)-((/"33(%826*/!.1* 11! 700')-->2$!' 0%#(&-',.2.$&/ -(&%5%").5)4#)-,%/%$(0+95/$-!-2-+-)%6 .**3',3;,99*&%)0(+%!4+% ,.0'&!.$-'')+- .+!603(03*4$0,&) ,-,-$<(3/))-%(3&(' 0,2$(%8*+%#(:)/,?&*"%%*-11$ %5%"'6&$?,$#,55)+80!7,1"04,0+6+'*/%-)!!0'7,3*&/!4(*!050$87$(,$*1%%+'//)-*$>),)5/-1/-'+5% -!-//1 (-+;(2/(1*3/'(0:*-(-0+0)."21/<-(0&))'+/+%!*.2%&")) 21)&1 -$7-(10)$-&,#) (+3&2*&**.89*<1*%,1.'--//*-'0')0/);/*7)1+'$/&;'73#/*%-+"$*&-%()+9.'0=+6))+23"-2!#=(/((2.$+/#5%"')'#%.//63#&.$)40<(8*/@.-"./'&-+:/)/#/$01"(--*,.)'+.(6+24/4+7#13':*&-/22/+2%)-#%׀р׀Ԁـ׀ԀЀрӀՀԀـրۀҀҀӀـӀ׀Ҁҁ?(<15"+62).'&.$&(8+6%-/"/'**'5,'8,),*),37(1*)(),-+5 +2%**,*3-5/%,35+')4%(4'1&*3/,518'630/(.2)7#.$+1//'),+*2."332+-+)*(9)/(%0"+9."#$0/('$,5+1-40%&,*4/%2*)8,+451..1-$+.0-/2) 12;&&&'7&1(71/4/)"31/&%#/'+%+(1)$*):* $));0#)41%,.5*1.%0("'6'2"-"5$8&0+#'5*5)%*2(95//32((1)#+81>$+/)/0/#/(*8..2,0)5-)!-;(%%9!61.)2$.%-)*3+-4%4$-0+40-++4$)-')1"%%-0$/,+5&3,+%(,<,&,)*#$(#+/=)*,(5!)-7)2!9+1/-9$&4,/#*/= -5-2'7. *3/#"(0)(*"0(&702+%(*"/-(.-/++-.2'4/)%'--.3.7 ((0- +/'/&&/3*.&/+)- -$(9)1.,)-30,6,352.+2-܀؀؀րրـҀЀ؀ÀԀրҀ׀׀ڀԀՀӀԀՀ܀րׁ#:)5'&#.*72*1 +&70/='3*60''$.'+2-"8."((.-7 -4(..*+('1./'/5*,. 
/ #','*4735+--/1%6/#-'0+&"#1%0%04+/2/+$)*//65/7,13'))1%'/+)5*)'4(-!:,, 71$21,*4+%%+:)/+2&+#412 ++"-13-(9.03/3--&<,)$4$:)!"4)!0+22)+#222&)4'&/.5+$ '+!#&""2'&2*.,*3B'/%)2)))4, ;&#/%'-+6;+"*)'!( $+#0).21!35#).'-,5+6).-"#&,$/183*$/64=+!4%00)'-1-'34,-*)$'))5&-+/,'18&3/#*3(/(/(+.%'(%+:&44(/-/5*"3/53%%)/32&9 /'.-&7#.!+$(%$)***5&**((1?51+2.,5:"()9&03+/!)&38111:%7$%,.(3(!-$5;/-.9."(;+'-7/.,)5@++)-))+&582(('+510423,%./>*#2&)2Հ؀׀ӀրՀ؀Հ׀ÀـӀӀڀـڀӀҀӀԀـӀր؁.)'2-*%,)1595,1/ #:-)%.1<306*./4)%+/115/29--'+**3.$/*/-6+*(70*2+*(210(%;.;#10*0."@#2.+/+'5%3,!4"+(7+&$.))9#+)))*#*(%/# *1%++'0$22*%,*,( #1!!3(+.,+C$&4,+0'5*15+5/'10&2..-"$33**31(!7!!';%48+)/0+/% &-+1#!)0 9'45'7&"/35.)/&6(0&0&2&2 11*2'1&.8'/4-(%.#,%,&0--,0341.)'4+.0;'2//*5-&/73(%$1')#+-32*#6-*')3+ .,-.00$4#*(.'3@5)#2+.45"0/65+*:*(08=+#()6&--20"*#3!*.9&%1*4(2*4*%*&2-5))65'/2.:9003&$",-&+<,#+,+0;,!#""".&(%/6,-3+&#&-$0+/++)%2'&/1#80'2'=&+72+470'&5:,+,'/.*,+4730.$-02$2+/+ /%+%2)8*/*ۀӀҀـــ׀؀ՀĀ׀؀ۀ׀؀܀ՀӀۀۀրրրс/),/*+0'/2$.2'-91'*'/-58*%$-"3!2,1*%4)%3!"((74$8)"(4/1'51-/**0&1(#)0-:.312. -#&*&*+-&5*'')"3.%+6/ &&>4":+( '.+,1/0905/#/,0$*;?2-4'*615/#5#$)/$(/$0#0)(-/(&(()(&*5/&+/"3%.=42%5$(8+:"-,'/#12'+8%"26'('1#&31&$-**,)*07#42'.)='/;8)7,)-#,("'@24%)*4"'0!.%/06,%-3(9*)%,/,5026()54",&3*'%1:$3+(1+.(.*2-1.630$&$-1*0 .$$,!6' '"%(2-''&.11)*'4+'%!(/0%++#//*) 5,"3#,&/?++:8:/(('$"0/.7(*/.<+#7 "59/*296&0&(*&:/)+/"+1;- %5/- )*0.-04(1+#-14 *./%%3+$,4-'2-3,'6+1$7,++-+=.632+-3+54/235,3''/76323+,3%2)B'-2&&-Հ׀ـۀրրـۀրÀЀՀ܀Հրր܀р׀ڀ׀ӀӀс$+%*)2<6()**-&4&,0"0G-(/245+(1243,.8/%)*/-$20,'7%/%'+&29#2%.7+43,7$7/./',#(-))).4,+4-->*6(:18($1+,%(+"-.#.*($='/)3!#1E)2"$27=% #))/#B")+','%!05&-*".'5#%1)0#.=.%3/1)))!&3*$2/'&.:-/$6/*011-0&-+,3$''0')/(8,3+#,!"74()( %7$+-$#(* -*&;2,,+-/-"-"0&&3)'1;#3:3*.2+#,#$+%,+.4/6%&'E(/-#4#-$9$3'/,:/,)"?1,,5#,"&*$()*!,,(02'8.'$-''2#"'1",,3&4.+(#1/)&*).) 
*!)#1/8)*"3+/00*"0';1.+'"/,,"$-2*"'".)%&(*&5!:+).,#&.6$*-4).')+0%(,*9/+4-)(-*"/%),40(%1)1$*4.#.-))(++-5<2-<290*0%%0,)+3.&(%2/?C-1-:.+;7!.@<3,+<#:,181؀Ԁ׀ҀҀ׀ҀՀҀĀӀڀ܀؀Ԁ׀ՀրՀ׀؀րۀՁ1-*#10&)/5/357/3',%8)3%53.)%(''4&$)+/4#&8'2/3.+2$5#21*3%3*&/+.'-''%5'1>5'&9)';,2(0(81/)%29(++4#(.4!*.'(*1))#5&-&((4"/**38-09+&,(*/+$($/++)3%04'0$73++$-(-.%#76/+*!1-3/$(%*2),,-,1()1.$.(2.#+1/2(%..(,/.+(6.03#3')48*$+$"(51*+%1&0."?&'!&0$3,,&!*&2'+)30!<#,.3)(")/,'$(=+!,:,'37.16*9.-*-2*'"5',)6& !($)*%&4/+0/.+*/0*1#?/9//6--/. '/6!'4$,'1#309(+11.+-3..0/5'53146-)5.#-+&0!3.+3-,('0(8/74-29')1/+23#$%/-16)(+-.+-/*5+)*2+7+*.2/+$3-/$+*&(+$+""+,'6(!1-0$)0'#-67)::-./5747*3*'%"3-$*%6-++)(<)!4//35/"&/+%$.66ـҀ׀ۀҀՀހ׀؀ŀր߀ڀ؀؀ՀրԀҀҀր؀р݁&++1/'2!+9%'324+F2-*-:2&&7;$3/-&2,/),"1">-%)3 !%5&9-3'%(9001!.0&-/'.2'/3+1(.-E//1)%'33-*&/1.&,E,0&31/+5+'3.7::.+-572-)&5&--4$/#!#/*3*&./,&1.+*$%-+/<' 7$-(%'&)#4#@** 1(2,-:))24$5#,0%""55%3,&21/301$./)/2$0.)02/6-1(,)#"!/+-1&$.*#1/*#)343/'6-#7':0$.24#'%,;1&&.'+*$.+-5%-4'*.(*()!3.23&)0$03,,25-.*6-)2)(,,+,6%#'.(%+$,21,#*27-/!+!'..2/,".<*%))',/.>5'! *-1230%4,,:05+=(5/0(&3!3),%0%0 $2+)&&/%<.,*4'%',/,- -.(%0;#.3**(,(7#:..10('0--,-) 40+&)/7+"+&-,:33,.6+)""7205$&+&()5-6!08+1+1 .%45$!5&/$%))%/8%+2*7/׀Ԁ؀Ӏـր׀΀ԀȀррԀՀـՀԀրՀր׀ՀՀӁ/!()&5/4/9!'+*%%,2+&%''41:(,-%/87384(/+/2:5310&26#**#8+7--%,/-%01-#+0-!11))-*(3+/833.).#)$$50/'1/:$'!-0&<8)3/23*#$/&"#/2)&!4+4-0' "+"+#2/15.1-)&"*2.,,(*+. 
,&7-,14!#-*3.*%-6-$) 5*).,$%/&0'4,%/#(2!%*%81)'<*',/27%/2((*!9+*6'(*/@5)&!*%/,,<5("2*1'%0"5&1**-*8$1*0+'"+--11-'6'57)&-$&0#;3../-'*0)+*<+15,5/%%),#8;*0!,-*%/19$'/"$0C.%'1/$+24.$/)&+04(-),4+5-#-*'-9-.*(*)5+#+2)3+(3+#"(4:04&2/"&2+*++0(%( 3 #!//5.#'/4#664'/7,+'1'.1'+18)%+0% ".5&1%9?)4'32)%#81732 '#%!'02#(4(83201+3..*9).(,,)+5.$*#/#153307',:2ԀـӀրҀՀՀӀրԀҀր؀׀ӀۀրـԀԀ׀ӀՁ+').(&3>,#,-/440(+,.(-.006/18),!1(0/--#5%/3/,10!+*0/&2'-+8'*!,/0/*1*/1%2-'0%03%7*,//9(&* *7'/%!.'-4+-+("$)$%)6--%*'(5#*)3%2!';$6"<&#);(*'$(/37))9%%-$93/+-08,31*-%2..;4'+!2(211/(-*+ ).--04/065,%0) 4('.%1,2",#$16"46,804(׀ـҀրӀր׀Հڀɀր׀׀Ҁ׀рπ׀ӀՀрۀրЁ,*0,4/05-.//16%4$"4*'2(<43$"/,"31*5*%;/)01*(* 45-0*/46,(0/.1A!4+#&#(;--'-8-7&&+")6!&-$2)2*#=#//.9"0&#!$-).%+)'15@$1#7'',.)16 &2$/*44&,'.+.-3+$86/9/4'-3,.,#3,-3+4,+#, -3+%"$*32"+#(/ -'!*$$0/&.F.*&! "+"3 *$,#8&,*(,528$/((./)'*!),!<7'(40.49)21+'&+00304.(-2!1''-1-632.-90 21-+2,+6)+..+)+$1>, ."#2.2)$)(*>%+-(. 3$(,/(6*/////)%(.'/8&7)31%'*(5!/(),!&4,*-*)4,.+/)$50*-=#5+&./#15+5/' /486*8'*)0*+;))+1+96//%&0(37-+7.0'*'%'1('=9&$+0+#;32-%*2$.'&7)"%9%)*8/"03-&9(+()4)0/0,*,,(1=7/*,%'.*71/3%,=07/+6Ҁր׀׀ـՀրրՀȀрӀԀրڀ؀ҀڀՀԀ׀ՀӀՁ0*%+%35%1.(#,) /+3<:.$15/+1,(-2"0!3+3 "8.!6)$'.-/'!-'&.+*))4"/2.(&*-,)2/%(=%3,2(-.!'-.!&.(.!/-,:--83%/((#*//)1*..+&+10+.-+0"('++8++2;*(-04)(2-+*#5!1#-2*/%7+66)-/%%3.%'.215,2#*+;3'(*6+:)?3:534$"#+#-#.+-0":15,"4,=; )2*5/),?8"1&$+-+1#/02!%$*/-)6,-<23-6%+0'1-2$-/+-2/,&"36)4/%1-'=%2*0+0*+0":!95/+, *2(+51,%.411)./),(/<"27!!:2*),$%/'65-/+.$,3*+%,20-7$+ +/)+/)-#8/031%6//+90*"()-1/'.-(*-)$(8'0.$.(360$#)29# 1$8A7/./7+./-%.1+(*.-<$;'6)*3!+))4#.).--,*+#2'602%2/623:/'1*)62+"1*#46/243'+%#53083-3ррҀ׀ڀـրπԀƀ׀ՀրـۀـڀڀـЀԀӀ؀ԁ))1'/-2#/9.;/9'&-/0(,%.0#2'$-'4-,&+1+%#3',.&-:*-'*$*,%+-6/76,!--,)-$$*67$40-*+)05)+6172,,84%0$#203&)#!%&91 +%+&*.&+.$:&3!(.+%"0$-+-""/'.,+&1%(13)#&04 #5%'++#/2,#'8601&$)&46&1/436.,()03&*$#<,3.'!/-''1/81,/"+*-6.*%)2)(2,)32%;0'5-3';-5))."+!2+5#&& +)14+)*-"'%%)'./.*!1!%#0%*9&/$2%4%4)$012*/+%2+3,*=1!1$(4.'/%421--5)0/'.(.*,-2--)!/6@/$'2*"'*0.21.3'/2+%;4/02&(*+G%/'()61%38.4 
"+),).$)".3*1.,*(#..-(%,%220$)'/!'"&9++,/.%356---4-"',,#3*3,('#0:*+/)0*)7%4%<+3):%28+7*,622*/0*(-//1&-*;$0" .24.#(-,))8-)ـ݀Ԁ΀р؀׀րՀɀրՀӀՀـ׀Ԁ׀ڀՀӀրӀԁ-1)620@.44:A,-5(4(/1,'-43)0'01'2!$/-.2.-3*/<02/6#,%(3):&$)(!(-&,&&*+2-03%.11-03#+++1230"):.2(#&.412<#+'37(093/&.0-+!*1+2.1.2-4-(-<:%17/1#1)*2."7'*%!2&'368(#"#0,0!%7*+@/4*!//2+-6-'#*56$,&+3400,*2/);+2-&$1/)&%,,1(1-&:'4)(#6(6*.%!,.(-/&"-$ $-$.,3/211:$.-/+0,&-*/"(+0).7$'-7)"/!51/'3"/5-,'',%- 51:240&$5423&%/+1/ +' 50&)(&5,7-#'&2/*)',/5'-)-/+0*$!$%-%('$$'6*15-(0)40600')3'2'''# +$6-.-%7)''4-*&'*,1,,%/%/+!8 1)8*+0<4035!/#)-2+(1$%75."%6&0*;$'+=&*(/00%*):,)/!.60%7##&/";=0'*&*139-5135/(53!%(/3*5-316ـԀրـՀӀՀ؀ЀĀՀՀ؀Ҁ؀ҀҀ؀ۀ؀׀ҀҀׁ%)6;63<6/,*-+30,+%#/%6*65'+86+-)/9*2*,035++ !/*(2)#%-//)$)$*< #,(50 +09+(,,,.+1(>:=*0%3&/)%4.;+/"* 21-1/-&&,),(/2(/)*% 4 2.*71'%$*(&-'#"()8 )#%(0-.35"()/**)!4*.0(";#**+/&!+7(+1%3'*.1,)!1)-&+,&0%2%'0-'8(%.,)+",%$,'*% 24!".,($3!5'&%-$&/=/-)/<+!%$'*/."++,2/!24.7$,-).(00"0.*-/'+5&0$+)29;'*3('1048'-257"',.3)/+*.,.&5+)/)1+# *0*,&.,!+.$041:,0"51.)**"'#$7-;0)42'%'3./42**.)'$27,3%,*)+!+)+33;>,2,3201*(-))(-''97&,/3%*4)++* 6#65("1(9,12"9/30.-5#+.2//ՀӀ׀ԀـՀԀـրÀ؀ր׀׀Ӏ؀Ӏ׀ՀҀ܀ԀԀځ1.-3%5142/,*7$-7/+//')+-1+-*'!/0+2.1!201!5*-+7'6*7&*))%-*$5.4/(!- +#-6(-+.%+!(2 0& 4-+/1&(.-*07(--)40#(&77!,.#0/$)1+,)$%84+"%!%,*%$+-$4)''!4'!)!452$'.0!+01+.+#71+)'01//)5-+/!,./6">$((.*#*5$ 6.6+>457'/.$7-!,/$2"!#)&/70.$22,/#6(-&)(*0,##;#&0*..,240+,$"0+1!,*.1.!'*%'11/+%(&/)/ '&6(/!51///(%67)/2!2';''%(-+ '&,-- ($,&:)/,(3.# +:20,#/21.%')*>)+3#9++,'-/,202.(2*(*8.1-705) *4&)(!(-.3("-&7'1#(#+/**)3"2"/)7-&/,''.,-'/$+&((!'(''$#(*%0#4$/9/5''&))0=/#=/ "0,#(&>5&,/)(*//9+/(!A11>2+*72,0&3,)<@(/BՀ؀ـրԀــЀڀĀ׀ۀր؀ӀـҀ؀׀ԀՀ؀ـׁ794)".65.*1-.5*)9*,%)66-)*)0,(3'-,/$%+&52*+!-+!/)(6++/10%()#"7;(%%2)','--+"1,#(%82/$8$'-&.1&/&&#(-,"%0.,//11",52$/25((730'(1/(0"$3*%7$'2&*/.-&86!$01.-')$.!' 
70//1*(0/-$*)))..)&-%*'*5,3(*%'?9+79,'#(0144/$1 50!&1)8*1(*/5/4',2+3((+&1,(4$-24&1!.1'(.#*%-'=883*(!'%-'9'0,$#/*'-&3,22$ %1-( +$)')1-3)*+7:#("71:+5#%4492+.63-*!,5'//##132&$4.!%3(/2-*/14&+'5,300!5'/ **.$$&87(!"6#8,#*!!31)#=9-*()31/!,)#/:*%.$-$&'(&$1!-#-+!/8)($1++4"*04%&3'.,"1)#29014%-$1-(4!0" %/4171)&*.6)<0>-)$+2(-1*9,1 5--&'63*1Bڀ׀ـՀրՀ׀׀׀ƀـӀӀրՀӀ׀ۀՀրр׀ـԁ)%+#&A ,.7*+/+8%-+'/32'&2#(1+1+'",+-0*'1/.=)'/'-//50+../-$+.!*'-+6+8$1%2%9)-0+-(*#'!(%,%"0-,%-+".1+5 4+*' 12/-5'1 /.)2.#+0&4*-5-!%3.70/02'(3&**5/!//52#""/#53$&//60% *)//6#&*-2($$$=3$#$3/*%(,$-# .1'8%.44%/5.<0+((#.0#5.)&/'-%.()$.14'#(4*/4#"+(.05*.')#(+/-,2&4-%2':)8/'0*-143+$)2!1*0,3$433(% 5*:+).)''' -8*7$0!!/6&/.)'#5**%/&'3 '/.1*2/1*2-%2+,/*-9=.10+4,&$)+,-&6.%3'1*-#(12+..(!.(0,00.).-!%-) 0,3219/'$2-*(.)(.0+!$-/,/(''($.04,12423--/497%/*%+52"#,3-3+/3145/9&+0- "0"-(:5)/3--)5*'')1.+,&%23,+рҀր؀Ҁۀ׀؀׀ÀӀրԀՀրҀڀ؀׀׀րԀԀׁ;42)>+573*6-+)03,8-:607)4'3+-+C2>A#2+(5.- %)(+/%+8/*32(*+-1)/#,+#/3)03 .)/(+657!/-/ 1+9)!0*1%5.)+ +$3,,,&"#48+/%'5!(4*7%%/&/*%.2"-+4(/-/,+*)-.$0 "./*,%/%:+3."'82*#%"*&!)"*/5+*-.'#1-)+%)0&-.'30'"3)1?/2.14-58//#'$+$0,5&.-+/111+)/$/41/+(0+$*(-'/5'-)*)))1.&6)'7('.'9+.);-2,#.&+#/0,)*+#3&&)24*.*-8,//3$+'6)+(+#.%(+%+02'.)-%4!0,$0E-,,&/10*4$+3-$:*#$+$ %6%&0,*%7+2!++-/5#(9-+,1'+)!$4+)/##)+30'2&+6'"((.+()5##,0*-&-,+&00'''05%-+6'-#%*+,&20'?+3-2!,9-%59%.'#(,+5-2-7(.$)+&%5&"&)(6<>׀րڀՀӀՀ׀րրȀՀـҀԀӀ׀׀׀Հ׀րـ׀ԁ *60++220'+/-",.1++1.&&)'0//3(35+/(4+$9+,,)+,916"*++'&4))+#-+4*-*+26')4+-':2%/171505(*'6%#$9'+1'3,(,0%&:9-%=.1*0-3/.2'((3-/-!2 4!,*%+(*+0-*7)9<&0)#+8'--1+2110,422/-;.1$(4-60$-70''&/-"13//764(/(4//$%/&63-"-2").+*.6?*3.+*#/$*7*-/ .%)*9!0-#+,#,*$'):'+1/)2&15%-*2!+.#*7)*6/&/+.1--&,#-"*3!.2(1-3/-#)"):"5.+*.043/-/$'**3&'4)366(766))7#%*$"92-%!,,)/,11)*'+,!*,#2/(/4-7,8.8./''#-)09$%9+2().(4"%-)97/*:)/%1*04)+. ,713.70;+14'+4*$-46!!')"3%$/&&'*0-1:!.'1&+,#$'/"/0$.725(/:*&(/73,"/13+ (-+=:,/&4ــӀՀ׀ҀҀՀԀɀۀڀրӀπրԀ؀׀рՀրրց8*3,*#%,#'*(//:42/0,5%/1/*5#,')9%406./+)).(1,-/*$+$//-+66131#)/1")+0*2>31)+%'$. 
2#1(/-&/()0+.4-*)%(';-."#,+1!*0/*#&3'?,%',-%:'".&.--1,#'$1+-*0/41*(2%+..--40!$%!$1'21..,(8-/.3#1 ,)*.'4(,=6+/4 72/5*,//6&&),-*/,1&*3(7,)#(0%(*";*4*4(- 1...&8+3:-(5/%+%(.9+)1,'+..,*%)/54+0*.(?((*&5/3(.$':'.:,,<'14-.+1,1&+3;4,0/1176+)'/(/,4).4!+.*-!%-58*.'+,.+5&#, 2(2)&/4,',)(/-/%,0$9(*+-<+1&5/9,&;5/5!&"&55$'-*02&.45;#.4-3(-="1$&$.&"(%1*'&139$'*-*/0!$0+0!,:+-=)9/-#0)#,,2+ //,0,/&-'19#,4)#%")90&+/!2,,-";6ـրҀՀـԀӀԀ׀ƀՀۀ׀րـԀрۀրڀ̀Ԁ׀؁(+#)-?1*#%/0"&,03.-)-'2,)!2*31$/(.)6?2/--/)%#470+&0.)++%A.$.,3$'1<+/40,2($31'*10) /),:+5.#-*0$*-15#/'/)!#())6 !&! #0$#%,!/-#,$3,%1"%4,)8'8+(&*) ,$ 2+%931%4,,#$.:%01--)/-)0*!/')(%/:*.3#()2.)&+$(()A0(&-$#1%7./%:1+13)'("$$&)+$+172<$1)%* $35)2.&)( )+(,52'3/.$'7/.0$*+/! 6'%9%%38+6/*34'/.-77*/'*./)$/#7- ,7,%20,+$.%6+'1./%70'*"'%,(0/-.!'.&2=*-%/(4--;'2%(/"/*8:+$%+"') +:62$.)++4),0*/0+!7"()20-.51-/!96'0.8+&.'*,#(3,0**5%5#!0&''.!3+)*'6+./#"+,.7.))0&20# (20-52'95-&!;%)4=%-)26(07.-+,,+,'-6.400-#ӀԀӀ׀؀ـ׀рրȀӀڀڀрۀՀրҀҀ׀ڀӀԀց2#<-03( 8-2%,/(/)$/+ ')-(0).&'3$/')!",'&0!0+!#%8&49$,,*-3&(,-7(<8--,$%%*+&%1+/-)0#$)+'('&1*8&.(21'6&4)!,0%5)'$,) &.(,0(/4&-* 4.-'.3-0,5 57#8/,/01.;+4+)0&-57!%.1,')79$-'.!/')1*='$ !(-).8#5*'/-(4"!0")(5+&%5.-(- %2(++0%/!9%54,,4)#('5+-'-(02730))'+,-05*0,,,02++&-C4+111-%*,=$50,,)7:0'4,**&5%)- .%!#''&//!%2"2*,:2"-$:-4$/-$2/&& -&3&+4'+.6$#/(2"$)*(&*+(-$-/+ #(*&/,+1;05312''*&B*+1.#.6.11*!+4,!03)//+)31,' )-!.'/--5((),'"+3---'/%*00,()0*3%1(-%+$.+)++)-"-64-945*) )/,4/ #3:!2/6*)6/&0)*/0%#2?$3(׀Հ؀؀ՀՀـ؀׀ǀӀـ׀Ҁـ׀׀րـՀӀӀӀف7!6$-')3!0(*36/*01'/'/1%*+&+.(.(:0)'0848''+'")3)0/.*5%&$*'*-,).7+-&/)3&!#-3,$+%%++7!"0$+5694(,,/(2!)/:).+,%:1&+-%/+$"0-/()95,/'32 %,//)#01'502'%#2$$*'0-0!)1#0'2.-/-0"0//'&,5'(4 '/'7.*1(-%%&/#"/+6$.',+(6(:(%)23+%$)/0!71(0(+'*31*+&*)-636''132+'(+&+1)4%5)/(!*$$&19(*%++ "/$$3(,:'%67.&*+./&!',$&/%+-("052 0#3$-++1.43/)(-*1+2!,"*+;5*,$3."4/*29#443$0-,$&*5-.7&0'''20.&*1*#'0 +- 5('15*&543@-$1,+3)5#0/942-"-*"+!>65%--42.%)-($45+#!6)&' ,-+,&/:97%,/48,!2%*6166*/-)6.(/'51'%6201,2-3.+/-./)/0&/3!8#/ 1%-'2",ր׀րπՀڀҀـ׀ƀր؀ҀЀրՀ׀ۀۀҀـ׀ր؁9'-3)28+2%*-.,().05&$'/%6 
2,4&$<1,03)%).-+;-+1$(),!)3 (;-4*1%05!*',--<#0--,8+,7#+,*1$-3('+)(6,(8*'01, %')4%+-,'-*451-0'*5'&-(-+16./* 54-8//*6))-33#%-4#/.9*3+#&+$"',$.11)'/,2)*6,-)(?".'4!0%38'#1*#*--10,2/,"/2*','502')4./2$/71,')2&/&*(!+-.5#6(/,45%/-#-"%09&&+2"/&-7&224+25 +3/0'*/,,/)*)! .5/+(%4+61!.4-,&847+*0/0 %;+%+./%0*4',09!,,$--4&+1(.*(-3(%4%:1+$-%)0.-4-.5/2+6.(/$+0*./$/,'-*7)/72/*(',/**/).."/+-" -(.$7"2+#57 (+&!2'$404+'$("3&20+--%,-++4#7 41.0)*!2#'/6'%),"4'<(0$1/(6-5:'=,.&)8ڀր؀ՀՀ׀݀ـՀÀـӀҀՀՀڀӀ׀Հڀ׀րӀԁ+(),2/+-6%$.5%0%*0,912)14*1)/'0:*#$&7*7)6")383'18,'/:*--+1+#5-!$.,.+-%0(#,/31/+582.32/+ 74/0-'/$&:/&0.54')4)+,**&,-*-.,(.1--)**2/,-19.6*$, &8+/#=/4-5#+'+-/(2"./4(0%-(6$*%7**-,3/+1=220)0.-#12./-6*'2.0(3%3',) 04#6/&.$*)..(%$,('),/3(8303&**6'*4-5&.*$)6.+,/"$6( #)?/ ,8.-,3*(29&.--,#,@5)')2,.27$'5-)2)5%)7('/#%,-/$%/5,+"*"/#/7'.2+'.500+("2:2*-*%-.//'#%!%(4(".,74-,*&(+..+/$(),!8/0/$&+046)8%,,##!.-1!0 :$#0()'4*#9/(+05-/%#2/6'2%2+34#$./*$--,7)+*$0-7-2;9%3','(2'؀ԀՀրـ׀׀؀ՀǀՀ؀׀ՀЀրրԀҀրԀԀՀՁ-6"./(:.&*,+1%//09(#?-:..#-!-$.$55! '016-+*))6(+* -&0(.(8,0)+"2)/,-*.,,/-"0'$+%,+<%")$7**--79$0//4+?$3*#.2*.(2)7.%0&4'+.,.;9-2%('/"%')%1-"%/125%7-/74,41+'0!*%"))+-$:0.-1)!/3'7#1/29-/ 1#!(+"&/ 722#0$,15*#2:'$/:2!,**/1)'0#5)+0&!$%*("!3)0*0/!4**-6!"')#),&26>+.13.70+3*,*(24*+-)&((5) .B+!3/3&'?+*>#+#/$+)(-2!/% 1'$8'%-3%)11-$++*++'21-02>+51%&'4:0275+!.(".2%.,4!)'%+30:*$'2"5,7*:*&(/9,9?93+6-4/3׀؀րӀπ؀׀ـՀˀԀ׀Ӏ؀؀ۀӀـրڀӀҀـՁ230/0"2&/.2,=')3+2&0,2-(3/*6#:17%5+70/465()#5 /&)!1/&-"-/*/+--('*9%$/6*'0+'8312'/#5'%(/,-+ 16%;$,,%-:#(&/9.%0+6+'*1+1&-4/+(+*,..)$'*-*)'6 (/+%9,''2,1;42)8. 
)1*-7$'05'80%&(&&-/+&&8-&,)54'.!0,1/+'"#.*,4''.53&--'"&% 24-/"50( 5)-+ *215)24,0!#,%28 4.(-02((*')$%#/%1)(7 &,'$!4%1/(,9 #,)1'.4"*.)-8*2"1*3$''2.-%4$/1$+")/&0*,(/1/.)042#&/$%/%-'.$'/0.G9*0.1/1=0)(01*%"" 2+10"/95;'((11'2'*-,-)5/-+51-.#-;/%'/-.,/!-.,+*!)2-40(./"/"-/*%%1#1:0&#'!+ $+/%)0(0++/5*#-*+9/-*5*,,5/("2,+/6,+,44"+171 !32+%5%'(1)/24*#8/4.0$*/156+(",+*+0/-/$()5+1"!-*(1,--$1/)+&%%-6--/*(3*',"839*"0.,1)&.+2.(8*:(-1#1)-/--#7,2/+-#+"*9,.%+*37+(&004173+<)1,92*--#52:5)(%0.-+&7..3/$'"5$2430 %7#(6:.10)34!5.%')!'*-12'-(/-&-:!#' 2րրрԀԀ׀׀ـӀǀրڀӀրԀ؀׀ր׀ۀـ׀Ҁہ)+-%/,)/,.$##:)4$0#).+%3!1,*/(01#&6711'0//1&)(+2;&,8 0*1*%$2-4A'&)- +)'1:'54-'+3'/5/.'!0)/%*--3:4/,%.*&%1$8(&5%.1'*+ 2:1.+7***%1**,+2!;3$35,-,+4,,+320&&.$)(-.-8):(0+%J'/).&'4-/):-04/0';$",+,+*)-$++*#-$)#/'/- )/4% &2)9$-+-!*./1*30(70'"&-',7+$+81+",.,34)#0,1)30=-2)!'&1%%2"4.84(,&-&+,"#"?)+2!;(*#%1*(*0 1"(/.:272/#,8).!)2-*./',$/(#&/8*,!)8%*)*8?'-%!4."***1*2%9" )";!(+1)&.2*3,)(!"4(+,2#'*((&-0"+*- *.&&)+-.)*&.380,%5) 3;/)2/98&7%+),/&'3#'/'2 342)%1&(603"3*/-*;,)**074(%/5,/+2',025-,$-2,**Հ׀ՀـۀՀـՀҀɀ׀ҀڀՀրрـ܀Հր׀Ӏڀԁ 2>5(&49$).1%-*.0/0-7,0-5!1//3):&,5+1.'+4*2&04-",/%$+10+0(+0,*,09."08,*/3+4 /)**,'*5+3.2+%!,):'&,&-18%/:#21 -$-((#&.6?2:+%1$3(&+/+,**/.-'&11$4*/$94-$.,$+2'8(64+$!(!//3/!0523*/"061'0?2"12);0/$4 '%91"#%$-(-'$54*)/8/..57$-,$3%'.62+%+&/+7*$& '++*+34/1"-"52*).0+244$.*'),6'(40+/+#/)0*)+%#/-.2)-)2)02,.24&% %20:(0)-&0=$,#6 %-.!*(/$*%+!),+409)03.%-2();+-0+,-H0*((3-))-2#+1..3"0!(*&4/'0%$1110*"56)%&',!4",/)2*,0%-)*(-.!.(:4)*%)+3-*04/&%)48-6'%/%&4&3,'$.28(:.+#'(#+13,(.*4& *5375+(++'('15,5-*$ـ؀ـԀՀڀ؀րրǀԀՀ؀ԀـրրրӀՀրـՀՁ4++62'-35>7),(8$#/,$(/->':)372&)/%%)71-10..,#,0-+$02!8/+:#*0-.04 $%0 /+#/+1-2,/ 3(#236+=7"50(-$'-*,*7*(+47/)&*!20)%1'(3).+)"2%2/(;&58*%,+&1.*0:-/3-***12+0&578!/%/)-A(;)1&-!$$&'-,&,/&(.'+)5%(/&.+5*3##:!#(,&)&6)//#"'!60().('(1+ +"$.)/+';@1#-#($.)./)&(*65. 
,'+%-/+/(,'3%'4/1.'(%02.'0()0,(+/*&&.'/:.'-!'((0.*++4$,5*/4$4,'-9,"2,;((#910,01,5-4'*(4--&2&.7$2#P+',*$$1!,(;),.'&7%#)*-/-((1+/"+-%:231.&*2(50;&!1 *%01+&1(7.2',10"/5.-+)!-/;&/$1"/-<31(,( ) 7$50,7"1(0.#&0#+!2+'4*&.5.34/%$..%.5<9%0#06Հـ׀րԀՀЀπ׀ڀՀր΀׀Ҁ؀؀ڀԀــҀځ(;-.6()&$'-.13$7)1+'*0 %::28-758+++3(%1-)(/1+(/=)'0601#"(**#!).* )#& 56,-='%1362(!'*)3*21&7/(,+,',/.9'*-6/(+2-* 333,5:/,/.)-,(056)+1-,1747.2-20"/,5&*60%0**'+/#(""1'5'21-!)/-"00)101 $30+('),=*3:/+5(0.221..6$4(%,2*/82,) /6)*,3)4%$43/+7*:57"$(0*.+8#>,%0,,-#/811*0*#21'*+'3751/(+8..!-,))!((+8,&,"5/$*17]oh2- ,)3!30',#*&(+*$2.3326/!-*&1/#+.7,!),(7+6-=&("),*(*04,*;"&+'2/+7&&-8&-9$(#'6"/.-0%3$./,.8()2)*4/,+16/06(&82/ :.*!7*#&2,--#'.26/-7)$>54;0/.1#/,!70+'+*3:+5)!*4 4 3259($'**-&.C0Ҁ؀ҀӀ׀Հۀڀ؀ÀҀрրӀӀڀҀՀ؀؀Հ׀ҀӁ;5')#+18./57+414':*#/2-4(/!(7":2/(%1(7+*%%*-)&($-&-2:.,118&**")/)-3).$)9/'75#1%4&!1+2-,$*)$+.)4/!3/ <1&%%/5/"+ $,1-)7.57&24)$2+&)1(#,8'*# .5820#6(,0#!1'./,/3%($-=*:*/,970()&/*-$&3+*/4'"'+##-3#-3;)$3%*('$(+#&)2+ $0,*1/%** 7?%+3+)56&27*51610/()(/+*75,))$04&5=1-%/1(.,/2=$2'8+.&.-,3%)3>#3*2).& )*"0.$ !+''2*9e-/!;''.$+0'(!0(,/.*1(134 9+3(4$;6/6.+$'*-+566./52463-*+'0$2"&1)("141(++1/-'.*0.2&:@..>,.&!,)*-/,($4($!.0/,3313*3/+80-2'-%&&3,*4 '-2,)/062$).%0-2)53')8-1'+5/*(3$ +5*06)0&/42'%7.%+#!/,226$4#.0-/0&(-'38+&#,%)2/4-5'-8400"!4.1+3&5/39/')00)-2-(,!&$)!27#(3*.!2+21)+:)+4)./(&*2/(-0#)%3&01-91))5+.+4&),(*(',#*4,0'4-)/9)0/1,75-/.*82'3'%(&4 #./(./4-1".-(.$1,-/',+0'-4=$&()6!(50248-0,),'4%#+3+,1A,.+,&0,;%'+-*#/ &>-/-3!1#+-1"'4/!0-*0F5A3-*88.*')!((0'.&6?2&4#0077-,(5(02--1/1+0,1%8*##3%'(/)).%$$))($,/$21,0&!)%:'5-3=%*%91()))(17(6.0..&*4*,4$;!(-!+1+/%%3."'./=6#013,%=.3---110.233'2.;.7A'1('/1#.)"!9126*1%33'1/-8'7/Ѐ׀Ӏڀ׀ڀր׀πÀԀҀ׀р؀Ҁ׀р׀ӀҀӀ׀؁81(+3=+(3'0-$,=**+4."#,)(!*(!*&5-16324&/4 *472,1,53(&%<''221*-6$$,<7(",1))314*5(9/"+-18/,2)6"(.((,4(:,<=/ 3+.7)/#2()+*+. 
/**,,2+,&,+>+),.,0**#%.&%$/85'2'0#3+(+,9)*(-)'*02604)!001-0-#53(-'($3.1'(26+(;&*-&4$%*0;=9)1:*% *)/05'"/&%."/,4-/,&%/(55)(/'30% )+("9-4/ ,--3+1*<2#$*#/.)1/$15%&216+%!20,0-!0*%&0*0'**$3,(3#*)3&1<&2)//"0.'2%%3+#90+.!#3(*'#+ %2.,%%.*546597(-,-4+7(7*(9/'*,:+18+4(#! 6010%'09#/*(.=/!/3;$%((/1''5%./022-6#83'#1'2"3"'+$+)'&*"*/*6*0'(+'144")2'23#&/3*6(%)%)+'154**./$.*#!,6.142()''6"+ԀӀրՀր؀րڀ׀ǀـڀрրր܀ӀӀ܀ՀրրӀс='6?$>,55+%,-3.(#5,2/20*/+4&)3-,16(*$##3./ (-*(4&%7(*$/0/&.-,)2-4*,5&-%4*-&**,*3@(--7,/((0-'1!1)*6%,4#%''+/,,+"-+%&*#3 /%/47&0'%/0++&/,4(&0,616"70,!-/2 #&,)( )3#=%))1':41*'72-#+++$(--(!','/6.2,2()+'('1-+,($*10$'.---,5&('&&1)*))+6*$)#(+.!9-$('17#,30/*#.'/)*'$B!1*04 5*1*70-/-,"'1682#+*.30,$%654, 26"&,1 +2-423/3%331,/!%*3 -'($+3$$-(-,39+/'-#%/60()+8(4-,#&*#+30!3/8)&%/!"4=14)'.&/16,&(*9-160&/.4'7,)9)8((1)(%0-'20$,3%-%2+-)0+(&.'*/+ #7+2+*51;6"+:2*')6*95.+,77))8*$442,1,0 ,).-+#%%13)1.9=4.(0:5ՀҀՀڀЀԀрـրĀ؀ӀрՀԀӀӀ؀؀ӀӀՀ؀ԁ+,(.&'*0-271.5*&+0+*2$4.$$'5*& '/37.'4-'02(&6753-/.,''++.7$1%".&37/:*#-.:7$)/$1/.!)*.$#&,3/10+4'+$"'/;--@"*9*40-*"*-,' /&$)2(+!61.+#;$/A2//&1)5320(."$7)&0,2%4/# 7)6%)/6#,2('&5-..(*!&&.5(-3+//+3')*",1(+3-)/%-/#/"5131*.+)')!$.!(6'+/+/(061$.531%+/+()'.#!/-(2,3)0)2472 $&1.-,0),'-'&3)+-+34/+09,+-0+)17/.#-&)"*483:1&!'%#&'1'!&%%,-'4(/=0*51%"4&+'"%++!&$-.%((>%'61,",#"@& +%)&.> &+)7%*.;.)&()**'8%3+/1)0+,'&9(".95%, /+=8#/7++=-%*1/4$#28+3>+&5=$51/1/57#>&/!/,!8-,''$+*84.;1,3;&**3'ڀӀڀӀրԀӀ׀րĀҀ΀πۀ܀؀Հр؀ӀրۀրՁ!03(&3)20&+-*38/+1 41+7,.*/++4 (#"&/,(-&%&/.&2(&61%62&!24*:,.-+-0+&)(%0*/(4'0&+)('36'7,07=/%70(0'1%5;"&,.86/-/4( # *+9!#$&-5-(. /0'%%,.768'3-.( / .#+082)'/!,.739-"++""!254+1#(4'!.%7-*5(:2/,+,*0-246,&$07)*/'-,2.,"+.-4%*,0+-.0-0*%-,&'-*0,++'%&)!*9,-+8(''"#&,. 
.-0+%$-$)+,(/ $9*/*$1,.++!"*63D-$%01*/#/(+$",$3B0$0.$1$1.<, $1"B3''715,'(-1,/2+$(6+$'+5%&+($,)(%6 +#& 1*3650**1.%/0162#/!'+9,,,+13,26+'=5--,)(9,'("=2%.4 1+2 5,0,!5%-(,%)4,"7/,0+(C(4"/2.&%+2,)$-6,())(.))8+14.**.*/+'":%.0)+"5+-$+"'.%2'(+ӀԀ׀׀Հـ؀րҀ΀Ԁ؀ـҀ׀ӀӀ׀ۀԀԀԀԀՁ0+(0$(1+))-&6""+0!22,)..2-.8)5'/.&1"**2+*4$%.)-*7'&/4+3/ #:-,/&%+2=-'/)(5')+'*7-*+))&(4/64''%$'#/=37#52//@503-+.&')3/+=/-$41<"-6)/$5-1&-,!'0+(3!-.+,'%5(.#++.$*5"5)7'=+0*4)*.,*1$(.,-20'0.&&!-0,)+'*0%,($+/#$&$'.(&,+6&.(& 5'?; *-&#)*!2%,6.5$$&:5#.,#+/($$&2&%$),--/6(2"%0*1.,5,',38('3,%*%/$7*6$/#13+ "( %/0/*-.))8$-2++D')0(!4)-((1*(%))/('/21%,)&'!,,1<10(47115-%*&:#2,/%$$)/5%+$-$5*+11-!,10#/##$?0%*1"-!7?6404+202:&.5*3%,.21'A)&(6*8&!)-3'&+,>0.$)7$'5:,=0&%2*-#20--+"+7.2/(#,0'3(1"'10493,/؀Հ؀Ҁր؀րӀ׀Ȁ؀ـހ׀׀Ԁ؀ڀՀՀրՀـ؁.,:.:8+0-1''3'*169>%*36*+/'1$,!'*'$&!()1-.*-&;71,(*-(+"".7*0-+.&,"3$-//,%-/+).$04'*'*/12+('-,--214 *#''---#1(043+-,%')-"04%#2'5:/.*0,1.+$9!4.&-6%&.( .& %2/9("*/,/%8*'%13/9'+%*)51//)37*+!)**,1/' +03((#'3')().)-.0-.!-"/' 13,)9*5/$0*0&!.)*$#,(!;(*#/5+$9'/69=#*&.+3(22+7+(#(-+0,'"./'1(.3$010$31*(&/'*0702"!/$>7)3&:"&)32#*&'-%2#(+%*'550,/?-270!0519*+(/7.$4%+-)4"0!-/) &'/&.-''*33,0'1%./*#4.)%!+ )#*1/&('4.-+"!)-$*"4((/)8-+$7)&257515+,"+*%/2+0/4//#/80)<0)(3)%0" 875&,:)/7:8*.*='76,/(3؀׀؀ҀԀՀԀրՀȀ΀րրӀــԀ׀ـրՀрӀҁ+*$31;,%,-/ ..-.3$.!+.),3++$$0&')2-33/?"'!4#-48"$*/"-+0-%//*%&&/*1-'4)*+,359%/80%25+%/)-,"@3$26 4*.+9(34+*&%/3$+1(/$&--,&,,..,(45(-&';'2,- 0#&*.:'3"3'-2-2&&#."D+:$8/.!0%6,/*%31/(')''-/&(%,5.21--*"11!%.3/).'"-./+'%3"$/*# 4-,/4205(&'3#'2( ,.'/42<.%40-#(.1/*A,#)#/4-%'-*2/*4$,9)+7,1)-#/*34+$61 /&:.2;*/+5%%3&3+-"916(?.):),/72.7-'%*.0,,,(&)$3/-#%(-%,6205+'26!''!02+0!-7(/,2((-'43)211"$)1-/3&6-'+%.21/6!'#%)$+ 3%3'#,,+,11)'/(/-6'/++!$1"//+;!1+",+/1/1$2"/)$),1*+ .0,).5&9.+92+)'2+5)2ր׀ۀ׀׀؀׀܀ӀȀҀр׀ҀـӀ؀׀ՀՀ؀Հրہ5)*3"*?9& $*&61/2(+/-+*-:*3-7-)&*4<(6R83/3-/%*0/'%'),02+9,/-/2+/+27>0/$+%#%.-#+ 3.*"(/-','1, 04#3-%##/5//9##?/+4613*!+,#$#)4,.!0**0")"+-(#$'&.'(3/&)/?-2);%-/#/'"0($,*./&+,2$"7('. 
.1'/)3','13*'/8..(0.&.'3,//%"%.#$/..#/.7*'"+4$,)6-&-%/'&+%.,:',#+.,2*$)*%7F(*0/:*2*,(+&(/'+**#("%/*7,-5+.&7,*<'21)2$$31&$03**8($-#(0-5(&16.; *&-2=3,"$)=+')%*&23#1.-- :/212,,/+/,%9/)1)//%,!.+"#'&,3$05/+"/1)/-0#&/> -0'#9&'$.--/**..),,-/&.+*!%5/%+& + ,(,-5.0)+&'"+%&'7'1;%'8+/)/+05*!8+;8&2):"(#&!/.18$34-0,.24؀Հրրр؀׀ӀҀɀ׀؀ր݀ـӀрԀ؀ــրڀׁ**6.5=0#=-3&"*;)4''(.-013*:-/;,4-'(.,*/&/!+(&7*/0;-8+5&2',7%-/%$,7*).).',' 1,3%/&$7&1+,'%*'*!-'-,,%*'%160*%%3#*3-+%$$1),,$#,&+;('39'!5)!4>&,&(&/14)3.66925)!#+)221.,2%4207(,,*3-'!3+(,0&',-&/)%$02#->2.98*570$**('#)4#%*)04%0%%;61"35954,1135 .*(720.(.-"4%5 3$3$7,)'&()'+#%635.)(%%.%,((,/0*"+-*'3$.*'/%()*-/$9./0536+(')("#'//0$$4)3%+8(3+,.+./*,9+(6)-*;&1-&!3/*-&(2-!$./'61.,2&4,),*<,6--?#1ـ׀րـ؀ӀԀـԀʀՀـۀր׀рրـڀҀրՀӀЁ,/)9%74'+('*1*--/,*-/.(#1)+4,.(,%/58.)504*-%----'#:'4&&9*)/3.*/(!%''&!5(0-->3%4/*-#.),)+3;/6,200?23.(<)!%*(8%3/306*3/*"*,94%'//+>!)2.+15/--)0(1$$/%$#"'(+12"$./&#%'%5+'2+#+.5"+ '*7,0/.!41.&60)/5'&*&/,+'',*/,*-"%-#,4.2 /'0+)(!*%(+/*-355$4(+$8.),(+4%:+*5/17-!/<13"$,1+..).(ف)0,456 %'*4+/&+(,/-1*=%'*0%,&+(46/?-+3/*(&'')5'*2!'!<2+0&/+5"#3+((1-$0%.+%2(9$&54'5!,%%%.*1-+)-1$1")5/"+1-3-+&(2'%%(/'.#%740(##"!861*2)'- -,*/!'2,,/,50$7'(7-%6'-.4%%))5.1+8.++.$%,)4."1-*4+91-+)22/10-6,-4)-,/#-'(.%642.12ՀՀ׀׀Ӏ܀؀؀ҀĀ݀ڀۀ׀ҀӀӀ׀ۀԀـ؀ڀՁ16+2#0()#).-D'.'/1#?"+(+?2.2/&.%%")$&-7-@2),;470&')&71/1+#01&8)'7:&5"/)/,'#12%-%3)0<(.1*.11""/##!3.*461' 91+#,/','2$,)2++2 <70//:14+.',*5%$"#(*+/)' +##./#%(+2$)9+*34($+'' ,)/ +/"..+:)+90'2"7*/+2#0 4-32&0/().$*/-2"09/'+1(.*&.).3()&7- .#C(0$1+)$%%0%0,'3&80)*49%&'&$1)!52,%//!$(3(,)707-+4)"01+(!&!%/!#6/-/)".)1#2*'+''+$&-&.(+:--%,%00,8./*+.,'+%/2(=++3)/)&32(,.B&=2**31/),,-..04))'&4233:0!) 
(073$0%2(*-5*&$%')&!6..5$-3/,3#%'#*.+"6)2%42$'*,,0!045 B,/<'9-&/2659;&:'+30./#3+01ҀԀՀӀԀ׀ڀԀրǀۀրՀԀՀـ؀ҀՀ׀Ӏ׀Ӏׁ!&'.:/+*))"(3!',)")',&+)(,2 4&!++6-2-!(1160+&=))-7/)+1*,+(-)*)/5,1833%2%*4&352,)0%/$1#1/-6)4"0*,(5&'++##/2'$ (9+- 34.7%*)&!:5+&$27#&$-'4*,#+-%+00-5 ())+1%)-)+'.3"2-*7.0'-,"1((+!0$=5$,/.(+'092)(',*0"#3-"'&.&($*/*+(5'3#)!(!(%-:4-)#($#+.-+%/.#+-.1(5$B)0&>,3/)%(%.F$./'(%/5.').'0"(6,250(-+/6/&$&"2).1'*50#93%*,!')3//3'8)/)#(43118$ (1-7%8,+,13*)*'//-../102'+'61(/#&'"/)-5")(k#&$*'")#1*4*+)&%%-5"1"(%35''73&,4/(,3,9'3?'+#2.4*- 2'&-2$/62)/10,)..075/%&%*6*7*29'-,7(+/+(&0-#/1!:0*-41.+%(22'/'./ 52؀ۀՀ؀ԀҀـрԀɀۀ׀Ҁ؀׀րـ׀ր׀րڀـӁ505+<563&49+&'3*30,,(+#0%)-1.(8/$+*>/+/)8&3+*,(',.*("14-#4)2$5+.4%*$#2. '&.23$0.)6$*8--3"/#+0/--&/*-!-=1"7,-+,,-/2&()7(1-+/C+(.01!2?%',(#",'0$22 ).7+'$2).,(1*%1,1=.3(.50+!-.44&-#0#.*,23+( 0=&41(,/.*.-**.:%&",1!-'+$1/%<3/(+(/&%++&!+,2+9+"./.'1(('/1&,'$50. '1**"1 *$+:'3.(71",'&4)&!$7+,&5(1*',**+&!& .83&04,/9#1'.%#5.)'"(-.'+*+,+-($/)#/$$,42&*7,.+3*&,+-D)/&)4/.0.'+*/%!++/$'3.,.#)*-,!.!/'##!71-3..%"%717$,(/01,)*(()4)/('/$,.8!,+4-%*,8-"+3-6(15+704)*!3D+1+!1/&&.3*189/87!')/*7(3"*14C23.A+ЀҀـր؀ҀӀրրȀ܀Հ׀ـԀ؀ڀՀրրـӀԀׁ/,$+6217"*-*.#61"1(90,A,-/#5,.)85'-1$/(=67/0)/, #&B )%#*2 -5) .,/)*8-),.%+.6&*.,.+1")*$%:01$"+.'&+'"#).-#00-30-"/*.%2/2+*(/83.7/('.#:2-(&*-/.+-)$++*+4,!/4--.-1! 
#7-%<",-(%4+;&'(0%-*$13)&'/,")5*526'24$&-,&'!/# (22'.-.//"#,/2#9%!"16&$5'1,$53;3-"0-3(%1(.'&030,)6-+/&'9(-3#..!1)+.!"8+)$,+')/-*$0-'!.0(<(!.!-)+"'"))%!,*2+--+(2'!//(* "0-*2&.3!+)&.%)(+1"+4(./-%"/5$!15,),,5/$1*.02& .&' '8"5053$).)2-1;2-)1'-1"#8%05+6',(#"(7-@(04$-+)-('>,"#.(&$3.,.''%)/(-%?*##+$6'<+!6.+3'%'/4:&2'4*/)4.,6+݀؀рЀӀԀր׀ǀ׀̀Հрр׀ՀՀՀ׀؀Հ׀Ձ-'+/4&,0+/1()''2$5*+6.*6*%3-(23(('1-<2?42=&%A/1.-3'&1,/0#$,2$'(!.).+<7+#*8"(-'"%/"411+103.((61$/1D/*,))0+1-*'%1 ./2+#.'!(,&'-(7'40(&*%%",$/#1(-% $-(7 /%/4.'+3%,#$-#2/"5&2%+/,8(!0&2!#&%*/0))/,*3(+!522+*$/%3,-/#2%*+%8',33$ #/%.0#(&$*,&+-'+,4..')4*3.- 345+)"2-263 '.)&33778*#+)'.+&$-6/#;('(,1,$*))2'14*('42+33)''35/('5/";*81$&6-2..+'.()4&1+-3*'*9&4'9"1,4,*3-$+**)(%" .%#,$&&/+&+#"32)/*(. $%-':42%$-+),0032!'7/ '$%)/,*/7&2,'! ='/*4,22549-'*%&,*1."=0+=7!32-($4<.)(()-30*1325/&.*5-0,6+081&׀ԀԀԀԀրڀՀՀƀ؀ӀԀҀ׀ڀՀԀՀрр׀Ѐҁ00)2-#1,'(0/0$+)15,)51-, -03+="))4&31),-6,(9'3.#%%-*0&--.&-&!/6&)*,11&$%121,+#,//-&6',":%/1'.,$0*0".5081-2)+0;"*++0$5.(-+*$1)!9+.'-.$')#((':-*&#//(+51",+*1$-/5*-+++0,-,6-71()0,!$)+://%+6'(+-."(&% ++#/+4*..23)+)0=0#.%*$'+//;""+,605,'9.#':(&6+*&,$.:,#3)9//++'#!/5.(/!6%#3)2(/+),/-&%->+-.$7!2&+!1..5(7402)(* #/:)11&6(& ""4/>*&&)7+!+$.8;'33-1,".(-(-.#$*(%"/) 7*)-6$"!"%5***2)0+)$'(2%+$9,'<$':#16+*/9&@ .',$9%%--/!%41-.'3&4/16*$5=#'%$/)3(1*01+%/ -82('-#/'5/&2'.'7.+$(.9&.00-*(:*!+))9(.&-1&24:6*:%7-/=ۀՀڀڀۀրрـՀˀـր׀րԀӀـրր׀׀Ӏـׁ&')+15/)2/.'(.7+'0.!'-,/,*:38:(*0.5+#.*#9"-'(#$+(%4/ '1&2/$)/&62,C+#%,6+#//'/5"/-1#+8#6./4&6*(("%/20#)3)--#1&%$1-/%+1)*(%3)>2.(9)&:;'+ .,-1&'.(2*'$,5*/.*/('+%,)(-"73!53(/%80%0-+*.$-.73(73::.: +0/331-8,3)26+%2+2#.4,-&&7)"(1-!3/$!!-7,,14(,%/0*"--2)/"/-/3$1<%-7&.3(56&",/'=-874*!)*/'%,/$//%%12$0&$+/*0/&0./0!25&'+#33#5((-3-$()51%/5#)/'403%!&($4)-/8%*/)!)/8%=',-73), $5&,&5/-.+$1.<.,+0,/-<(264/"*$)/(0'60"-.-1),5'*+/6-3.+!#""-(7&C*6&/0%<(*(10,"0+/10#.$*)1%$/*0%)"8+%)&03+-$&'3&8.*%+.&*,&-/4,.3++,+./239&3)- 
(8,#..$(%,)'2*%.-)./,/7!2)*3,)1%*2*/'"+*':/,2++;34#&%6*1+1,!.$#<27/)#-?167"/#(&#,<"5/2'0)+3+,6+",<)/=,.,)/%2.*%7&(9:+1+6-0'$,4#-1. 34#%/)10)(3.(--)41-(.+-)8(7%),-5&"#+$'+7"06%%/70+/#(>+ ;%/.0 .*!'6'#,02)0,*,0!93*6./-'&290#4#'(/*7>//,-- 4%06/4.-+('/- "!2*/%*6 /,34&%/62,.0++-*1,2)3-3",9-4;+(1<5+#++2.ҀӀՀ׀؀׀ԀӀՀǀـҀ׀׀Հـ؀׀Ѐր׀׀Ӏց$7(6%,7//:-.'%/'A)2/8"4070-/%'$3*..9,.*).1- ,6**+;18#)2,1-,'( (00)&.*'5.'4*!0(-4;:($";75,4)*-.!,#5'43-*/+57&&.&0*/"405%&5+;2/# *3&(8$)!5/62,*4*/+/0.##%40'"((')+5(+-01! 112)&'<5"4&%6%8)'/0&!-%'-,(3&+''7)5'-12%3.5)+ 2271 2 )&-041''56/!)-2!%- 149$*&2"+(6! ) *#()2.33/6,6'0-,*4(:"/+$,&%;.2%*(4/"!.!577)00%3# #57),*)9143%/#2-8%/2)518,23+&(72,%.*4)%-()21)>/=6'',"'(&*+0.(2$'7%& 1&,(!).& /+(0@)+4837/2-'*)<*,?:.).,ҀҀۀЀԀ؀ڀրՀĀրڀԀ܀рӀՀрրրـԀ؁1/.5366%3+&6./66-2-1346'*#6 5<*)4"(.7$-48)3&-*8)14%'634$6, "($/&!4,))$1('4,3-0###'-7)6.+419/$',))-,..2-!,%:2.2,+#&3.*.0 '71G4(@2$4$-35!/-,4'1@-3,-11*"(-<)')5((+.11/,"3351+;2!&1/6--*,%$2,,,.+5)'9*1'+)7$,0,+-,-**)5 /(%/"+-/9#<623(1(9,&0**-10)1'+(-:%2.)!((;%223$/6218(.)%*)0.0$#4(33:'-.&.&9-50.+,.4*.5<-+,=&,3/,$15 841?%+++,.'4%"9/0$)+&'/+44'%2'2$#2,#+--2,$)05.0./.,*/+6,41*$/0%9!96'*&#?#(1%4'+++2:).)'*$.5*6)/8!*0.$3&$*$0(!&18$6//54,&-";#)6..'0",,!#0+2(27&,,--I&)/-5 $1&)!*12,20,@DB95)1ԀۀՀ؀ӀҀ׀܀ŀڀ׀ـҀ؀׀րր؀րрՀҀ؁+13*";%"*7.)2/0):5(6/8!)/1%+*:.).+1-2C#)++#3%)*0&$$14*5%*#6*-.#31:,*335//)4)02435#/')4)+#&-(-2*&+6)*457*(**33"-,&/-"++%!"/$()+"/=/2*.$32'-#%*;&+#43%")4!(*3)'-%*%&/51 !.)4'%,%-'-./)#')/)-1,%2$%/!2&!1)3%).-2$(1/)#,23)1()!4*85*24*.,/-&);82/) /(!--2$//(...'2(-+.2*&'(;3&8 2-07-*'*0'3#--(/&"*$!, 9(%74#)0),5,44(1-#./+)+2"+-8'/"1&-&$'/E!/',.'2$/%-&+% 4(.4)(/;.%42-'&$")&(#,-+)0-7+4&6"+/"3+'&*!(122:&/7.*1$-+#5&%&%(-+%*/;1+$!8:1//&!./.!#.-*3+./24/ 2*,+(1,%.#!(2/12-( %:(/(/+$()918-0,/.1*/-7:(0/$5,%րҀـԀـڀՀԀՀŀ؀׀ԀՀ؀ҀՀՀրۀӀ؀ހׁ32.%&.:25*&!(-;%H!.51.#/3(2)**,6)".44,)5,%4 )'2('.'3')$#(1(4'&$,$), 3%1/+'*.(9#3,7#&.(%.)#,)'/'=9!01!#++12))*5'@".2%4 (80.614+.+/0+*-0!#2/(-%+) 
$++8(-2*(+3'&+/#'3&"'/25"+)&'!+5+,#,34**-5+/(=..$-&.12#&%.+D4%5,!..,9&(2$)):**%)*")&/4/(*043#+6+$%!7'$B)5(3+ )700)0(1"$.)17,..(%05-5%#..$#(&##3'+-)-#&.2/0-=3%&-.0#00%25*D02+6+"&/$,#()&!%)/0410#-0.)-5%/&(-$&,*5),48,)'$0. ,+/3.24.(,-")"(5'$-,(0',((',(+..!#'8+8*/--(4(:,/)*+%-/.-(2"2,4302.)#,&/($.31-):'9556(+ '5753" #7/.,$5$8&)160+7%4.0-7-"%14773.:3ӀրӀـԀԀ׀րԀƀррՀ׀׀׀Ԁր׀ԀրҀπف&.6,&2)0*+5%00-*'-/+9,-2:1/ .6<4+11960(4350<58/5.69(."9/):5%42(/2./3!,*?2--(7+74/3(23.,/+;30,2(?(*-0/*3,8%+2.08%6'&?*&**'-+-!)6")61,&*$,)$#*)!--,)1*621*1%+.,$%5,/)')2/#!(#(+'!?.4-03+"(#96+#1+$4)(1'.**7'88$(*1/-%%.42/9*+1'!)&!/+-(*,,'*".>+0:",4:30)#6%/?!(*)#*-/")!(.'/+)-2# ##',*%)#@++)*)! *0%*3%2"5'17-#$01/624#/.3)0//-(-4736*/:#778.#*,*+)06,$:/)&6-"('>/4:@0!/#0(.)$/'+6(2-&;*$/(,++&245)%&.+%!$$1-!$-(/)+-,.8')/$1+2,+&/3',2-4#!>69#)5#*.($).,(1*&*(<"%>,)((<',52',3.."*95&7-8.%1"-0$2.#207 .%*++980&%$/)56+3#"4#-1" 9*'4(*++0$"2//"6'2.02,/'?7$*.,!%*"+*)0,&.(*-#()/&/$!2+-3,.(+,&,"-/<1/*<)+-,+6',+7'-5#$*0')!)+,-'# 12/*#-&/%(-/+-4-3C*011-0&A3$')%-%)5*'*&3!#--+8,+3$*-,>#3+0/;" *0.<7+'(./!&8+:*%؀րՀӀՀπ׀Ԁπŀ؀րԀ׀׀؀׀ڀ׀րӀ׀ڀՁ6/5;(05$70-$&,"#'6 .4,-2-$/'/#8/(3.%831+..20)$$03%.80$8%'-326)$=-4'43)"#<1!7,'/-%2 #%#:!!/2+*')'.0#-,.1!2-$#(6/2-4#%/6*+.%/1 /'"=2,/#*%/"5-6.(#9'/03'$',37.+!%*,'/133)6.&7+ .**3-18$'"'15;3/4'.2/@2#+/16'%&8,,&'%)+/+91(80/)+"&&6*'*-((".(5''-.+/.6<$6-48$4-14)4<#+)9'12))2/)'#0-+&'/+((20*2+5-1*++*7'$.-(=-/-.**%0$4+3024-*,*07 ."2,/19)*//!'/"'+/7,/('&((.$+!'-+$-/+(.1=$661-(.*)%%!4%)- +1!0+8+;(,;+*2%01$)/+',4%($-+*!,&,*<(1%'/*2$),;3/3/$&/$/,.%;,)5.&/ <&+'3+'#3)*08 1( %10//,+*2/(7&1(*,*-'&5 Հـ׀րڀ׀Ԁ׀׀Āـ؀Ԁ܀Ԁـ؀ҀـԀրՀڀс262<5).%9)$%,'.172)#1.9(13$#0+')&+$+)18'+*+,*15))):(.%8'&,'%030#)//).,92-";/&$-%+%-42(+7&#'.+#61"#+"#4%#21(*-'(0-'A(3('13,3$+4((3%8%'(3/ -%)%#"#'#*$)/!1)(:#)"#%#"32*/6*.&((#(3-83423/& 956*'2#*"1:*,(%'*)-*&%/&'(3.%6$),,,":'30+12)(2'.=!'9!%%.'161.37$)34+)/0:/+'%&''3&&0",262*-..)3$&.5"00+!1%6(-&"#2)+&)( ( 2''&(%104)-+',(15+/."/*'&((&0'.7"$0, )).*?&*)# 
08"0+,-*5-)*%.8-":+11+5")2&"50)%%-*+.&2$3)#"/=6(,!%+%#>#14(&).'(%(-%'95/3(!,4-.581)$+/)'!05*6- 02.,4<12170+363'&%.$4,.(3%=2#3./'!%--//)5+-(*,&'#րڀҀրրՀӀ׀ՀԀՀ׀ـۀـ׀Ԁ׀ՀـπڀӁ1,7+',&--)74")).!9).*'*/%.1+3+,)51.(6,##76$7',&1,,'20/3+,+3''07-'/%.+6!04/<.%/-.0>;)>31/212#52=*0*&%0,/)<)$.&(.5*2-!&$6)&25&)."-,*&.*0"4*5'&-/4,$$&-1%&-%33'63+.)53(63-/'80/=-,:+2'7!+*)9+>"4)&)(/).)2)&/4-50,1 -,$4)4+,0),.3%7;+,(1-!(&1+-)-+,-)+,-,*(%+* &/3'#23/8-%*4.&$8!2/'%)',&3-3<3)+&4/-#(+%/-3$8$1)!&.(*?33*()/1'*,$0.81921/(3/'/*,-$+" !)*.,%7-4"(75/9$1)*,*,$/-#2 )/11%'+0/-(9/2)75,,:$+5/)+.$/0%"*/.$"*1&34755*/+-56(85(,&9',+%.,+/,+!;8$(-2(/8+3.4*0+-4)*1&)063&6,)9/22++(-.*)3(/$(++0!:%62:-ـӀ׀րۀԀـπԀɀڀـ݀ԀҀҀ؀ӀррՀ܀׀Ձ,/7."&2#;$-//263.!/6-/'1')-$(7<%%:)/922$/(-:!(.2/"/ 36!2(3,+520-&.$"'%'%.0352<0/*3+1&-%/$*%052/-8,-22%.*%9*=*'')3$) 7'/)-%+#5'*'.,!&(+5/ =%:+'0--43106!:-0-(!'1/(!-.9*&15#,+*.6!1$)-5+),+7*5!%)5+$/2).+/,))%2%%'*+3-(+'%-' .#*.&0(&!)4,/-.1)=04.+/ %3%1.*(#(;+'0&)',:-&!++0"+(++;//2))0,"8!*)!).10.4&$. '&+%)-*/.2!)) % 4$0$'.'%%$0%./*"!-*8.8"8+(*')5.-*6* (,4/"0-$")0*!*)*-6+'31&2/)'#",7-/+(4-03109-'".4:*0-&&. &&/,%(%"*0*)).4,',+)&/2!%/% 13--4/02+/'8,.3$262-$%+0=&93>.)9*0%.4+33(/,(7+.%-ڀ׀րր׀ڀՀۀԀǀրԀրр؀ۀ؀ـԀ׀Ԁڀڀف+11?047(;/74*191-0&5)1/69.&,$*0,0+4.&36&-++"(,;0$!5(23;.2&.#-()+&3)$3%''+7*,#!*,13.,+5&*2)-)-E73+#0,--925)*-7 //-/#6+155*/+*+)3++65**+/9#.#7.,0&)58(53& ,.)**)'.@-.'!14+8"5.!'83''(/"*06.%'1%+*)6",*>)7!4/*,"0$-"+2007.4*+/)5!)#:2/+5+/6$''*$6+74(%;1'4.()(2+)/7%*3$%22)40--+1/%'&)*)8 8)&79-!/(B0"26"2+ +7;%&)%**7.2(+8=#15!4/&,(83,2/&)/2,,+,,,0.' )2>2++ 536'+&4!*$(-(2($)/$@/$/1.)/%$>20(--)-(8' (!/$66+%.-$=64-@-*-05"(,)--//*&1)7.!+&,.4'.)9*&'(*$$4',3.'5*/&5(./6+3+3/#'!+$."70-07#G-+&/003,$.,%1$9)׀Հ؀Ӏ؀ـԀۀրǀրҀՀހԀՀ׀րـ׀Ҁ׀Ҁց%.+ 2+35+4/&805$)#$/-0*,4'/0#1)'.-$6/!15&24"51+62!17(3/)96)&!*#+')|݁F%#0>:&#)=+&,),.+'! -!#*2,,+-!*.)).')3'3$6,.;.<1:--/-)&(&:3=0.()*&&+,,58"$0()58;7D64-+!1#*$5#-+.0"/!$0/ '7.)21+1!/+>,-0'+2)2&).-5-%%%,"%!,2#-/(. 
;8+"#'()0.0* *.+(2,+' 3)90.3/'30-.-/+4')"/,2&,$,-"%/($",#-2/)$,/***%+!7'$-#*&0-/&.'-&+'"(*2.720%&023"+&-"'0*# +/)&*/30/5836.'/,1/&:<-./--%5%%.2 //.*9'%;,0!',0,)0,6)$$)))'*&$#$-5%.""*+;1(:3,!()/"'-65-(8+)1)/4Ҁ׀Ԁ׀ՀрրрӀƀـրրـҀՀπӀ؀րրڀՀׁ+*15.1>:&(.;$-+#6**+*(9++04$'-5..+*$60+'/##0-6$/%,B(!:'&".,#,+$(,.2.#8*7'(/-/*22.!273%'1&"5+112:#.53+1*041''6/0"/ ++)931/5'3('.%2,QR6/*/+/"3).-*()"//#/&,-&* $(*/$,.*' -)/.)( ")%*#(-(2+4!+=.4/1A/ 1.65%22./1.10.,,3'150$02$3./$ ''1",.#0+1&%1(#,!2)$2,5=,;%'(+7./3*%:,6 $,+"0/)-%*#),*?+&%1$-#.*+)7#015 20&%0.#)*+%(.#3$2*-+/&.20I-5$)7++,2(--%*1-:34/23, /)%/+7+,3=(&!("% ,/&&5*(-,.1+!&%+/*''0-+*2$&;($+5.4)2)!;$3*4)80--/1 *0-1(,)-* 2%0&"/42-&()+6-17*1,:6/*,+-$!!'-75,5)%'2&)5/4"0-8):5(.;,6,1(%/ҀՀҀ׀ۀـ׀ՀՀŀրՀӀڀրր׀ڀՀ؀Հ؀րρ)+35'?5%.':-*,3C-+!%,9"&17-2+24.%2/%.)+32(9'%,8/(5/>?+'!;/))(#49/%*#12())&-5$'2'7//!6*00+&'/*--+)+/&%3++,$/$$,6(0".+/++/1-'7*%,1-/+7 #14#"1#)%%+"")+,+(6/(1,&/09*&3)/&8+;/.8,"(0#0:7066//4&,-7'/3+43.2+/(* 6-)4#1$14&0*7+%2#&'246#'3(+,%&)*J-/8%&-.0(*23*/31+'71-"+&.*)(-%#2*4;+./)5%51(17)"###501'*%')+( :#<&/5#2$!-6"+#/$/045#(+(1(-/0*62'?'*2&(.46%!($)/'+/&-'*655+(*-39-% +/&-*'2')3$-%+("#"++-%)8,>&(9!:*?/,3''"--'!-9-+'&*<(%2).+"1$.##""!$0+%--;(%6-3*)&?%/&)7(>#242;/+/#7B4*0*+!.7$5%*=%;4)&:93-1+-)ԀՀڀՀۀ؀؀׀րŀЀՀՀԀڀӀـӀڀրπՀӀځ 5+3'.&2)/2.+3,($.$7(#+'(6,.23%15D4'..766)%-'4%*%<9'"-%"76#+2/6(2&(,4'()*#$&*&.6.2/%<(.0',*/"(#*/-14(7/*%(%'%'&0@*65-/""(!)-4%2119)-1$&+9%$'%"*1$55C13,&)8%#'1+'.$,5-',/0('-$-1'+.)-%-.()&452%!5/5(6 &0-)/3-/"&1+%+'!!8*!*-!4+0)*/++'3'$&(*"$$--'#)!+20/!6/2&/8$(++&#&++;)24',0.&89)#,9-,.6-+>26.'($);3(%51&./-,,+3077׀Ԁ׀ـ؀Հ׀ր؀ʀԀڀրՀрӀրրۀҀۀ׀݀Ձ*369 )'$4-/10/7,)./2/2!7'"!5.%/+-.7?1+(*'5"9%4&/&*048+''%(/-..1"/(1307421+(+)(+2 &6%$#).%+,1(-230'. )+:&0)8/)4!<)3*$/1:9+7+1#,/4#,#')#-!148.'%000:(((*>'6"'1(#4-,2-#!'&1&)!.9)# 6)' -*&2.3..2+)$/014*'8&#,)+2)(#,#2*3%(&''.&14<4&6$* ,6&0+&3%(&31*$*+!%%:28.$;-3&0/35'2&0+4%4-2$-+%31%,%+0.)/31//36;**($1+C+5!*"(:1*".%36&#$$+#0"&3//)#"*-:%:.*#%0&-80&'*4).9(+"0("5+)625-.22*42<,((+0(75"$>&"1! 
+<3,+&/,("+;!,-922*/),"5'!8+-6,0,(+)*+3/,.(..6+3(&4-*6+&/-9#".'-#;/*1(+859,#(:+3.'-& *(,*)0)!('@7-,*,5(16%3$6րـӀ׀܀؀րՀҀŀр׀рր׀Ԁ׀ڀ؀ԀՀЀـځ20 335,//.;&=2/#/*,&'-+6 ((--&@);5()/++)111(18'(-',.+(.2(#(03+&4+52+-)/* 1)<0#$D33*0I%))<,)0&15+)%'%!(!:-'?,.-%)/,8"3%)'$!..:.#7%((%4&)).-+-%)10"-13$&:,+%6-14/(+& 42+86$,.//+!"%+0$'0:/-)(%62&$,+/&&))0(.+#%$&.22'652&-%6(/)/+!&.62*.4+-90,2-$2/% 75%!,./90<-%-31.!&%3(,.,+.'25"#./!'-6+-*./3$$-"-%2+#05$2/&-%-,4%5@//)&;+7/$%/,**497%/7*.;#4"(:23.-&+.-$2,+;#*#2#)'2%7'*0-$,83#,.9-/&D".')+*.%&&--4&*)6)'"$(&-2./,2--(+/-0-7'7(0/4'3$04'(.&-&/5&1$"3'-2?-% /5-)%$3'+/%#$,0/$7:-.,/*+160/)=5(3!0.5&ՀЀـր׀Ѐր׀րɀӀ؀Հ׀Հ׀ۀ׀ՀӀՀՀҀՁ/;.2&&&%*'2/6-+/9%$)++4%);<$61'+,,+= .*.'!'338+2/!*$*#/.#)4+,$0/&/:*')"2(&(")'*3-(*),75+5,*/.&-1!,3,#.&.$+"0%+7-17.(283*-6.-.! &-2$-1#,$&(( *' *,)0( "-*+2/54*-202)/.)74+-3/",--)/)*','6(3)**--#)-'&#+-17/+$*/470"53'0(%3,69-$-/'*3+.)%2&/.3/%-/-%55#)0>(3-5'16$%*50/(()0&4:-5-8),'($$,+6#&61*2:%#,)/6!&,%).4.)1##*,*&06/**3%&H%*-(7=#+6*6(55,%%#1+/.0'--15693,7-),+%'1/3'(6((.(,4($/ ',+/.'9$$-6&*(/2*;.3(2/.3*,-1'65**!1/ &#.(#..$6):+-0/.#-05//'A-4%)..*%%7/'15"/*&6+36,4.'.92"/2!+/"(+;0+5/, ; (5܀րրрրӀӀՀՀˀրҀЀـڀ؀ՀӀԀۀӀ׀Ԁׁ."9:1-+ &,-"-)-3!'.)%(1",6/0',.0:'#&2)%,('-3,*=60-2+*3-6#3815*,-7211'"-")+0+:87.3"*+0,8 +.=+$.'/815#'*1+,97;-$(&'!71"")01.$0'/.27o7) './$0).#)#/#+7,2!$**=-*$/+/1.1*."2&7,&7/11+392(#)+1)#0+4(3,;5/)*1('&+*10+-(0*12/5/*-0-%**' 8-*)(3%7.&+0+1*'33++*92-+41)+1#..10-211.'.5.,#'/'4)/.."5,1'+') ,:.30**10,**+(-:4)&*,+'-;..5!1%3'62B6/1/3(7',)!!5%#$*3+'5.$6'13#%.)+."3#/*&''313&3-(50'+0*-/2-6,7))*="#/ ,)$%22"2,5').3&)$6/0+#$-31.-8 52''7#.'1:.'">&+-&3406/3),)/+$,)1/.+-/!3*,&1+)+$-,/0>%3+-*200$/-61&Հڀ׀Հր؀рր׀€ڀր؀ҀڀՀـՀՀր׀րҀՁ/<6(&?3)72;!)1/)2(311+6)2(+5(9.7+.=44)#**=*2&+#0#;$(((.&( -+',':-+32$+8*"'$-/$.-20%.0#(.32 '/-$*#(33-6,+)*+2,13-+4),'--6&%$+'5!#'%'. 
;&,'*)(+3.$-70.&%2'1*#4')#/&$#52-).,53') 5-7#0'70,1+.5>.+3&13$(.,&9;'2%-.4205 */+0---41$/ '3).&).;1-3%1(8$*/8/-01/*'12/;-!-!0)&('23/.,./0+;20*55/&7"5+)5'')/*.($&#**-9*5';/5)/".)*7('9)-/@116+)3%+-;#.:*0/-**.'4),7%/),)-,-=+:$$)034//.3*+)-*&1*,/%&/0*/+,#+, ,5.+.%1.;/)-.5(.%1-2'%/&*&/32+/47-*0%*&"*9-7.%2#*71-D+"&/''8/),)3"?19(2742594+(&1&>-*32#%"@190-#11-8-:0;/+؀ڀՀӀԀـ׀ЀـĀـԀۀڀԀ؀րҀڀҀ؀؀ـӁ*6/-).6%*2/#.474'*,.,-24'*&%6#)8$-+6)2+?000+43//,F174"/%, @1/'-)--'228)&)-<.- $','C/!8+5(*.%-5-2)54(-///$-#3-&.&4 $3+2'2(/*6&12,766().+6-*$%.++*4$@+);)+,('#0/%)+(-/-'!<3'32(##./+"-!%46--4.&&-01,&&,/',!&09/$-+.2(1..*,+-2450>#,/$3&-(11$'0*#+,8-&,1"1 )'275+(/0,7+)435($63/#.!1)&*-+0/-4/*2&'&3&%.".,++,,.2*#-)($()-",2'"1/)%)$', */",%/##3>12= +%,+5,.;'0+33-)+.&1-#-.'-0)-*3."15(#&&%.)6.10#-%+'$*!9+',+(+*--%9'/0**&3'>5#<(&5""0+,+-%)%$(+<5)1$40+')/4,!.)2(/'&-/29/-6)#12".1:.('2./(+69:-%-.1*&ڀـۀـӀڀՀπҀɀــڀԀڀڀ؀ՀՀҀրրـՁ5$'+.#*5/4*)/.%&,),!!7(2+1,(,#11/2/3!6,/-%0'7/%+*.%43#/-5).,(,%;/+#"1+72>3--4'+0),;-,6)-'(++ (,"36;*( */#%'-&.+1$3)6'&;,2#"9)/-"(+&98()+1-8/)'.!"+,//%!,.>$*/:@:5,/41/(&%4$&,/*())+(-6&*(-".$'31="0.*(/=).'*,06$72+*1%$&5,2-9-0-+!*+'2'-&+0#18('7.'2)+)6.*,/(5#:4,7)+!6-%0/'4%'#5)$41:25+0015&.9",9(5+1(4(.3.!02>:2(0(F*$2*07(&672).'+*(.47. $/'&/01%*,'5++2++,9$(#.*+'208'"90$)!+$+$-/6(+*'#'70.#!&'4 %20())83 3/"32;+13+'0)1+*(3,*!+&'/1,6())633/+6),%+6)('/=)31)$'&1/1*-&+$1,*/+/'*/&!!1'%'6'/#4%10/'.<, 0-#//"5),%#/0+#7-33,%&($#-2,'-*#!4+-#*0'#1#. 
")"*9/--.;%+,&#/$?7-&##8!&1)-5/6*A'%0)+%%0+&261%&,$=/6*"7(,!-1-$/&6)5#,+//=(# 94&') #1?$# (7,%";-"1/$".076.3.&&/.21),#,*&$,4.1&71-.0&53+1+#.#*/9&.*4-1&8.(92) 1!+2)*(60'#)/,!)320*%8#8*--/-*,)/+3-&+(3)/.8/5&2*%*+'2*('-2.%) .,+2#2(1)-+/0 0- .#3&J'*+;),$3'-++/("$"772,+>45--$3A43+.,8#7/3'".,) (1914*12,5%')&$/.%627*%9;,<&րҀԀ׀׀ҀՀՀՀŀրԀҀրЀ΀ՀՀՀ؀Ԁր׀Ӂ.+):3#549(D/-03%%$%5'-(2%+'':$%/422'017019+0)/'0(2(0&'*9+0#)45'*$&-%+#$)+(#2/&:0*(,*--**),0%*/,%,0*&+1,- +.,7(/$.+!/*0',-)/81)/).)3($##+'.3**24$0'$*%+%7-/1).##"/"#..+)0/07$21'1,/-#4#7:-4$%/$175'+%,&(&1.2--3'1)C*,))(#0&%2$.+&60%(,%$23-"&(35-$7- .:-(%5$&/),$)**(0./(/(--#5*$!-%"*4/C-,0""&!)'/6126*)84#51(1-&%17)"')!3( 1/+'*("/5$81 =00.+/!012*6)%'?/0++3B&6')*546)22#- &5C+/'5,*($)$+<*9#,;2(6 =3(%*))&("%9$1&=#/!,&;.065-7(*,7#00/604/1-/*80),, #,"-3C#*.40;6 50%+./0&+&*. +8/25(70;>$1/1/%(67&2<3׀ԀۀҀڀӀրՀԀȀڀـ׀׀׀׀ր׀րՀҀ׀׀Ӂ//5,**&24;'.37-45#5'/1/1+/)''.7."&6-.)3 /*2+=);0*,3:,$(-(6)=7,/>1$*(2(/%+6').02*.!5$)*,)(064(13/*#&5/.-/.#+%#,')%99(-./)7-)++-*7.*/+#03(/+6/(%+!6>-&6+2%",/0-411!"$0%$+/0#.**/"($8)1+&9*+36, *)/-&/+&/&&)6 -*" )#4.2&,&'/!/2+'2('/+/01--'5%1%'#7B*)//)3 0%/,1/,0+.(-%)**(-0*.'5*4/('0(&/-)2$00-&*0&2..&).*+)+'-/0/7)%521.=#0+-'7?&.1#1%$%*+4+),&+&37*01)<"+<)21.4$-1#" 0 %!%81+4)<*.-23)0-4/*,#/+51%11,,$)3,7#")#-2.1)4670 01('.5C**1)23-"),4* ,0+1/;-)8''.3"+)+75.*)7,9/#*&51%---))*73/0.'--%*(/$4*:ԀՀӀ׀ـҀӀրՀ€׀ՀҀ׀рՀրڀрՀՀҀЀ؁-)1/<+1&2#(1*5,(,-()'13+%..+,/(4'5!-6##4&9+8(6,-+%30/>$53$2-&+" /,5&3),+'1/#74-*!,)00$.%,$&-&)&$)*)1:$--0!6$2"-+"7+1-(5(/9+74$04"1(30,%9.2*',)%&)('$9#4땠",30$/ ?8/4&#/)+ 3%)#+/)2**(-"6)'&*"'-0-7),)5) ')%#6'04'+.$'6'."2((//(/*(8-&-* -*9*-&0+#/3.,);)141'$,)(9/%431$!1/)+!9*;%+7."1%*-$4.")'0781"(.0')#3,02:,*0.3:#.%&7'20'!*/.$$0.-+2$(:#"+ .)(*6++0*,*<3./&'&-7,'7#+#+3'))308/0.@&/($+9'23$76+-62$59(0$!/;& 3/1.-3-,1(%&2"$ :/))(302&'%/+0'*/)4'(3)56. 
*2*F)--..'(028.81-.-$5426,(06*()6(*׀րԀՀـ؀ԀڀՀǀҀـ؀рӀҀՀրрՀԀրԀց4(2!#1"5)67,**(4=),3)/2/(+'0+)'6,(67'1(0?+%;()$!,*+8'&.40)!*%0<1-5/=#<+.'1/&#$9+4".;<4:5*2+*!-!;"+/'(:28 &17''-+2(0,+%+4$$)2-2/,('+#*(+1/%+0#)3*:.)*.0&-(1'$/)1--".,83)/%*1%#'8!2ЀҀހπЀـрـՀȀ؀ހ׀؀܀܀րـԀ׀րЀր؁/012*23$+29-&29*&2)41+178&/"58&0*,43/-"#$,().4,$(7%1,'8-$4.)22%.*****///,1%(%(/*#2/0&0*"2,(5#/+"+.)/;%+-$5&+)/%&--#,)2(&089,(1:02,'2$'-.85,*/")$$'-*&&6,(**1(6.M6/1+5- ,45&0,!&4,6/09*)'&+*./##1 &.&')$/12)0+- &&!(+7)-!,).#&)**$+*-*8'$2$2*4--()3"-&60/&'81#))44'#'0,8&#&,-.!$2%46 0+;-2,1!+(211/"*-21/93/*A/,)(=0$3%.$("!1(,",2)')*02#71&0)423*(1-&37)4+-%,/+-*&0-1"0&-3&#$-6,-00-+8$85/.+,.2%'3+0-,3(5&)0$)34*/2"+"//'*.+-.-./$/.1,40010&3.-)+"*0$6/-$:2+(&00.2&+1(*$-/*)/63"5223.#$.1+/7*9/./.րـրӀ׀׀ۀՀՀƀ׀Ԁـ׀πԀҀҀЀԀ׀׀׀ځ2:"=3#3"%6:&5,*)6'6&(B7*5865+*0/-',"*/29,613%*-+'0)*.))&1%3$#7.62()%',%,*;;+%2'$.''/*,.?(?0'*.,!1$")+4+* 5@5")%0,/0--5()/-/!%+*1)++%+)0!(-+)"+/&00,*)155'5/%*//$")-$*/"#+001'0!.*++*(+.&%&4%10%2-)5*)4.,2.(%301)%.-%381,2'3(!%'02-3".%/#((0 &!',++7'+3,,8"/,,+8"(&-2-1-0)(+"(/&8.!'*22*1)/3/).6%%%0-23.3++-%/'.5-3$-/+/,/*")"73+92)02+315.1+81/$),*&.63%7*0*++4//16274*ՀҀހՀӀڀԀՀՀɀրЀڀՀ׀ՀրӀՀЀ׀ԀրՁ3.';-%.73.%*0:0,-018(0&1(60/*72,)&12/%5<+0%>-4/0+6;8--5:%&-.+1*.$2+*."-584&!-'/,043-?(/3#=11.%(((&!("6.,20"*$"%&(-3#-,,,+,'&*435 )'/./!,-!#&(1452(.(,/)!-$23./)%+1.(24!)+#(-*%2)2/',+*20&'+*+()51;0(+()52)(2$*10*3/32!.(#50�.&05('+,/+.4&1%*-.,'-*:078&/*0/$&.+6&22%-0'0+%+'9(2(*!%:'#(,6,/*12)-*+.$'&*//'5*-+/17//"6;23%.(0(- $/"0.!)+)-7")*0-)#*$<-*1&334(%4,%%#+;/+(0*&*$50'+:)5!&$91"8'/-).&*/+)&562$6%5".,7+6(#)/',.(.*(-2//5,.!/30)&970%81,-.$%6)4'%,0)6(6$*6%&58+";5&(4*'((&9-4$=.,,1#'-0*(/%;Հـ׀׀ــՀҀ׀ƀրـހҀԀՀր؀؀߀ր׀ـԁ503+3!%.&)-2''+*2),&2+5,(+/.0'+1(+,-8.)6:/%*49',001.72('5-4,+/$&%*%$,06- 6,)/!!.1//&0/%&/'/4(0./6*/,-.) 
-50!'+/"/-0,#0+(+#5#60"96*1%7".-222$3 (*$4.)+)7*/)$(0+'/*'',(.#?*!60*-)*,'%*/#%.-2/.-"&&3((&,5C*2 ),)5'$7&,+$4!%'-2"#5+%&%>%9-*4:'636/"'314;/#+%268.)&6-&&+*.;0$+'30#21,(1+/0 3 )1(1$2":9)73#/)4/*#/&(.%%34&)#649.,*%('(0''02%0.')(+))).);--9&53&'(!/*/3*-.,$2-#,*, *.%1)-()&%)(*!!.)2-*)'+// 1&,$,(2).()&#+,#/-,*&**.,6#$%*0,):1$-7(-<(.8$ *3/0"215*+101*+; ('4;31$&.4=92/*րՀ׀ڀрڀҀЀ؀ƀԀڀр݀рՀӀ؀ӀՀՀڀҀ؁-'(+11$1+*7D/*+*0#.40**2(4'7$/522"+%,(+A8?6,)2.==/2B$7/*5&4"*,/#-&$2.(52&)%%./38$"-.--(33)-#&%+)/494-3)3+2'-/,1(-$))#&,0%)-,+56(2'0')&3-*/#%*(&%&'*(3&*23**-1"%35'))5#08!-,!'-)),.(**00/81 (93>,-&,8")(*.-&5#&#)-4(+/$&/2- *!.%.+.+14&+51%.$-401)!+%,8713+--6!*(11,-3$$1-&3 3***4-)-*-09, )0*+%$1-+-.(."038).'(8+'+-/07(-!-051%!+1,*-440!+2+01('6,7302&0'+<0./:%/#%(-"2/!(/.4%1,*((;%"-1.&0.(-,*//+)5*8+*&5/05+/& 0)* /'.*),-(.?#+*%/!"%3)--0;(3'-.%%3*64+-+9-%!%+27(&23$'63'!2<++''6$"0331(1*-3(-,14 74##$.$ՀۀՀــҀ׀ڀրƀӀրրр׀рրЀ׀րڀ׀Ҁԁ,)/-($$+)>49*4-*)02./2&,(#77)#"21&/4/(%-'7' "+54*"9-.(0+*&,#-6/-*/+2')514--'*646/.2#3,4#"1-'8/*0/.<(4)1+&*(3%"(!)&'+#()(9()*%4A1/.)(/2""#!,4#*#,+/'*4'&'-"3/"-(7';"1/9):(.'%;+8(. &2-'-/-7%//3-(%>:*%'4$-$/2-&-%0'!&%='(#7/$&,,(33/,-2-$+/5'%$31/'4"/4$2/-+,/03/,.++8(0&-/26*+,6//*-"$#/3&+#&8'B&3+*);29/!.*2$..!+1$%4+'/''+/% '#/(&,6&.3#,,9 0*4!52/**-/("(3'-/%/',#),"2%*2+' *=)#3&2.+*5#%4*0.,0*)()-$33)0+1,2+,(<$86.*,47/#),(&;-/()+)-*'(0*-+3,,+<,)'!1141-#%./03(%*04%%:'(6-**"":&9,";1:#4-)*-+*2'*06Ԁ׀ـ؀р׀׀րрրۀՀ܀Ҁ݀րրր׀ӀրՀց$945(#%3+"784'*2,1(+)-+."/<-1 @.:3/,1+1#,(*7&001+)4. +:2+&2#1#03.)#1&--(%/-&/,52/3*0*<5+,*. ,0/.*/.((/*&%/55.*+*--&*,,-4$3+,/5,42+:+%1-32$+)+/,7. (,, +' /"2+&'(/"1,-111%+)'/$35-5$",%3),&(-)9-),('%,0$'-@*-1'4/$3.+-.4''-%7%*1+94*()..61//&%/,0!()/,##'/!*+.&/(.5*4.2'5'#.*;$&,)4323'0-),!3;(".2/4-#*),7:/04* 8-*2!-5'+-+%3#7(&+ )1'-'1!-+2.,*)*'*11-.7%%'&(0#.55225*0-2'8)"0',-*+(2)2.,#$*"0280(/=3"6'.%12'+-+73-/5%#'*60<%!! 3&7*'-'"/0!98)1.63/.+++*4(-4/./"%$+<**.1:+102$*#*032!%6!1*3'/,րԀԀրπրՀӀ܀Ā׀ـ؀؀րԀ׀׀ـրπՀ׀ׁ,$$'7;-5,"9<1(%9".+5%(/+8$)043.1:)(,,$''#-1)(D*((!$('*23+6,#3$&#)234'"1,0/7. 
#<*-.&7'4*-((-$ ,"./",%,2-$,#(/%+-/,',)(')! /''*$-2%-)2/0!1%&3$)$1%-#--*&1.++'0***5'8-12"/"0'")1 0',-/=5)#21*"/(3,%6143/,("65+,/%/2')0);8+$#231)2/5,"&'$3$*+*:6+!.3;//#@(%2#,/82&(9',"!00034 +$)1*%1%3.$7%"+7-!,-'-"&<#*4&5,)*(.+//#)66$504$)5/%(-+23/.!6%5,$7&*6/*.+25'/*)-2++!/)+ ,!'2,-/$/'4+/-)(3)-$'0+5*1325.<0&5# )3"$2"*()$.1,,*",2*)$/ #+7++%+''1/7.50 6**3.''6#0.)5*2"20*-&*%&-2+2*52+27.+/53+.+4-%/րـՀ؀ӀրԀ׀ՀǀՀ܀Ҁ؀ۀ؀׀܀ՀԀ׀؀ـف580*31*06--&*+28((*)/+5/4?$372 *-<'+1%%.5()"1+&1 =+//-1,.--,4!-"56&+5,,+/*3-'+;'--$*)++/#.+"?2$805-)%-#&62-**+-*-2?-. 1%&1+,+.31/$$*00)&!,0*'-,(0-)5 8 --%*/+4(*.#H-+' -!%+*7)4,3!./!*$$$8$"'09!-1(-!042/$'(-/,)(12$*22+*0/",<'3)-*!4*,&.5.*,<.(0/5**/%+$+$2':2'/%#-/,.,/$2,)+$/"//*/.4(.140*,#4,)+)5')0-#*0,9/=(/*-#%;'#%')8<)'*(&3&+.+--'&:5$(&$1&1#-,,3+-*/)6"/-4*6/0$,=.'"!() )7#.0")5((+.:3)*$"!), *7"3".!.#2007)2$/1.1%'3*+./31,@:(.3-/3+)(()'**+%/&/(/-,0*'40.)- +-*+,=1"<%*802<'+:.1/&;,.0-&%4ӀـҀҀـ؀ۀڀҀɀ׀ۀ׀׀րڀ׀Հ׀Հ݀Հ܀ׁ09+*-*%/:91)<-/!6+206',45*#81*6+/-21(5*#;+.(,'76%*'+)'$#(.-*./#-(/$7#363+-*),)2)%,@)41&7).)1"-(2/,. )02)8((-32/+-/2)%-&)-1%.%12%/0-/'#&%,%./,/501'#)%1-+&$3#56#8527+$/$43(12*5(/""*).91"$5$5&)$/2,:$(*/00.1+3%)$./+*(/13*.)1,+4%'%-2,60#"*., ;-!%,11.,0 .'#3(++4+2!/$*)=,.)+#*>'-3&2*/+$/)%$-9-*1)2-6*&++,7;$'%4+1+*+#:16*44:4ՀրҀЀՀՀ؀ڀՀŀӀՀԀ׀ـրՀۀ׀׀Հ׀Ԁځ)&$/(7%0)*/1../-/'&/,#,$>-.',*$1#5*++.-&1<,+'0(,)+2/& 94*,,>!%#!&(/;*4!;(2,6*!#*0(6$&11.*.0,$%*#$27<'1+3%'?3'+0017'+,@"/9#-$1*)-/0./?8('6*&/0%+ +1-*"2+/4*+91)*($1;('*)".56:5//%.5/,3%4&+/-+0(*#(3(.5*$<06-'%)-'4"1$)%$.-4..5&3)-*,/#+2%/3!#+5*,*15%3)(+!#*-*-!*6%+,.)%5,-1+0,-:+,/$5'-2,%'+*,$&!+..&//+*.$+0!,.'B500:<'.')*'.)()1('14/0$1+/++$&=( !1)308/*6,32/*)523,/$(6%.5#0#-2.&((4.--:.%..)&("/(4&2 (#3%7,2)/%80/41"7'17-,,$.&'",*)*!%/1%,015$#7"()(,/2)&.2.>+318#%8.637)*/+/55%#7$ <2/!2)+@0,,*2%"30*ـՀՀӀـրՀӀ؀ɀ׀ՀՀ׀׀ҀՀԀҀ׀؀Ӏ݀ց"532"(+637(.14$1*01%4!4E&2*.0:*.%.-77)$ !!0((#(6" %+*/;&6,,(+00#((5+712-5#?"14I+-"204&*&5++*.>+!-6/3-*+( 2745*"76$-&-.',23/+*)3.4/4,%1#&$)-&5 !%72&".0++8-4-,02-) A,,*,(.6.' 
'2"2.*,-,"/%3.&&5&)%#/-/;-%1+51**5+-+001*&/-+/.(403-#03+2/041׀؀րԀ׀ր؀ـڀɀӀӀր׀؀׀րրրրހЀҀځ0.=*1,1'/'+-22,+&.6 )+1,5&-7/ ',&%*,/@4,!/419*1$#*@,*- -<%)(.,&06'/('+&#*+;'(/ 0"#7(8/#)/-(%.0#27#+/15+-,&2"-#/C)(64&1%/38.06'25,%1-%43!&.97+5*"*'+)!0))&"%#'2%90/<+3,'/+'9,#,(:/011((-1+3+1)9,.3"3!/)0)0%-#./-$#5@0+7(*,4%0/ '410(/,/)0*.$.'43-+(2()16/%1+09/3+*2" 200-1"*!#!%-*./.0)?(,)23'/46,&8&""2#*-+16%-,96('%4).0-5036039:--/$*,+%$*+115-'07'-%%2/05-)*&$..'3/',0=/*4.),%$6 +/"'5!./0#/6':+%8*)/!#$"2++).6)-,/*7/!;)+%+=9(8#1()-3)1/3'.4(& (2,"0/**',8&%',3)&*)(0)2+%4<*/+)0*6/<-+8) ,4+,%$)/4%*-%<&5+&2)0׀ӀЀـ؀Ԁ׀ـրˀ؀؀πҀـЀ׀؀ـՀڀ؀Ӏׁ(*/.,=*9:&+35)$"#,+!$''.'"+)!-)//((9*/3 2*9:57-68$.)*/8+**'(-.3/0$26)&4#5#+-(+:2-%(/90/))20+1/#3!)14,7(-,(*9*%0&-E0*%-'!,2/#),"304%*")2/9*03"5+&337/&/)-.'-/++=%(5.+6;'<(&,""210-7++-.-/5%252).($$*-(2+./<,9151%5+*+-/.+&4-'%!5*,(127&&.8"0-$2)')=:3&%.<3!/-(----97-:, 73.-/-%+4//#()7'.1<3''23**$),!,)4"1*/&8)/.*/-:&13)/'*4981-.'&/#('+6A*/&1$65+#1)++!/(&/(-2,7'0/.')#1.+#-(*9-32!.4/17+%'&6%.7**3',&1++7!-*&5*-1.)28.40,+)'*-(97+3+*7$"# 0%2,"3044!*#+/3,.)"(+5+.24$K/'0.45-(,//(#5)1"52-0:,,5(3&ۀրр؀ۀ׀ՀԀـ΀ԀԀԀ؀Հ؀ـՀڀրրҀـԁ6)!/&.-8 (./ +2&/0570:&9---/*,810.-+,,/"+22%<9'-%+-(*%+;$(++-*1&+&!!%("/**!(*&+-&7-1, %,-1,%,($2-54<753130.9-'*1)%()/6<0&/>1738+".()8()/.-.,$1&0!"./-,-,+,60*0'5#-%(*-.9*0&.&-%!2/3(/1"/9%/.+*'-3A00-03//2';'.2#1"+0:$0.1*+-2-%07/'.1"3%4/5.3( %+%) (2(8(/$'%'#,48)783,,&)67%&.&#-#&13+-&+.-),+8)1):02'31*'!%.;)*72),5>(7,:-!%.#0/'91'4 .&75/(+4.)0.+=04) /'#/2%02"/ ++$4&+'0#%2/'078&++-(.4< 58,+,./&30),738"5(%)1#.!%060,&6--&+%'9$*2/ *,7,8,5(/6'/3.**1"36*''1-/,.11"*2*)+'*$5&.%4**90,/6, '.:=+46ԀрրӀـӀҀЀӀрــ׀ՀՀҀՀڀрڀڀ؀ց<#');)-&='0/;/"-)-+)356&40+245,(4'!.4+*+.-7=14/?.12%./4'-4),6*'.)20'+9-(&.&>1*3,&*3)-7'9"!3","+!'+6.& 32+51+,'-<#);3"&3+01.!*"7%&((,3*/%(21&7((-23/22'&)4(,++)''2!(3/17%(%.0)%0+%21,*5>,&/*13))/"++*!'2(1/'/+&2+/.!)'0/021%1*-2/$5,(+"()2(#"-4&&3 * #/+'-3./,/3):*-1 )4(.8',.-*/$*(0&25)$++*-653;+%/%,,2/5-11-7"4512,*'**0+,44/-.,613,.?1!;78''+-3$12,;2 /''1-*) 
%17*3%225,)-&!1!&%,**(+%#/74)1$,-&.,0!,*,!)'-,'0630&$+2')2,&6/'**')/1*!,)20/0620'.-#<-443$0#.)#%0!($*'#.5 .9-3%,/+)22/ 967%-/+(441%-3.14,(1ҀԀԀۀрՀ׀׀ӀȀـ׀ԀԀڀڀր؀ҀՀՀӀӀ؁6#(=!82(+=-A4!+-*/,+)9,/3'1+-5!'/)).*-9(7(.)A.,$(+2..>2(6803&*-(0,.+/..)7.%(1-")'(.41&:0/3$$,.1.5#)/431(46*.#*')"*"!5".--(054'$'53+<14,$-.5->2,+)'/'#',0+%.3)-*3++#7423%5/',-519-!(03'+134#%6+)',:/++))@6&%,%7+*1'*55).3/. &+/1++1/96"9.&/.-:(=#,(%/0,-$,$4-&5)!7)0)+'0$%0#/*'!>)0*0.,.+022.?/##5:0+3+0+)#'/(&2*+/0*)(!&'+4((*%01++(/('/.2304-,!(2/040/(+&1'0''=0%((0)$$%&+1;.,&5-"*&"2)-5-1+<'#B1/1-@/+$#C';%'26&)-33-2&&,214(0760*.+*!*/5)'56)&*,61(+&%)B'-)32,7,-/"50&',#5#*59-65$@/5(/)(-#180,1*+/ +5(4π܀Ԁ׀ــ׀؀րĀ׀րӀ؀ӀՀ܀ԀۀՀӀԀӀҁ+%(14&6/&*8./(5+*5)#&7+>: :('-,1/%62)7$-0026320(5!,,;*5!4'%*(27-0)*9'4+9);2=C+'4(/13) %0.)+.&05:*4%.7#+8,2->1+-.,60='/'2/-4'+8,#5'5/ --8.'(6*1%*-(&2/)%/-.-23$(/-3+/5<'6$7"5-68!C$ ,&.?%((!&<.'-(#+5%0*+%3263-4'6 %'5('295)(01,/,#9)''<-)+,+*/&.'%'*6(+./6-/,0)*50-25.*,-.*,/0.26',13(-88+/.)/+2% #..-/-0#&+3&&)!*:.5,(/-,($+2((/,)%) &%)/2-+-:"*)),3' 8#%1'/1657#$$32$1*3*.))6&+$-'"//$+,9&'*(-:*).4)*- *#,)(%+21.2(<0*343&/2,9#')-F/,%"6)1-+/,-+7",* -+-)'-'+'70;7936%%0$,+$7$""33/0/-+/:9),/7&<,Ԁ׀׀݀Հ׀ր׀Հǀ܀ـڀۀрԀ؀πـ؀ր؀׀Ӂ:#8'(&.,-6(+1+1+'9(65!8#12./$+/4+(.72=4,0(32( "+/1.)"40(0#!3,,$1)0)9B1172%'' .*''$6&2#,%(!9-($).5/$,%8(-*.'%1&+245*")-/,!,./(*0%(/+$ '(#&:-*$10% -.&."((:0%,*("(#'$8)>3!6.($5#/6'&((,##%0. 
,7&.$0(1*5,'6+)'1%*-,)"93)%#/&.;+-2&'./*%'*3/2--- $;(8"%),*9((10-(%)63>'4&D2*#'"3//2/4/'$-;%.,*$//")+&)*)00/'#+.%/#.0,242,.87 /!<19<$7*).2%),'; )/!+3 %-*:%//!4,1/!0"+'"4&3#!01)/0*)+"'*;6)&)1&&**#")8/-6077!%-.-%3(#40(/"9/%.0-,/5"6 10-/'*)*/7), $2$-44*&:.8&+()6+(-'")- -$#*4)-,55-/.+3/&,--#5(4-97؀ՀՀ׀؀Հր؀Ԁƀրڀـր׀؀ҀՀՀ׀Ҁ؀Ӏف6<5-#+!7+7337-0-$ 1#-4),3/(-(3-<)#..+2'2/$(%)4=/23*(#2)4&%+&#://3#-14)53)(*.-"$1"/:*/6+500)33&%#81/281/*0).!7.9)**$+-35.14%0&%)9'(.+.2/)*)#11-./,,#)8+11(8.&/**6'(5*.1 '/)..-+$#-'"!//-5(%(59/+-58-12=0&);"/(6-54'0,-/&04*)5*"0"+&#+3!,0$8('%66(8!.)6(&+ *),321%&0(4.'%,*62'(,)40+2.)1,/)-666%02-6.1$),0)"+,&),/$97""-/))21#4-*2/4(0#($'39(7)0+41$2/-7.#-01!*7/6-'+(2$/1*$-2/>/+-.,%.3('-*+"4'#/.03$-,+/)1+03+(, 0+273-2'%,':3-!84$:*#*2.++/%+/$-.4()%1*(*(.)*))908+*01,.41(!6,/1,,)05+++0%81"(4*7ڀڀ׀րۀ׀ҀԀـǀҀۀӀրڀڀրҀրӀրԀՀӁ9,430/5$-0&/-%+&0#37-*'/,%51#4=3>'/..$./3$0(120(+,--/"/+"('7*-*"%2 &4*$ "1*,.)3&328(6**#*1323)(,''*'%(+**)++/&(+&-3)<9+$7/ 4*(*,$(&-)98.)'/*3($) &9/5/6(''0) *(0-0-+(%11.-"9+/0*;,,#-*9׀ӀӀ׀؀Ҁ؀ԀՀĀـڀՀҀ؀ՀۀӀӀ؀րՀЀف)-/),'.0&-,*3)(/8$.34.2A)/")%7$,8+/0.+%/*$211#-2)$#34+$5&0/1, ,++56+.'035,.-/9$,"01/%;-#..') /".,&,'#,'&+,0!/;,.44,3,,,96!()7;/*.012*$0.'-01=--'62-,&->.2&8/+(0("/3-53**)#+3',&*+!!#!-'82()-)70.29*)%2&+'5. +3./8)-*-',9+3,;0')'%+1.2%!%1,(,."))&+20'-+7&" *1%2))2',,-$"#'1-*,%-*(=+()61+)--/0#7203:.*8('$6+-) *.2*)-0,77'7:*+!4.6!/)<322,%/&87&1*.29)$-,!)/7#-$*:5-5!4.*'('%-/%(.+0/.),,(6#1 >046!&1"4 /(/2&#/?*'1+ 6-3$+2"!--26+%3'"40 &,5+!1,)4,-6.(+!-,'*17)/*5,28+43/(6/%9/=,+75%(-,2+41$*(4!/,)@9/&4Ѐ؀Հր؀Ѐ׀րڀŀՀـրـԀՀԀـ؀ԀـڀӀف52*1*=2*--*/%+9*06'7+8924,#'7&1%=3#750'*4)$=!0+.)/!&1*)#-&6,!9! 
$,/20$1'2".&*, 7'((4& #,(//#.&"./$/($55227*/1;'$%%8'6/+(/, *,/#5%*-)&5-$-!,27',(%&85/02,)()#:(%497&1#3%+-11-.4#)3-(3&3'3+-4$$,/#6-"32-%.-'&(+#+1&5+01'05%,+.0!%54$2/%.6<0% /&,-%-2:%#2%1-,*.2-,-$2,!"*((,,?-.(6=()"*2/+-*2.%#*4'$"((("%"(#).))06-!2#(%611(%*&.)*&*-2+5)1&+4.1/3,*.(!7!/2)-3#&,0'/121#'(/5'!3)'9-)#<$4&"39;+/'0,/%!$62'--27-.1&33',3%'(20%/5!,+-*)G)5--#)6*'-3$:,<*)(%-3?4/0**,'3-2-%-%4.355&#%.<)6$(*15*+/.1'-*6+6<,)6'28**րـրҀ؀Ӏ׀؀Ѐƀ׀Ӏ؀؀рۀր׀ԀҀ؀ڀ׀ځ-//./,(/+(014119'3,0527 4*@+%+"=0,0* %&2,*+#:1,,((5$+/10(4))(.'+1#.(&*/$"-5;#610 -1**-.(/ 2.20/'6+(./)1.(&.*1)9'+33#5%%(0)',.$%2('*1,0*/##3)1)(2'%:-'/3/"%-,%3+*&&,,(32/&.#,#/?05 65,;*.(/"&+%-5/%2$1&$$33+)))+(,04%$/3#.31033*&1,4+&!)21&)3"' ../%,.+&+'.0-9&*0%1+"3)65)/,+%1+2#4-+)#-)/#,74'*020.+24*341'/($#$) 4'1(&- ,$%28$++"'23.*"21,$"2++(2%),3(%+/"+%0-7,&0+%.*8,;+)*1')(%,&:*%,'"*),3,$%31-3.+006/3*".*-'+&+25&,(/,25%,,+"&2!""4(%+55//"0)%$(0 .0'*))$07%(/63=+(+18-/59#+$-+5#&+-0-6("&(-=$Ӏ׀ՀԀۀڀـԀ׀ŀـ׀ۀـۀԀ؀ۀրڀр׀րՁ:28=!!;/*20&61%414(8 (*8=(%0*-921249)$0= (+!(%8#+&2+*""6&'#+(03(/ %5&!1//.)/"'&+&219//2*%%&$';!78%/<2'*,%&+.)),&")/$#-$$2.)/4'&)7)836(''(-&++*,)%(2&5*0'+9'*+#3(4((-,$'--*+*&'"6,!-,%+0"3/-3 -,33-::*53-&#(1%%+'+*&+4/,*'. '/(&+)+1%%95(*!-!"8/ --203"&(.)#%;.714:;2'$-4F1.+=(.2;-*012*+*, 3-9!4))=#&5*01'-4 )'/-*0&'+*.+*"01413"/&*+-:+&;0,%%/-+5*'/-25& /3;$%$301/3!0*2(*'55-2**(6/6215*'$*290''3$)."3(+((/ /0:*+?$$)=)<-+%,3'-/)2+0-#6+&'#$"%7,".:0*/-,%6*#)/*00%.*/,/(-.4"177%():!/,'//231/&,2+//:$/)7+&Ӏ؀ـЀҀԀրӀՀǀӀՀ؀Հ؀ЀՀրӀӀ܀ЀՀҁ/5-5351,.92*)+,%0-:.%3,1(#)1.(/.'"/()/$*3),1.1$-*:-+10-8,)-1%1023.+/-0*./<,",)'%./20./22+'1//) )&874:-2(1#3,'1-#*"*#+-!!'.&1215-2:(2$+ 7'.*1*/'&0$7,*!.);84"""-*(11-2-1.'1!"48'0! 64*)1#3/$5-(#!$-%92,%(0 ($.!"0'+'.%4+))',,-*!0)'13/342+9):',21$/63!&7$'-'1,4177+2/$:+)-2/'0;"5++5%%'#$/")(&%,);-3 )%3-*%.%-%"#4)*)*-(%)',-'2?)+&2%/-,,390&!1!&)((187. 10*/;6(00(%$-5+/.00)"-,+/@,)("+3.'(3%,9$*//<$*+."3++3!0*'-0&.8+13-''1%!' 
%-'7/2.5"0*"/+5%%-&*'2+5-5-210-6@")5*7&3.8+7+'7'5&7+$9-).6'( +@2-'+ۀӀπ׀׀Ӏـ׀ـŀр؀׀ՀҀ؀Ԁ׀ՀՀրՀ܀Ӂ8*5/%52'*-41%0!-+2&!3++'$4//#)4).+7&*+31*1'0*+1&/#043//)0 5&/-/*/81(/2""7&4'703*41*0.))/!).(//#)24((/10 3((0<++:.>1,3+7'81"+4(''3%0/ 0,',)"+&3$$#6*(9!""(.&"0$/'-////&/2')/5%51!&"-#5&.&"&2$:7&"&+&+()7""2*"(+''068$"1<*#2/.+>0)-)()!+0)%+1)3)+%.(43/$)2#.&6)8'-'91).*/()"2''-*4&/+,"3+$)).)70)"0%29%84(.(/+'+45,)(,(1&+/*:**)7.7+)+4+34/3,1+,46#'&.*) 31?)-)./3%2.+,'.3$+%05)/.(,'14(4+/-6%:.175'/0.!4*/7/+)(2%2)('<9-+*+/*%9)-%82,2&+.*!++5 "&8&>/'9)5/"'/&),8*/#'-81:+/$/)1"-71/'--#+'+-*.(14212%14):&/+)2ր׀Հ܀րՀՀ؀рÀրӀ׀Ӏ׀Հ׀׀ۀ؀Ԁ܀րׁ,7#4-1)-?"80/$!.<27#(/!0%+)-88=1#54-33$;8./;4+0'--3011232"#$-1&76/,..853!%%-75( /+6 ,/1$.5*//"-586+.0"2"5%16#)0#,-!910'-20*$3%+,)/2+&&4+5/&3,,'4=1!"/&294++(*/!.'0"+"/4(17'3$)"(6(.!$$&2-)%-.$+)(04)*10*5+!#&-'&.$%/.&1205(78*2..5-2+%1&"'9'+53(/;',..-6/+3&8"7',$(0*-%76(,-651//1+/--)-%2201*4-+/4/(+&8'%0,)2/-',404.$29,':8)/*-%.4(,+3(1+#'$*&$:/+"%1.$+34(/70/1%*)#6"101206"2.:=$-'+'47+*30708%5-&!+"2)&)3%-25)/&!).1++4-& 5"4*B+/;35+'2)/(1()?*((/,6'.7>87)18*/2*1/03# $/*.4,041.9+,3)110-+7&*6.5700$&ڀ׀ՀӀՀՀҀ܀ӀȀր؀ـՀ׀րՀـ׀ҀҀۀԀՁ.+56#+/0,!-./!'6-%0'/9!7&2-'1/0%/7)/+ 1%1#-'4.+.)4/!6/8$.3 ,;/(#'3/-*510(1,*(.%6046*:)&5,)+++8(6");*4-/*)&+#.2$/,0*2((&-6#5)65*!#+-+"#!6(,%'.40@,1%+)3/-:.(&%0%.;.+%%/&$&,0)>).33+*,-.#*(&"(+(4+0"(!(**-5;*0#*,) 4-'.0%#%"19-*+($22*5(:)(*)03)5)##$1.+2-,;%."-0)52'2$6*&>!=0;%7)),6&,/3+11(02")"&6/9&%-.-(25,8&3/+-53*+'8+).)4*(11:2.(&'5*28 .&&'$-4.*$ <'*+'/A8+/#"1)'2572*'+-.7054'*241/"=21,+30'8!**'%*.+57/.1!,)&):-/%/(*(-*7"#9+/+5!--,7./29&0*5%4#%2/)*-)/+-(3/86>5.6-)42;0*$0 3:*-/&'/)2+3267܀րڀӀ؀׀ـ׀ӀǀրـۀԀـ׀ӀӀҀ׀ۀـ׀ف!6(/#,8':/(+(5).&/>53+*B?&,0*!8&-,.*%-$3*&'17/.+))$,,"*#-&3(-#2&22()/1180#(.)2 /0--..'%((C7-#+5-?)1')9 %*+*22$/+2;.3"$!-*+0*5$(7,$:4/',B2*7'2)))54)" ,1.%$$'7/22-(!,5+3 4+2/*7213)'0++ (+-''(.,(01'''17)+./%*&/&/)+9)5-.+."$%5.9&-=-0/-)*./**1#(1("D1#-.1*6#3.0$-3.++)"6 $1#-*-4A(5.-$%1/-*&+4''+-8*(8.+/6"6!:%44&2"3($:3 
*,+(2/-*(#-&&&.-.(6*'6)0-"-(15%%4'3-!0,-))3-(,!683$%3$)$#=+1%&7285(3'%++00?+*.)5.,'//6'((0,&(./,06-5-96=)0.%*2)311.$-,),),'/+,0%+!3%0+,!%)2%4=,//73344"0,4//,+/%''#7*&6& $);2Հ؀ڀր׀ԀڀրـʀӀՀ؀؀׀րӀЀ׀؀Հ׀Ԁׁ8'7,*70*5)253,+0+A0-,*:+$-!5'&7:-,%75'+/.%//)26($)0 $5/3'4#!?3-&+-+..2 1(/%3.30+052-"2#&05/(&,2+*$*./+'),.((,)63//43%..7-(350 ,',$!+1,,-+6*,,$4*+1-3.('#12#0403/!2#(,)-'+-).'4% */$&(()2.'"0-!'%&-1)+)7+.'"+)-!4#.+0#$)4$+'/()16?%+*03'4*.)+*008%/--,,7+19'20-0:$/-+(.1!.2$+8+7#!&-2.;%2'3 #(8*--+-1*//0*-+2;-"!246+0-12$ 1,3'4+'0*./03%1$10(09$-1)-3147()-.0/",&2*,(<1)0%25.83.)C622+133!/-3(--0(/3.2&3&,&'+)&8-,972(71)60)&*40(06)*."+5'8'"$%+++20./ *++%2/%+),&%)-(,3 -,.(4,*+24%#0(-03#,0/0!1%",!0 /8),!*&+9,$''60''9+3--1&?,0-&4/0$/'.=.&5)+/(-!8.)+ -;#&*,1,-+"&05))+/8(#416?(4 713&!2"')'&#+"+A3/-*15/81/*#*:'++/"(.+3,,8*.)',-)&)'/,, +84/13('-0/11,4&2+93#2//(,'?/-6%&0'06=+714,%.-.3+6:(2,%':5'5(##5)+0(,020,4")$)1+/+,.(5(/π؀րЀ݀ՀҀրӀǀԀӀ׀ҀՀـ׀Ӏրـր܀րہ1<-3=$+15323/.62&)3-/)2>8-!.1*#5(933:2.!1*32"(/(:2".%(!)(+'.-7<)(.511,424%).#3+*%/;5",!)&%//'!%'/+5845$(&0&))9(9+70+'('#) /,'3&25/%,))5/*."/-'4(!/72<%/-&#)17*++&/,&",83/,,%%+4&,)3*0/!4./..(1!#)-3*-'(("*+(%+.0-1,%%+7/(3(-.5"!3#%%0*33+5()(50*%9)8 /(''*.:05(C/1/+,#721,((+*0'/1*.(%+0'-<24%*1*2(70-/$4')20**'/'2/)(05',(7 4"04+5+"(1#$'#'(+<.0,4-%++60*#3,+* (8%$*,%-03(/(*+/8%5&+''/'*+0.'12)+/%',"=*7*+,2)"%2*19&0&)&)/-*9$-+*%+ *64#1(23,+./).%/,<#7(&',".'):-$-!-/27$- -*"3-54%+7%#00.,')1)+(,39//-85)ՀӀրҀ׀ـ؀ՀՀƀԀӀՀۀԀҀӀՀ؀ڀ׀ـӀց1'#,-*,+/20.)-59)/%%,%5(!0'/(%3)(=$17459/18-(/8;)&2(+#6&58.0&+4(*$(-/.1&. 
+'34*!&7(*"5+.&+3$)$+#$2+.- #45%44'++$'+$/70-%(2<+,&&,:)/00!1/(5))*4"0.-,11115/2,++*.!/9+!<-*.(&,'1.27++)0),/)*093,4$-($-1,1&&&*75./'&+&.'++'5"$0:.0),''/12(+2+,/5&1%$,/+()0+/1414,"45 "4,),.1%'-*/"03(,+$1+(:"&##'7)93:+632$$0:.6%%16#9(2' &323"--&3,4/.1'6)#',%2'1.(39,"(32)')- 62-:#+(0)7910-*'/!=+E)14.-,)0:%+2*5,%.+$.*%10'$4/)('.-.&&-*5'"=*)6+!6*''6)*4<(,/$)/,0)+71.,#-2,&%$$19))/$.#-;.(;06/-#3*0!*)(1 +0*:-( 0 /؀ڀӀՀր؀ՀۀـɀӀ׀ՀӀրрԀҀــր؀׀݁02+,.6/!'65.)%:++0./0'34.-(4.83/'*/522/$+%*+'4+21-+% 1$7%32)(&'70&(63"%-*-)"$3,:(4*2#(-7/!#*.-".0,1* *!+)$32*.(41-/%10/#,&+)+)/,+.#*/8!:/1'.4%/5+*(*4"%*2$-*%,&/%*;*6+#)((&7@013!1/!,66"4<'&*(40'*+0%/"2&%*8,2')%5-89+A"+*/)/7&129&5- &%80,D,032-'(+'2%37"/+518./25.30;%!(*)*016.&::4/03@:*/.'//+21&0+'+--,1#.!.(6+6%-.'(=*&,2/%1)!&&4@41$))&/3120*%'/*(6(0108',/)''++/'">/.;1%2(*0%*.+5+''%'!&)!)88/-$2"-#.''$6.2"-)!/&-"6%4,1) 2&%0*0/.;*-2'51.-*'))0/(+'3"+'7$,.1)00'23)-(!2.)'(31(;6."8%2!&)'2!29)(49)Ӏ؀րـՀր׀؀րɀԀ؀Ӏــ܀ـրՀــӀՀҁ>?)00)2 6-+=37+&&//(#:+( 1-,$/#-)1.'$>7$5../0*.#%8,)1##1*20,'#/0+302(66-.6*!..-&,%):.-"/-0.('$(4#0-+*+(=91$ )6=0+4'5,3%.#2((%'"%+'$'.6.5!+///+'4%))(+,1-$8/"3*.)'#)%2'',$/-+ 3, -!1.("".9++-"!!>!.)5/%&%)'/$7-1&31(,)2)-&&0,2+,)4++-)++2,)2.23/#-/+0-$#C,31*08/(1.*-/./+)+.55//(1/:.'( ( ;3'41(.,2%8*/'/9((((+),)%+**/7*/),*,5/'&,*43009;(&-0$0+/%#4&/-. 
./-(7+2,&+6)2 0")),-/-/1(2&"%+"'%,%,9"9<+(.+2,1&,(+%*2,./4+)*3+-6/#3,2.$$!-5.3,2+,)71,+/&403$(&$",$-?%)!+#'1$31 +#6&$/"*)(7-<'!/#20/2+-,/3)<3)2րՀۀҀ׀ӀӀӀҀ€ԀԀ΀׀ҀԀҀ݀րԀр؀؀ց2.-(9#%/#+7*/.8/ /$"16''/019+08(##@6%'27'1,//2)1&(&3(&3'5#$64.4=)(-;#)*2.%&3.6-&7/,-8#,1.$*2* 0" "5?,,<--(/'7-+)(=0-3-)')0 -'1B:*'1+))$02+484,,",(1!,#62-<(/*0/)$163./(&)4,62.3&*%)(/2':36$4++",-/)?G).562',1/+*#=03/'+/'*(3/23(:%-210*6%,,*):)B.//0--/83$(%&04*%0&%(,++("%046)&+*/3'+*&$.2%,-:+ ;+%7#-'0"#-1*(-.:.,.3/).&3'3)&+7'($$'51C%1&6%'*,'9,7',.203*/)+"7'(/-3$25"2+'("2'2)$.!&(+/+&1,+.,+!* (',-(**7/ &,'(+'(-.+'11'),6!.+#,!4/&)"*9/1/'(,"#7*/%$1./%"1/./$97%&?='3#/392 '2<"%000&75#(+3&6#2,(*7.9-"&-)54),*+(511@-+&".*2-)4(6.//-&57,3+2,+0.,1+0(7*+45'()2+&)1'44: +, ,/52,' /;-//1:./)77,-3)*;')+4)4.2%/+*0+1)+-)-'*./-265.3-,0-5+$$*6)# 6#(.#(6$'3'&+1&+%-:60$9*$(&4-9',%((1-/*'4/9+.%(.%%0,"54$%-.)(0'7,,/*41(4*%.#+):9,6320*.7.8/+.8%%:2++'.#&,+4(:*5*371/2)1.7-%93(/%0$--+2%#/'&4,!.#,%%-!+72.,)2$(0)292('(.'%32!7)221*223.7)-1%."1 .,,")-)/.%3(1&-1./%(*&/-&.90,)7-!$1%4)/,00(8*)#/"4,-45/%.(6#(,,++"/4,-()-/0*&,"-1'2*'/C.10(17)567.-ҀՀـՀրـҀ؀ՀŀҀрԀրـր؀׀ڀҀԀԀҀׁ-'*#0(=*00%3)-A*43"54721+21.-4+6'8-*,//!3,-''5/-3 $$,$7*0<10..>",-.'/++/6)*,"+*/1,18*.02&3,2/*%)%(A!(+*3$-85=0.)%1%'#24)5//!3-9)(-*/*/3'+*2!?/7*&,/11*.'+$4!654555.5(+'14).$$8)&0:*38%5,.3,&:-,,/)&&(!$*97*/-$38.8$%$.5&+/*$),/&/4*2:3-*&+'"322-0.)-4-1*4/#,;$,7 -!4*,*6!.).&$6%.!6***)1/0**)/%03)5%-%(-74/#+.+%'.21,.4/!('8$0/-10!/00>4./"/$.'?1/,&;!+'3",/'/6/20./'%2(5+&$(+ ),)!4."5$,,,$.71%4+,6%17*#//'&+*')+)!$,)1(+/#.-9-1+0)'%- 7(#.2(3*)&!$.%//,&+1*4(#3(&;'5#!*80(54-+..0+%(--8@*051>01%,.׀׀ۀрՀрۀԀ׀ŀ׀Ҁ܀ՀЀԀրЀ؀؀ՀրՀց/+9)/&&<)-&330>(-7 '+#,('/3(:&1.7)/#))/!(2%1'$.+*47(1(9/)&$- (-;#5'35.51&-&+*/>*4&307/@0.(2*+6)(1'82%,"(1#%-*))62#0(2>9(.8--"&/+(,/74+3++(*.0#.>1&79%)730;#/,2%$#+&+$#*9$7,,*,-4+*6.3).(0#72'%%-,*'.?'6;1*4'&,.6*.+41&,+%0+%(%+/31-8:&$3#207-0+#*,(6%$$+56"-2?+/*6(("$?48:72%-&*!3<5"-3/,9$/20'8)+(',0*3+>$,0#)11%!243<2-:.*,9/.-%-"$2-043$2336!-#"+$*30-*0-- 1.&*#/-)((+4$&)=1/-& 
2$4'+&)-""5*,$7/"*!:3/-)59!-2*2,/'00)$=*'+&'.(.,('2:70.)#//92.'$/+.('&:33+0&-/:+2(/','0=5&&82+)0,15.#//.//2:0+0,++/&0.؀ڀڀڀ׀ـրـӀƀр؀Ԁ݀ڀԀՀـۀــ؀ڀց(:,%-&+2%%7//2+15&5+12+&!*%7/2-*)#)4.*-*$823"7/%))!-2%4/"-030"''60,++%"5(,'57$#) (22/%)-0,,5).4%%/&/)'-'*(1'3/*5+-1/+$'-%#/(%,1'%'#79.1-;,+*$)+?&'.)+.)*+/;)1*+)!-*4"62#.&(#%5$-/-)*82*/:2z *#:.)=#%)&$%*$(*&(0(.#'/126*,9/)1),)/,*,//<)',.'% +27+-4#1.*(+;+5$'+//*4+2)/-+>*/+-#14,0,$0-2.-04-(/,+.'*.4910!5'#*1)'8:4%,)1),').1*%!+-0113,'3(;$,* /$2./+4-',+.3 3+0,+5+44+@*1,(<#/#+&/420/59'- .%603-0'+'2:*$6/.*20+75#.29*7(%-%+(21$;.().*.$4,.01(A%/-"-),-/-"-*/2.(2%(+,,'&")-0+ .*0/3+,0B/-܀ۀҀ׀ڀҀրՀ׀ʀ؀׀Ԁ؀؀Ԁր׀ҀڀЀӀ؀Ձ-)4 7&-02$&.'',-2&/')5,,9++171@+.-'3228-/+,+',43,&7*,!*/1758.0*5.)22-(0%/5*!/) +082%+*.1-':.&,0$.%66,3-+7;3,*0/./*!/#"*8+,+;2)25,1!(. !*%*/01-3$/&/(/!/7A*4*+(($42,7$**% +7)1%# *4,%1+--*.%'+,3"&/0&+5(1)1/''(*0+35/ ..++6+,--%,88*/,,)<3$5#)/1)-,(/1-0",12+1,('*(5*.+'3.,0@<#)+0)3%33)20/.%&2073*!'&)*)(-&''*-+3 3+).-@..&.%'&(44*)5+--+)5!+)-1(*"3&*.4".*1.)6"4%25*%*#(*'8.%321&))"-5'42(410%$)*3#*%*&$5(&&3*&4'++2.# %=)&1)*;%&27+2+/&2://%&-%6*+))-%:(%!2'$8;52"('.3(/"+1))9+)0$2(*5+.+'0..!1#'7,,2&/3+ҀӀ؀ԀրԀ؀Ӏ؀ɀрրҀ׀Ҁ׀ـ׀݀ـՀ؀րӁ2&7(43&2/*-1041''=32)26+"4.///./0:/-,$/5,/7$3((,0'/13'63/$#%$%1&5!)#2/(4&/!01%0&%","=-&0-&!(1!##'$),20.36')*$ 3/5*406,3-8#-."*/*#*,'0%. 
73%-&!+91/%"*(24+,02#&/$*&,%6'.'4-+0-'/+2/.'$+24*//; $3"&)0,#-..7)/)*9&"0%;1=?+3(!15,-) 7(&4&/$*2$!/52/(&*0+0.2)&0:4**#02$',0*203$%!+,9/%771/")'),+%(*&%*$'#&8*&3141$+-'))/3%(('0-)"154&3')+1/22/%)/)22(2!'=.)1'/*/21-/59#+05'%5--1).4,0+'7,*4!&,/&.-!,/)&"2-411.%9$#+".+'7').561+5*-1,&)?/7-+-#"&4#.*;!10*#32&!58/+-!90("*+08.33#:/>7 3)+/9%0/,)*6,+))/+'$3:$48/# <3!,3150%5;1&,68+#54.2-4$0(+)=)/))&-2-/*$/-.'/,/5-36+,24&.-'/,41*;6,'+2'*+6./&4)&(&)*1/548.)'3&2'**98396(((&.+',744)!"6+220 &-./*-0!$)<,"'/-0>/1*,%1%')5&.$ &0"++7)$%5)#%#%/,7#:1+*2,..*3,&30%1/!(*)2#-1#%$,+.)).8?02 3$,+,*.4)&$23&,**%"&50%)4.40 -&4++41%"-3$*+/./%.8,0+2$(6-3-,,:$2*:('+509-(31(5-1/*4#$4"4&$) $!3,*0%-(3)/*'&*,4+,+432-,90%,;;00-05-<(+8-+4''2&%.5-%2,/)%5&4-(3)4+*+.$1)*)1-2/17/"6( )&/25!,-,:,*/!5)+3! '.5$++*,"14//63))7**2%73%*-1,'-/,+2.ـڀՀԀՀӀԀ׀ӀǀԀ׀ـрۀՀՀր׀ހր׀׀ځ+318+(08)"+$)*"/'0$/.#!+7/.* -3%)"95-"')%-- 7!.02*"+&51+;+(+5=.1.&-*6+,-$9%6(.7/5(.521*+ +0/)+%),%*#.&0,?)7+6<,7%2+0)"1&())!/!5+1'&'(0"4+,3+/.#.%/0+)0/)&,+ ./"2(<&(7/,(').3)(&-'">485))4('4=)"+3$0%%.%.0!%.&03 6'&$$3-(!%6))*"5) 6**/$530,*(5,&+13%+/./;+43%/"+/+-/%,6.03'-12"-")9%!&(/$(,()()1,1,,$,5*0:3/90"0, /"16?'1>.*+,%3*35%1#.$3)*&07%&#+02-'%E,21&)%!*-92(;/( !$0.'//21,+*06"8 ,33*'7')0+3, +')#-45$1,)'+9(3 )+3.//04*/((+.#(5)8-:./)-/!/+/.(/-+-)(,$11:4,,*+%-A,/"50-&///8/:86.$**.1-6621#0-"Ҁ׀ՀӀ׀׀ԀրӀĀҀՀր׀Ҁ؀؀Հр׀ڀՀրׁ,)0'73++&#-,+1*%5://;266++7,),*1/)1#''-*'*#:'.,<4'02."3"$4-C*+3@(# &#:3(&+&3(4'$*91=3/0*+-14(+!%!2.-3(*,88,)<4-./*1-():%%+!1+9'(9&#/-#7;-)(.2.102-5-'))()13/*!/',+(#-,3**"/--,/--3<&11+0&4)6)# (#.+0.!/&)0,%!*'4,!(&.*/003 . .5*611,+6-&&/.-)4#* #$#49*-05:+''0+5/443*%.#!+3,5&%/'*B:13&)<. 
.50-!#86,0/2&&433",7%)51*3(/,-;1+%)4,%/+/(9,:2/&$*/'!2%0%%&/51+-40--42%(1!'+9="-2>%+0!2"-";(3"#1,(-&5+)7-5!3',0,+-'(4 &73062:44)/=//25#3("*--/$3'&+,,* 40)3/:7/'%*)2)1--.(*52#'8>1-*0+$).+%91/-,3)7-2884$'-ڀـӀր׀р؀؀ր̀؀ـՀрԀ׀ـԀ׀Ҁ׀܀ڀ؁2(%3':6)-,1)4$(-3,2)4.&&4(4-%*7.1-0.3'-+,30/'44.+6&)3#0(6+&2773+)&*,;)#-0'*)/-1,47&&4* /)$.$'*1/7(#+$(5118-!+.3!+,&(".# .'*%()2&$95!',+$-'*3>,86#&1+.'!2(%.!$"-,+')&','"(!"++!',%''-##(#1/*0,,'*7 .'$D0*')2'5(+542":6. ,'/0,A#*$/,,,#8#30.&3017/& ##39+.'39(4+)/13+&'--08--5,-(,-''00(7/.*/22",-,9&!$,+,,1/')0$1+=.0'0+#!.$$-).4<"5/+'#3".,32.*'7,0*'%4 2*(9&*-456'%.,+'-)7#0+"3(++- ..9(.2%5%?2/-#0(*)3%- !&"-;)62&3+*)."%2;':3,5/2(.3#.7-"6)-.2.2/(2/.3,"-<03.'+-'6#,'5+"(2:(-3+.'+/'52&..,7/923-*2:1*%& ۀրՀ׀؀ՀӀҀԀȀڀр؀ՀՀ׀ӀՀ؀Հ׀րԀׁ4"/).)5+/.,-(7*!+23.3,&2%37.6"'+2-'36211/' 0-$02/5&&'-,+/1+$($/9"-',,)%.,/,7,43*=/)/"64.&8-)-%4%(0:.2%0"*362&4+#;*2+26%(+0",)/3)*3*<*5(%26(:'A,+34/"#3;11/+,'../(,4/0*( +(2,/)*&&,!*()...'+ ))/*8, *.*).+'%&21;)+/*&1/41'5',69)7#4(/(*('"$315:0"*%31*243(#2&/!)#//)'.-5 -$ )&'-)1('*+3'%*16,$(0-==3/!.74'0)'1/(&)'-.$5@$)*%0$9./%%).5/)'&0)$$2!7)+':+/ 6*36-*070$+,/(-*6-'''%*(10'%($&+.#,.3)0,39.8/& '-75/20*+$272$&',&1."-)6)-*,80,"7'/772"/.)7/-"=$-&A)-,26(&?2'Ԁր؀ԀԀ؀ЀՀ؀ƀ؀׀؀׀Հ߀րր׀Ҁ؀ЀրՁ)2(/,%2$1(27 1*=)6-6-9+%'1,7#.")1-;?1;%!;3-'()5,25%*:$40&1)*(3%()4)*+ 5 '-+(-)*".)0:*.%1? 
+(7"$ (#2(+/-%-$.% 2)*2-%5"'&.-/9-(*62%# 3 ,'5+("1))4'0%,0+!0)>-=;0+ 0-';2#(65*$.(*1*(+!+/-+&$)/+,.)'((3(/*-10)/&-.34**0,7-..1,5)&($713,$,+0'+3'()".&)',+/7#.5&*((-.)4 40 *)$&(-(-'.7!9$6("4'&'"34 &/%*)#(10%6*/"&56%-/+%'(1;)))")/1+3-1/%8.$-1'(3*+)-,.#),+/.**5/'#4 *13->+5 (+*-.3';)"%%/()'.,#6+,&>.6!!!$/2/'6%5&579-6)&'$%+#,9(0/'*;.//#;0041&,(-//+'):74&/!-''#+.034%$:!+7*$,*'))1*&33(4-*!(1%4&+++:27&(,, '&#׀܀р׀ـԀ؀Ԁրŀ׀ր܀рـ׀Ԁ׀ـӀڀԀՀց%*'.,4.2A)!+" .1%1-3$/0),+/+$)1=18-2-+++((+&/4)7)2 ,'/+424#*+)#)1%.)%'$/,A51,/-4-%='):)$912);/('!4!3)2+*%3/:)&).+40+4+(113-/;1#7:.#0%$,/,#,++"'+#+,-'-)+8'73"&1'&;*# #%4+,&0%0/)$'-25/.(%),-*+2"+(",0--$<<,,&+6,54$81&"0.$1#+$+1'''503-+'*"6&*,9#%''+5% 4*-8%83#"$/0-8)*'&011*9*>*.-//,+()13++('79=##)'=4'+;-9-70--%%)-+). '(!0'($9:%%+)0)).&,)$$5,1)(/1(#/.520)4%15*A0;4$#/(0-0/+4':#7 '2'0+0.)/5,&&3!&!0%#0339/9::$*,7!.-*/<+&)(7*40$+*'/++)1$5..5+#0-"'$!&)6,%/),5!+5(8/@'*3./1*,521(?-<50,1-+!-4!/.,@*+'@؀рՀۀրրۀ؀րՀ׀ҀҀՀԀـҀڀـҀրՀځ5"&!34"-+*')"95,60)*1'+*!>5#20$-67#-$,..&*3/'%*(+.&9)+3*'(!*+49,-+7*,!3(&020-%!8-,5(5,*%)/0.+(,",+7A-(3-.,-7%++2%0-#4;&.()#1."'1(),)/9*727,-/6'"-$7("/6529*'/$5,*,2!)*5(-=%+/-#,-%(+(-+&%-+ 6-#$2(5&*'4-'5$"-&%%--6+3,#.$"+$,5-96#!/8*4+,)0.482*#-#(()81%,2,*.#!$=*-'.:&,5',#)#(52%&%..&3(6)B*8-4*-/=+!'*1$-/$7*"*#,-3'&++))/$1)0-*'+,.% :'##.(5..0@<3!/-1!+-<'++)1*/&*/0'0220///+++5'(*#)(+!(?0 ,3&(0+)5#3 (+)3$.+'/050(6")&/2*+(0-2*'+1-((4*))0/2-5180'-.$/3$";%<*5%#%*6"0/0,/&!3)1.51!($..'8ۀ؀ԀԀӀԀՀԀՀŀҀր΀ـۀӀԀրԀՀՀـҀց$')03)1-1:,/,8.&36:/0+*;/31*$%4$.')-'$%8$&$'1A/0.23-2/1)(%)&!3(/232)0,,4/.E$).#0),,%,)*'>,*.%&)/+*-0,')%8)+/&,(, ./1,#2* ++3)2)'$62)+0(%)5),.9+5>'//)*.7+'&(6$1/*",0/(*+/;(#'&21)%((,)2)&&,*4/$)?9/# *'22)).//.$.-'2!4&+.(.//%#) "%24,/1/1)) 24.%+(5%++. -#9*+)+4,/*((50;.!+)&#+(-.%)(=,%.' (+2 11370/b1)3932&*#'*11%&),*.)!%#)(,1$."2$%,.),&13+++01//%.)-/2/"0'-)/$81,4(.)33#$5'*!1.,/#4'$($/%67&/#4.1//7. 
44+.8)*/2#,3&%))3;730 23/4%*,-'&23."-.*--%;5/,110!1%.4/>9&#:9(3>'&*'//'/1,5)5015-,0'5/-8׀րՀԀр؀ڀۀ׀ŀӀӀЀՀ؀րր׀ۀ׀ԀـӀс'&1$)(4'44%/.% +&(&!-3&/(+(- +#!3(<*52+$*&8/#)*(6-;$3/2%&)3'76%/2&7+)7."(,+*8"*("('.4%%1'-461.()%@:/695((*+*$ $)7-,"#01/53&& 2*+7))..,#.(4(*+787.*--+).#'-),(-". //27*.$'+&.,,/*62(4: +"--)%1>#(17/'7-0(9')34)32=#27/);/( !%0)7+/2+*1,%78.175%4/#!19#,*".1%.".&/-/7,)#1+1.1*'$2(,,5%+)$83)-/%)1*2t&(*-3;)4-6+)5011-&- +*5/:5'*/&,-7,)*/.+/%!",)#)%0%)"&//)*+#40%.#4,(!''%:.4$&+'%$*-.(51./,+50-51,37112-/$'&5)0 )//1>+)"-)).*2;5+.,'39-3/&3&,@-()+251+0()")"-3*-,=.0!3485&'4$.),$0<.)*3,2+*;.3&,7/,(Հ؀߀ՀӀҀ׀ـՀŀ݀׀؀ҀՀ׀Ҁ׀܀Հ؀׀׀ځ4.13:1//'630##7&#*7$.!#++00(+-'+(2/,8,1,2.#4;;6:/.6*0-.(-+60$/%-1-042(7/2)0')('17.--00("/(;$/7 /(*%+(%%5(A3$11-*"17803=/(&4,1"*//.-*2-45(-"%+4'#$520+$'+--('.1'$*.8+-/1'$*5)�+'10,A3)1353!%:)"(/-$*(/:$2(.-4 :0#"*.,&.)!")$&23#6)2&*1622'-.*./$,1)33)2&)#(2,/).-;#)>,*.1252+)+$%/3'+6':"#,5'*&5'"-,#$//+/(%/+07$+,625+'-,()')1+%+-&(+('0&Հ׀րӀՀԀۀԀԀŀԀԀ׀׀ـҀڀրԀۀ׀ҀՀՁ%/-'5??/*'/&)($2#)5&/69++*%4*743**);//9'!0% '%725+32..+.4-1)8 15&)(0&'/%/"**-%==,22,&&./:/./)"*:.)$* 68/$8/&/)*&%*,#*(8(+.5/4)%,,&16&,3&*20)+*))".((+0+ )%-(7/,/**%*8/%,-'>!8"&!+%/"'00+ (/9)")+)/0'+&4/#+10**.,--,,8-,'2)1/,/$/'5*&8#7P&)+0-'+3+++3$0$,%+,,+8+',%,'5/1(-/)+0,+,-**4)$':13*.41*(/ +1),)).+/<:&"&()+.',!13/*,;'.#6.'3'5+"5&40,C5&++2&,12!---2'/2106'(400+,8$)#%5/%3*1&+!1+','6(0-..!%$1!/2,$)5)*<+!+#!))/%'!4+.%)&+2*"'#! 
% 72&-+3&(20 (-6(,(33#"64261',,4-5+%1++)-->(72-%;6()27-';56&%334#02.81.Ԁ׀Ӏ׀ՀӀրӀ؀ˀԀ׀׀р׀؀ԀҀۀ܀Ӏ؀׀Ӂ&*$F+944>"35.$+73,-2+3%5&<6+-)..*("',4(333.*6#!/4.&(3**55#/:)3+-'&(*/#:*0>$5+&+'(2,1-*%7(.(+)-3+&%/133(2$/)*/**3).+)-79#*%.14%5*6%?-0;0.-,'2,(.$2(,).5,-3%%)1+*+"32(.)53C+(4/)(!%04.+8,#(.2*>,05-!-;4--1+,4#(2).6:,)-)9--%'.-*/'#*&2(*.%)*/3**91&833=.11)'020464324,-"1%)006'38J.,?//.7&*12&-//-2+423@,-40'8?-(.+/'1/&4.!%1-./"* -0'&+2*'(8%,9.(439/5&&+,/2%:0/ 3:*0-,#/606-)#-++%/$&$43%&--/5(-(*/,%32//)$*%2.,$(2*1.1"&3&3,)+1.))*,(#03*2&)6,+.1<8+.)3"#:*8&,)+*2)// /0(('07'33,"749-#3+*)&1ހրـրڀԀր؀׀ŀ؀րրӀ׀ۀ؀؀πـ؀ր؀Ӂ',/'6997%$*/,(?$.$.'#/-5*:!:#67(0+&%/%37')3/((048)+31%+#./')-,8*-*8!41+4',1832,-.!0(0&781#++'7&-#(+&-*/6/0,!-/(%C+"5-'4(32$+)0-9&-$//#%3(*$&!'+*3&5/4)%,+(-/,.,-215!8.-//00+(!$/1+'0)(7!$2%;&6%9%,(#3#-!",8+&'/,02)+,"+'#,/(.,)/2*+1/5='':-0,)/2*)!'6)9/4&4&/-3*2%#.)4$,&'(04./1'+(;1)*7#6*4"+,4-/,%)-/ /" 8/-04*/:"0/=,2,-(82"2'9''$.8":+62=#&/,3/'&/2':)&!**,,"4'/:0+230&9&/",7($.$'.05.),1,1+/'(#/*=(/25)1(17(")4+*#0+4%*-+2-+))$+2='!,.+8,.035>#- 3);2$9#(93.;&"0,-),-)))1,;8!/.(58* 6,0/*.7#)'../$6'Հـ׀Ӏ؀׀׀׀ـƀـՀҀՀЀԀրڀр؀րրԀՁ:,3&/**4,%"/+/1.9,'92'+.92&=)0037'3.F*-.3%2#$-4/74#530 72>)+/>&1%'++6+/"+'("-//",/)7+%#)1''"+9#5-4%+,'(2/'7$-)+++,,.++-7.&(%8$1/'(-* 0'&&;#/$0:%!/,8')1-+(/0+.,'(0))3/,,%?(,)(9'+-/*/"+(07"6)-(:921,&1'3,,;-$.#6.9%"(,+0,983.30-02+#%,%%*>'$-0'/#2''%+)0**(8%'+**$2&,-#2-&71(0$$&5-(/$'1&3&3.8/&/3+%!*0#3)46<&-*3#$-6/('+;+*51(/*3)+0(/&5-(1/'8%)>2)1821%/"3..+(/1/2+3/!907(/'70#?,'38.218I.2)4%&*=/.'07!-'5# &((*./22;?+&1'(+%)''",+420*08/(,.5#(7#*$% !/8.+2.*5(,(-(!-&*004%7&7%&#)2/(-'*5'/,,--;)+30"&,+'*)9&0,//*'!0)8!#+5&4-)01/)(-#/04'/2+%5,8,(9)'%,0'1///,.//,..*.!;- 22'+*7/:*1",ՀՀـ׀܀ӀրҀӀƀ׀ۀՀ؀ڀ׀ҀՀՀՀـՀՀՁ<#*%3)%+1**%.,'1)*,$-/'")///##%+,-(*18*$'1!3+0*$+8 *,+-.5#.2&='65.9'1:.*+'=0$-+/1'&/,)4:$ 2,,5(--.2/51%%)$(/(1.+!&3587%'+*215&!#45'*.$4-!(+)+'1%32,+8))$5$'')+-8-($4%.72',6*),!$%%/%2$4(2'0,-1-1)2+)!1.$/05%-*03,-,&"%'#'"-'(&)/4$711'&4/!//51&*245)3" ' )&.*5%3.>5(9 
5//.710,$/+4"5)$$.6(+.&%#:)(&.)2('$3(,2%7)+)&57&%3('='4.13&(+%7(.7!+/1&4-..)$ (C,-)*3%./630-$'"%.).0/0*4*1C-,(-5 %,1+">*(0*63$(-',4/!)3*)%31$6(/.5*/)8%/'43,/2--.3/"1$$0)'-/1+3*.-4$3+)"&6#'%.+"-9*+3(9450+'/0/0!/*1<&+1*0)&-'-׀ـۀڀ؀ـ؀ҀـȀԀ׀Հр؀ՀրԀ׀ԀڀՀ؀Ձ1',.9;-),,$-48&8/%/&1:82'4.>(035+0/+,A(4#+(/',--5-)/03$&-)A$"))02!#+/**%(3/-#4(9/*75/).9#1/551,$-04).).7&,-.)+$&)&?,/0*'<;>5&%,$&0+($+:%;0.7'+8+%*6-2!%2-5-*3,333$"+784+*&)/ ,,,0/,) -)#,%:/,,/12*/3/&8)%,#-&(=,*@+%!),)&+1/+)2/'# 0%!$/,+-.3#,$&)0/210.,!#&&'#' 4%(*)4%'52(" 36<1)"+,*0*:(-!!(04('2);)--'2220.-''"0/3%-)45'0//=+'5(5<)%)$)*,.5+;':-*-0,4840,2,042.47/1 /6-43/*/0"-#)1%+'/",$))/)#*1&+,5( +('8*.(*4')07#,(+!*89%((/+,'-0 *++$0",=2#%4)5"%%0,8.5//+(/*()&,/+&'1+(,#,6+B+/14+/0:5'ӀـրՀ؀ՀՀ׀πƀ܀րҀڀր׀ӀՀ׀Հ؀Ѐ׀ԁ)9411./*2*-"10-7'"<@)B554)1*6-=/&*(+&/';%.*2*!)&+)*-+0&-*-43602,22'-.!',"-( 745'4!***&,/!'.3&)/(%+')4#.((/-+(-8&0/3(1!$*%&8"+'-'$-2,$(+-2.1)-#4,&"085. "' 2'44.=)-%(-&&)'+(%,#0310)1+30$'0,$-+',6+*!,')-" ().3,!($ %<7.25.)&%(&.!+5*(($+)-)06//+8#((&6)-*/+!'A)-%-!2%'';&,4)%+15=.'-+-'**1!#,3 5&/7,-+"+.4$+$-!'30#)2'20,/+;/-(1'+28'6&7)/%.,5-#/-,),1:*3-<--)% +2//4-*+3&4'"3:),30)/4514)#,-+#*15,!$0./., /$4*9+1222&4(3+%.**/1!,0+57"'2(6)*).'""$5/53%**')(-2/9).57-9@.4/.+,-*<-7*(%:%9$6#0<7-/#+,);092рՀрԀڀڀۀҀրƀ׀րԀ׀ր׀ڀրՀڀр؀ՀՁ#72'>C'120%++>+ 9+'-.-"2(8.-)542-%//,2,1#*.#6*,5)+-1/2%(%9".-1/-)7&/5(!26-**////(26.)5)-(500+-4-4"0,'-",(2)%(-2-"/-'.8').1%,$+)..2/3"&%')%4.#%&/73*8.* 1/.6(1/()+#))**3+/)1&61.984229*4%'.++)'+,.!+).,*4&/+1,*/0(!.++1$#&1,4++,/*%%/1'3,3).4>-/!-.&=6+1/.4($3<)6 ,:2,)"-%&+4-/*$$&5(+.!+&4 .&(427.)4"2=6%.0./,!114+*6*(5#(-6-(.3++%)'))/;,4!'2/721'3:/"%)"/7$-6; <)!/+%1*4*5!4+&$**52-.)%%+37*-7',&-.++,(4(-&0)*.3,+:%%,*((26,#/+!&/+36%*1,,%(%"7#C///-2*72550%6'3#/.)/24%*63,+3;+-014!2<7('.,806'-+*/9/-ӀրՀԀր؀ڀԀڀǀրԀՀҀӀ׀ր݀ـ؀׀܀׀Ձ.8'85:67#4$4/;0.#:%&)3;.< +5'70-'-3/& *@+-,/-!!/'-.0-+*'&/&4,--.4''$++%',%1&"*0(.06+".*,0,."2&*(<5%$<%&.5%*##'2-,-%(0/(:9/+)9))=.0002*5.+0%$1)5'8#3,/--7F/).0'7*6($0//316,")-%%44+-(02)&/*)$-$/5"1,.,6-324*;+%3*'(4'0/*?'.- 
314/03+9*+.%.'0,?/#'++1<1,5.&2//-.-3+))-'.+,5&.%,2-/5!)+5A?48+/&%<)):+(.)75-1.'--(###9)1+2*.$40,+'7"/+,-+$/5+1+*/#6( '5:7#+-@$,'#.))./3/+++2!(3".%$ )).'1513/*3)%1+3,;.(-31',$**/))4,3)/*0?4:24'*,5-1/50,,,+<(7--/*,+.*,&618103)+&7$:83#,.&425+#1%-.+1,,'0++6?00.1+16)00-/,"*7+;03$1(Ӏր؀ՀـЀԀڀՀˀڀր؀ր׀π؀؀Հ׀Ԁ؀׀ց.-'+3-6.)%1%)!13//,/-)1+2,'*.02,('"3+2'812$4+)'+,+*3&-11)+*+'6;#(<2/%(*),.0.,"+15411-(*)1/ !-)#%1,$'*%-9&1,((292// 7"%32#+/:-.&8+'!(*+5*$4(%((-!%)*5!-892-3%-4 5*#90-73)318.-!$0',-1,9--*6*(&/2&.& !)1%)1&$,**%-/-0*'48%@)-43..4%?-0,'$-;&*206+206%"68%.' +,-4+''204*$.8'+&(/)/*5/'11 &(--4A3(!C('=+.0198,7'3*)4'562(33%* 2#9.(5)0-&0/ 8":,$/&&$313-+)'4,"+)%,&-4&/7-);*$00 )3&/63*0/10(,20!2+#"5:+203/2:(/*/4)+5&&33,!)1,2/;04#6,1*&'/3'5&3092&7,!)*9'11*,/0/+4%:'&0#-(70:3(.,)#".'!/," ,=*9.,:7-,1$5>,4׀ЀրҀՀԀҀրրɀЀӀـԀ׀׀ـրрՀӀԀրՁ<,)36,.+(@-77#,+%4%*)$)%/'?.%3=2+,##+)=+%+3'/1%0-.3!42+#.-(=3-'96-2-5%$75<1-+(6&*B3",:/(!)*(0*,1#+/.&,2151<)0%!*-3).&*/% .5)*(//*2+-1/7'(-%.$69!--2.+!/*5$0)(*0*-+**1**)%(*!$2!5,--.&&%*63+%&,).$&&100-'1*./*4+:++8,33%2356$&3$1+2-%,/%#0,31+!6*4+(/);/$/'%,'%2,3,*!.-%2+#./1*,/*-#!-+($,1'&+54-/8'6*+)*("%3.1*("%65'++&1'70/7($#+)4&!0+*0/1(1(%#1)+0&;'0(<+'1.;'/(/$0('1.(0/2.)(.#0(-.'0.,63%!+&-1-7'1%:#*0>#/'++3C+$0-3*/,7"0,($40$0/."**%#,-(5!#7,2**$&3%30(%%&:.-*5"$6B),6720(.%#22+2)<)-.97,=-.*/3;57׀߀׀׀ڀҀڀԀՀȀր؀ـۀЀ׀܀րՀ؀ڀۀ׀؁/-+"'+0+5(;3)+0')81+'06('&23,4433/8/*,.-3'-+20"%&)+-'-'.$#.3,)6&1+,&4(58,2%65'3/'+)02"()),/%#3'+=,%4&&+"%117),*($$)+%12*0247#//)*-(01)# 7G*/(,9%-$&:-4.8)%9%*()77'555)3+/%$%>+=$$./!6()2/.4#*-6'), ,0*+3-- *3*62302,1',#,.,+1+43@*/+3&&,5(02260-$05")+(4+*))A$5"6",9++.+)'*)%&651))4$/2((".+31-(3&07*$**&",<)-4.+:#$'%%7'!5? 
+,,.+&,%,-%(4%+037+2.-&-54'4$ 3473'+.)%='3"--.<23.+.'- &'6(*)*&3&(/&&*12'9'26&/&59.5/.52*)&+'%20)!$&!03'-1#=822+54-*7:&)(*'/0"+;*-))(16(-6($&4*)+37)*/''7)23.<-/.(50*7'84(*+-&ҀրՀր׀Ԁڀ׀ҀπӀҀӀրՀڀ׀׀Հ؀׀ԀҁB2'#4)1/$&"8.5'3/(/& 0!..!1+5/2.&&.'#121*+2&+)6,//)/(7&6$+/4(%56.+34-08)5.7))-/*=*3('50(/*&42!//%$'#0$%8#3)30)1!6/(1"-+&'(+*2,66+1*,&,(,04'&+'/%4'/27'&&2$-(+6!3.'/!+='$'%&(8/,*4+&-9#&'1-*%+&,1,#1*(;4$ .%3+*&*"&(#/##.%5*0!-%2 )0!!' 5+/#&- 2%11(6'0)-'+)&#./$"(-.**524-$8%1&*@,4/#,*37*-1*$,')%)/))/ B5/*//%5"+56$.'00+( 37+4%)$7/*#)!0*8#)%2-".0"9$#)7(18.7/-')+4#/9&2-527'+(-(!!909+(-).-+2!)02/'%/560'*)2%0*?9-2:3-:'4*)-A*.0-02-)'1.-.02/++(>',1/+#-*621629%)"*21:(7)*!,*1+,$55;:./*1*1+7++/03$*:.28."0րۀ؀׀ҀрـԀրрԀՀ؀Ԁր׀ԀҀԀ΀րՀՁ)).1304-?.63(-2-/8+9++-'30&)4).1()%6.37303026=+(%/5/<2/7'+0,.'#)615,.(5)!'/=((*)*, 0)'$5*$$3-)%),4+,82/430!.,23**4'.2-%4$5 ',.2+:97#3'.) 1,$-'.6,5!*.,+)(+2$/0,#+.3$#,"3+$'/+A626!27,,6"$$&,--/! *3!#;)4/#(*0/%=*1,',.9#%//2.((*41+'!.7'&1.*)//>'%%" 4$"+&$)(.&)/"+()-0%0'+/4$ ).=).-/4;+)'$%2!)4'(( ,+029")(/4 *7+;7-')*'1.&!%*011**'-2/2#2)!(%&,,#-*%/1,'/:/10*"/4(/3,%*9"22),1454/---!010*/+.50'.5%-:(2*+6+/+&57-/&-*)(-):%-&-$/#-/. '.2%0. .$%)#*-5+;;#/43)7& &,)6*)'4'/(%+//2$28/<20'*'.9/+30Ѐр׀րր׀ԀՀӀˀڀӀԀрҀـ׀ӀڀՀրԀڀׁ/#+.,8:.5)5/1812,*.'2.00.*1++!25+#4/.=+$.0+;)(&-(&'%')7%*%60'?2) 1.2+-'%#3!%,%'/0,/#"'*-%.+9#5/0-#*/(3&*),0+/)%-/' 1#/,*+9*),-/&-3$%,41)3<07*"$:.2--6%%'+%.+4'#((&2#/+5*9+1$,%($.&'-%6)*',/6.*,/%"2(/7--*(-5+.(,1+./( =*00799--<*52)7+/%(*#!)87+/1/6+./)74!*5,.,:)"-02*1&?&20#/*1. #/*/9%/*7%0*-.+;-#),/ 2-527+3(3)-3"*4)+.7-*+3&)'&%-1",30$(,//4.-#2%6%!+<&50(.1,%-)!) 
(/!'$1*2--*/!;1.)))/27+6)2@35 !'0)357,34++(0'"$%7.-*.("-// %/,545*11048#-5&2&#$7!*',(!+4,+(05!0-9/63 =2,7/35/&59-0)22"#73'4:A0(8)')Ԁ׀׀׀Հ؀؀Հрƀ؀׀ՀրԀրր׀ՀՀڀրր؁1,234)'5+,7/3+)-/631,:2(4075*-".0(%2'(1/,#1-():2'+/)'+,*())12'* 0$9((():,-8,+-"041))(4:-*.$"'()"2*7)-1-0).7(((27.".=550,%.#%!/;*+5 &,2(14*($./&&/3#!4%-$13()0+2+-),*10!+&3"+867#2.*90+&%&#*?--('7%2)*(,,&*?/055-,'0>-5#=#5/,.*(/1+"2&0)1,&0*77*7/&.28,)#)",-/1'***/+/-34-0454.5()*#"+9/(/6=.0(6--,(%/0(0* +A&)-'0*).:),!-6*&5+*%,2-*'1&6"!D+(+(9:"#-8'+/".)3/,4(4*<&5#$2)%+4--1-<)).//'+254#$#$/7+-6))3!/),55/1)".%#(*0/)4.2%)-/6+%!,*,4)$)/)015)# /&*2)*-,(2'1*.)+////3/1/1&1/**54 7:#+"8+%578-24%,;37)6׀ԀҀՀӀրـ׀Ҁ€րՀـЀՀ܀Ҁ׀Ԁـ؀πրՁ.1-01* .20+6#-(5-+*'1*-1%%?)4+073!*.&2):!:/+/*''.0'(+-:6/-#//3+..$5"*0+/0851/-1,10/*5,'0/--%*3)%0,-(0%'-++08>&+6).6!9+)/*/&#:%(1$+$"/.3/.6*4#%#2#(($'&/! 93/8*#'+)'2&/6@*/&&/)&8/-%(%, (+0 1!0)6.&&$.#+5%/9<+('%+?-(5)/1+-&/.)9&(* ,-,/&!"92,+')%3*8/()*+).*/*3--21,(52"49,.%!413"33/5",/6),5(3..0-'($1,505*4)(3/,$5,%9(&+/23)25*6(00()0(,%)%5/$"0/2-'-*2#5'.(+..6*1,2&).5!.*()('-+)*.+4>-8--0:*-&,5$(%*,,*/!6'++'*4!%&*/'%+5-973',%+)*)+-'7+5.2'/B+&(8*(8*-31.1/2,244*4+*-5(.:!;33/..$-530/8+3#14/568.(1-2؀Ѐ׀ـ܀Ԁր؀׀ŀ׀ր؀؀ԀڀـՀ׀Ԁ׀րҀց*$(3)+1$(2&7$3*2)8/0/0"/6!*10*020&9 -3*-71-0-+-+9.,3).:%26#(91//!'0.4-'(9/%')3%%-%*55*(+#+++'4.*+,("5+*(*.43<1&*-*"-/!/-#/&.5#3*/&031-'3-(13(',!515"*6%0%1"5*/-8.,('4/.+&( .77-2--, 6#,&')0.(.(0/(32(4-'5.*36&7+.$$)"7(1!= 3&.781" $('(-.,2+3$&+/&*(&*!)+. >8<701/$35:,0$#-/*"/,(-(6<22+#**254'448(,*-0(0*4&*/-'+7)=+3%&0&'$2,!3$ -3;)'+,",%131./3(#8/>(/.5//2&1+21963"21'*&Ҁр׀ӀրԀՀـӀÀрڀՀҀ׀Ԁۀր׀Ԁـڀрԁ,>,1*,3+14%/373.) 
+5%#).7.'46'(():113,$5:*+(4%,0*+"$2*6/2,.2+6/149+82%.1#-!#//0(15#73/(* *8'4//,0!-+,*!*),0&**%+2,4/1"-/#$)4*2(41,#!4/7)2=%7.-,(&.9,7+(60.-202,%.','$4-%#,++.0''5491#&-$()+(/1(%-(3672#*396-526:5$=*(*1,6&/'7%"!*-/,6&+7(*%+$%+6+(;%('.)/2.)-40+:.+#-4%8451;0303)3415" *+/,/"'-5/&('+++$01+(&;,'-./%/,2$=/)4;(3*9,%:%%*333)&%) 8+ .03)!-')) '5<./+'('&4+,'..1/(#4$,?+7((-."+/&3+,1(+',0*+7%*-+$2.6*&+64*(3%/%.('(+&!("0/8/,7)$0.30-63)1#%'5:/.,2%(%1/4*0/1,327.)2;0,,!2,#)6,0+0+3!(=+@62ՀՀπـԀՀπӀր̀Ԁ׀Ҁր׀Հ΀؀Հ݀׀؀Ӏԁ;7/0('-*&)4.,3'%(#(<#723.$+!1*06,4"'1+5:*0(+'-, 12%/8-.%*+.4'&4$/20&63#200'.1D5$)(.1"//+-3%/'.-$.3/&8'6,0! (.4763'!71; 7,(.)$"32(4!**-,*(5%+)&,)*)21"7-%'/(2-.(3-+**-!-#2(.)&2)(7+($3-++*&*&-2*+'$3),/*..$0C',$;04+36(/02>+"308(2,/%-%9)881&.*/%2/.+"$.5$0*/! (0.)$#!/-44((048!6#''$63&"-89%3*+2(&*1,*3/'').-#.-%/.12.)()70')84/1!'A(066&1#4)&%#))4+)0?*%1$2/ #+%*),*$45%&'2057(#763*/!7/4*$2/ 6+,,&.0**(4%(/!* -/,4$(&!"!4)+7+.'.0/8 +2#*$&+/7-+1#6-B)50/5)-/&'3)*)1#...,'(76 */212,'1+03.322$/84;9."րۀӀ׀ՀԀՀՀրɀҀҀ؀؀ԀҀԀԀԀՀҀ؀Ӏف.$'&*!(/%%4/1+6/.!5(-.60*+:*-1,-4/!/()/*-2-3."/*'*003&2/(0*"*9;* $-'0(.+-1)41+7%#-5&(!(%'$,(%':0/'$6))7,.40*6<&7((-%-"-622)"%/44,(&7561%5.(&$;'-(4%/**31."#6%"05/&#$&/+"+10+2+9/23%.-*-!3+($,%".39*'(54)9)37 )-&$1//+.7(2!+#3,-%/** 1-7)',3&.+-&2$)0#3+105((-/+.1$15*-%%5/.,614(--/8+' ,!<3$#3+/56)=+;/9##0&/$!31*7!1/4!,4*1=((5(/+7.'/&#-+.$+#7,'*.'-%+54/1(0,252---)/.8(*>&)*73$%4.20!&##.1+66+00*&41(4&@:&-,+,8**)//,'%4%48"&,'96&4,&+5#41-#-%*0)!*.3!&9'*29#-,>(!.*+/-'$)/17#&*7*-1:'-7.++(265+*+69*-Ԁ׀ۀ؀؀׀Ԁ׀׀ǀրՀـҀڀՀ׀ۀҀـӀ׀ـՁ.)7,521.,2#"'4&.2('6.0$0-7-,'+*4/+3)&&%'?/5590!,+*++1' +)4-)15.%-))'0(*0)+&-$11)'$(;.94)5,1&4'&1#./4(5)+9&,)#-2+6'-(%!',& 2/6)/21613, )0.2+)52&*++7%'))+&/#2/4+-5(.&'%&//./"),))(7/0%7((13+%-'+(9,!4./%2$,1"%(-,8#(/#%.':.=/7%*$.<2)%243+./3-!5)*&*7'.&)/8:20-39'0+1#/'232,*=*0%))(/-*1'1"'5*+)9/)+'!,/7/%0%*/8) 
.'*,-!($%(5&&/'&79!+&2),,2(&/0+-614--*1+2#,$)!5.+/'1%)(-+1),+!(",/7501$*0)1,"9+3-,0-0+.$-+(0-0&5-5.4"%.(+.*))A+4$/'4&-&,4(-5*??>+#19.241-)+-+!*12210+2*3(>'97--.?1)6$84,0,5'2)&-+=(!2+22/;-/;؀؀ԀҀՀրــԀȀՀҀހ؀ԀԀՀـڀҀրҀڀց&:+08$0/&1,0"-#:4"+$5+/-<+4>*3'2--"0'6/;+-+1.,.%)2*,13-1;/-$,(&'.2%')&447*(52-. +'/%$#/&),+3./)56;#.),1%$2!$%+/5(+,3-'4+/2+/'1$-.$)2/)1.)7)!)#'(,'"0&*+0)'74%")#,@23%5$30/*53..+"2(+<,+<,-4-34@33/2%'!!+./21 +*-&%*,44')-&:'))(.4)'*(#143-,'++,!"+)9%$-,(':;5$40),/*-!3:.&.4.<, $:6?$+!")/ //*"(-)*0,!-&.+*34$;()$,6-2/9-:81=7*=2 ),7*);'-6+,/#:݀Ԁ׀ԀրրԀڀӀɀӀۀԀ܀րـր׀Ԁۀ؀ӀӀЁ-18%2!-72-5&(4)/-),2$(0*$'44!11+72-*24-+7$"1&3 )2?3/1&$"'8%. &3$0%!.,7,)3.-,1-,4,-"6)030%/;1'&)8(+6'5+./*&(.(04/23')'/3(.2)3,(-'*'33&)+!1,&''5'$5&-*230/3#0%-!5#%(')4/(/,4+$3735(&&!&,1)44#&),-'%&0(*35'/70#)01((+$/1/$),,6%",.,67$0)/+4"-105-4<(2.*$1+44/%-.%00!$(/$)()+/#+%* '-4&/822+.-0(,*.,#!*&).61*#25.'421--/*')A'$-,8+.+.&*/ //+6(/4,06!&*/-# %2-+.!1*6/%"1!"+& 7--0:) '*/+.*7'--//5-0-08/> '130*(-#7+'%0/%(!''1/,6"#/')+"+*%1&6#%1,%+22-)2"(%)&/,>43/5&!!6(*$)'&)9)'.)63-*#(/34")87/07172=-32;9؀݀ـրрҀ؀ـрŀπԀӀ؀ڀ׀ـӀۀڀՀ׀ڀЁ687+'/;11(+7)-),9&).71&").$;)*?"&#(6#%3,&+-86.,-%'533/./:7-2 0,%1,(*2/!)$3(&7!-0($7 $1-6+&".(%!'/!+%)23$'/$&'%6)3?$'*)%*7 ځ:0)(24"$'+,4/$/6),"1/+40:$25+..3+=(;'71+$.-$3',450/2&:+#3*5"!8"+)0-,*'<56#&8+00!47<70/)*6547/)%3(,'(20#3+#4458.+"+-''&%&&'/(&93(51,-7 &<&+%--9/,+-'/5+/&,-0#;./))*'*/*'/5' -,5*-00"/-*58435%3)8(%)&2700E2-)*-077-080,+''!,93)3%+0&3#(/@/// ((//'3&/,,2'416/5(*25'+244-/32<6+/(*0"'*)%%7 2&+2*231'5/+3)#"6;---B3/8-*&9-)(%**.+.)&',6+3(% /1)/1,)",(()$(9)& %,/*'.391!%/&%)/1%/,14.&2!-5-/'312-'&.+$'+/&3&-75)3&"+$%3%0(&73. 
,%/'0/)&),+%8&+<6).,/(06()+6.6+*(/++)%1'15 2''4'&-09*)-//17.1-)0&$(0+6%$%+#)0*/0',0,&+/*%%)2.+5!8$3"94%.-)26-5/*'=36&$+,).&3$*69*$(#'3"-%)3-%,2#(,*/2-)+(7%0-,:()1$-0*15"&$ ///,8:,.2-,-21)%%+&*#)+)-9'&%)*%(1-718---!,-#5.+4>")0$2&&(,6%)1#1*%0+1(;%730%3++@)1=Ӏ׀ۀՀـ׀Ӏ׀ՀɀӀրՀــՀҀ؀ـ؀؀׀؀ҁ4/"> /@)-/84./3*'-$=)/((/0<.:1,4+4/.-.6/!-1-.*,*$:''8+&/1(<+.F.&$33,//)%+(%0-<,(5&1101(9+1/6;#.$+,88$-7)+* /534)6,,((6(14/()-:7/2+*74&5+&*)-.1=')-5*75,0'$26.-$/.'*6+-$5)9#'#0.*,7/-)&,-"&!&(1 ++ -1/'<.'%)/.*-3***(352)7&3*-@.44*&")12(7.+753-//&323))%0+"-,,): )0/+9(/./."9-70(!+-%',(/4.",<.0$+!)3124"-)%'-8&-6$&'0'77*).)/+#%(!6%(+'$143'(+.-+++3(7+%/$*-" )/2+-.1)$63=&().*0$) ++,-*&,304+1315+$*",*&-.0(@-+4+()+-'$1.)*);+''3+$0.!.,--0(312 /10=!#../>02&'/++(360&1-+7=(16-7(4 3616&"4;5-81#*+)#܀Հڀր׀ڀԀՀրŀ׀ՀԀҀ݀րԀрրՀӀԀՀԁ'095'$)8,;#513"@1,+1,'4"1-22,/1/(7="051)$':.$(24,4,,2246)0(5*!;(!,33(5,.+!,/)*1),&4*+20,$1+$0*/&0'5!2-)/*0%9.(;7.22%+(#+./%+..57%2&' 1&;0'*&$-/-,!,% (*3/53-"+%2&'3)) %&,&)-#%$*(&&(701+-1*)+&51-.%'>$,8+")%1((7*(('%#/&(*+(5+%*"'83*>1?/%.+7'4$/,.>%!..22/()&.%60805,3$%%4)%+-+.5**(.,,'2?.-4+'*1)4)%):)!'3$-/8+(/-"0-*+72/>-./:9#(- 0/1%/,-26*0#(/10+8)&5!-*,.2'<.,&.()*1/7*%/'(,(-:5/3(.73'#9-%16(50-)(,*-"))4,60-$(+2411/)'/4#)2/$%*/#3(.<+=,84(23$*2$*51*7+*%7.'!-"#2*&2,/+()590-61+/6+(1-(('-0#*./%*%77*;+&.(1,%.))6&:.33)'7׀ۀ؀Ӏ׀Հ׀׀ـĀ׀׀ـӀ׀ــՀրՀՀӀ׀ׁ%%*")5'2;6;205*)"',%1<'*4387)5<&+.0)(,%*205&220+&((1%%47%*=#6/)1*.0)&*@%+0()0+)(+04/'-/'3)(.-43'//2-62)3!&+"1-',&/$3$ 0)-+###/&,&$6%0.-2/2+!$(+)56-*+8,5%/92.(6,%.-,4)1%:'-%90-113/0*$#!7 (0.,,*%"4?;/(-4<;"("1&2,/&+!/-'762(*,25.0%%((., >%!+3/5235$**/1(3#.3,%2*+1.-+11(1-,'#$$,'31*5$.'*#2%4=)-(83./+'.//',))1-))%18: 9!!"0.3%;.",/&&-.(?1,,+.+459-.23)(*'*+06'1520$-4)(1./7-3650-8483(4%3)6/790!1+'0-29)(81.'%+5.1931-.'%0* .10+*4%36#.82/7)3>934Հ؀ӀԀـ΀Ԁ׀рɀ׀؀ՀՀԀЀրـ׀Ԁۀ؀؀ׁ.(38+97%--,97(+06)#1.,-21).,)6:/+,,%2'(2.(:#+/&*,%&+/)#*2*37%(*")+5!/%0+6+./,-0)27(+- '&2+!H//2860*7#-',781%20,1#33=%11@$%*),+)('2&";(#2)#'30**44()*-'20,0.+!*!*4+8+0%*$'(.(3///)C)#1/*34:%%))22.(*)-6,&3#5%3 
2%1$.5-+40%)-&&3&7/021'25"/##4#)'46//5850'./(.$,*:34#+*+,0)'+"1#3)7,'&3 %(+/0+/8432'/+/0310-+)7-5&&7$+' )')&)*/*3*%-)(.% ,1%$- /*,0--2@#&4$,1+-/03.6-+/*!$$1''96.3(14.8%8+23)-4/&2#.*+)0%,$11(7('202,)55/7(16#-+# 657)&2&&+5.(%90-""-$352'(71&0)&,1&/'$.3,5) -*+("1-.-5-42,&<&(33$'-&,/#'3%)3%ЀҀ׀ԀԀՀ؀Ӏրƀ܀Ӏ׀ــڀҀۀππԀπ׀ҁ*/33 %$+0+:80:.%))..85.+19*/$:)4.;#/%)+7+&/-,4%%-+;.%5))+.'1$-$/#36,'+*&#*/ 72#%,/-")(09(/1?5512#)#/#'/#,*+0'+>24/*0:510($)/2)+2*!(40%- +8,5$,*.-,%2'+'*'- )26'3-3*+"4#79 %(!'"(:6!#-//.!#2&)94+66%!*39/0,1=+*--&74/*6()=?)+),.'30*;.<+./6!-%7/16%$5,+%4;806+-4$'#!2-*,** *%!30#1230.'&3*& .$<+'+*5/+5 )0<0*,.0(-.(/5/71.#.4-+(9<82;6.1)/(574'4-*+$.;8.063+(9(%2+690.3)'.*$-695%./'-$+++#)!(# -3112%-6,'*1-&-1:")"++%"3)+*227)3%+#/*+"*'&/':-9!<(0(2/--#(16#&*%+:/3"5('#)'778701"-)"'%$/$9*1,/<,)*51,)/-146)*$#%%.+)+"136;+*%.ЀӀՀр؀Ҁ؀ӀԀǀҀԀ؀ՀրӀԀ׀ڀЀ׀Հ؀Ձ-+!15/400++4+1(82*2'!8;=*&*>70%&:$ 0#%*1--(%+0&"* +3-))&&))-$.*9/.0-+<'0#--53#);,#/)!671+)/0*271/5*#<"40/0-,4>%, 12,6",,%/+5)/-(1,7*)3&,+$)3/*((2;4'.3$"18'5+$33(761/0&&9%*4&1,0(,+0-,!'#43#*1,64445/3+.+7/3.-+5, (5+8-!= (=2&'#2$),*8%/."+./-#117"42'&)/"16+#2 #0%)#*%8%5 ,2+/3*%*2)4=%/$+22)&&8)/(1%.$&"8)4(91$1&").)*/1,#*4-/0#.,.23(()2--3(+/%24/0'. 6,/)51)&'%'4@*(/')-*&+('4+4+'6,&$',-,%4!.(-,).'/(/*/ 27*4%$6.+24../0'2%1#&&-(0'*"-.,1'(/(.6&',.!(/)3(43-!1'71+!42$'7$8-+.:*/-.84/)1''$-08%3*40-6рـԀ׀ҀۀҀրԀǀՀҀۀۀ؀׀Ԁրـ׀׀ր؀ׁ,-+:23%63-)"42;8,3320'+(*3$$./,B)/&);$.%7@-7("'/ --8&%926-0.0 A#73948.115!)'$@0!0%$**=)0*$)2,0%+>*-299/&%-'"-1+!*0$$"%A1?-+2$/K%:2*/#-$ /#20$%/0 %-5+61+-%-(+12-9.>' 142#+079'(/0+ %,-+&/'5&'-).#20. 
)02,+!+%/(/(+"60-.'7!)&,,*#/).$111-.&)7&4(0/!*7';.4*#18/--$-/70',#-)721*!5()+'"+,*0,25//3)58)-%'7 + 6".''-&3$.,&+,/3(6())&%20**#'')09,0**$,3*03.%9+&.0425&-'%334"&',/,*-%',=+!(*-.')1,'%*-$&'0/831.8$(-)'0(6.+*#*)*1%6632*)..,21*+45#*(4:0##(1+,),+#2!(21++7*(':.0%-%0!%<7#..,+0+#;&+**./6+$'&;-+1/$#500(%&+7)&$)0#*5(#.6)7"62;,-3'&/-755*88&5%.+07)3Հрրր׀ӀـڀҀĀԀ݀ۀҀڀր׀׀؀ڀ׀ـՀԁ/$0$0-%602/20"2-:(/#214+'&)'&+/%C%962.-+'3""&(/"4/3+8&001(9.8)"6+559/+&*B*,47&-/&3)*0%2(*(5)3(06&/?)0*%9/'.6'(0*/&71'*'+)7:2'&0-,-/*4(*/&&.'/+&80 '+01.)8()35 :%&,/,65+&+)5%!+/.7"*&%+../,-:"$-.#!3+)/*1(*2.3 /&+%)*/0+7/-2#*-,#17(7416*//(-/37/A-*7(-)+"'*$%/+:.8690%1#.3+&-%2=!7/)2",&0%+=6%&//'040/324(3 &,.%,6.)&(/+4%'**&26/-(- ,0.'$3)8-)-62##5/+!)/-/159??'/+*08(-0+!922.),%5%/$+35.5/$'+/ *"(('(2*0;,))-10*+ ,&0+%)'9)'3'6-+(+8**./,/4-&")/69/%),08'"$!*1%'9)0)))*;2.3420-$+/5&2#)0#;2/,883/836016"-6+0׀Ӏ׀ۀــ܀Ӏ׀ŀπڀڀڀ؀ҀҀڀрӀـՀՀׁ1562.,$'36..,--/:53600,2#%/$3,441+5,1+13.B9/*.7062##/#01/( )1&(:4/,43(--+'7/(7-74%)1.!'#863:0)2(//6'1*67+**%&'.-++))"(@70*).&"!<,,,/5/ 15$'&/,'&0)+(96)%-"')4 .4##!-1*$6 */*)27/"%%',$&/0353(,&/<(5>&-'06+)*(',+50('-!)2$)'(-035(1%'/'613-&'1#$)01182.$6/%'*/'&4*,70*'(01*73**- <32*3++-6#4+'2%+/+'83.'-5:4 +)()-(,(5-*0'*,&,& '+)#5/;*))/78)/407"1+(9%%./*.*)9:(/)/(-.-"951&4!6);-($+/$+-&1/,%'30*43*3' %%-3)'/%"+.9/%!/!.+5(.-+/.--2(+3*.)1',,--&3*-11%05',$-(!0#!-)92.)7"&2#..02"$.-80",+)*)5-1׀րрԀրՀԀӀԀǀـՀ̀րՀڀ׀ր݀րـ׀Ҁׁ/&)/.6#+-=0*2)0=81/)7'32-2,+)&&$31#1-&)%$/-+#!$3-(,0)%+4'$1"(/2-086<8"$%+-)80+*?.12"'++/(..,44&1(0;03'1/'M1-30(0(%0%,+#'7/'!*0/'&(0(7-.$:,<%.,1 '1')(&:++1*+2/2%)1&4;5'19'-).288++0.'+('2*$2()/2/-/.-2037/.7.'),'0$6%(54(#,.2""+3-%86-)*+(+!#()('*40)&%("+-5*,.0(/)5&!)*,'+(%'$;*%-+&/3!+%+ )/),0,84/./<'+#*'/"!/&,*1,#%/137=8042*9$8),)180.,3(&+-(+*-)3451/*-/4/).))0*%5+.-# +&0#.#,9'!/0% ))/+&*1%,,,'%!(88>($&1+$ "3-4)->" *'-35*,*$".)3)%'3*21*%;,"*'(*.31)-"&@&A-.,)0&+0;&%:$4%/,.-%%.01#:,' %0:30*%0608 4؀ڀրր׀ր؀׀ҀĀԀԀՀՀԀ؀րՀ؀؀׀ۀ؀ԁ&;0-2%6($"%'31*)165+-82&&.-,<$8%0;&164+ ,%0%,/)0 2 
3$20'.+07$.$--"'8"*.,84**+2)#3*$2+*',0."53-11/1'+)2&-$.:1)&-19;!'& 7'*,,*:%..%7(%"),$''-"-'./73'3$*'1+ 6#+6''-*'%/%&15./3"##&1.,.',*$%!*%$)/**%'+&%-3(!1#$"1*/-)'849/4+)>(-40';/%)<-3"5//)"+'4(5-7(.,-6.(,&() **23"/03'3 -../')*%686'*.44('20:)%!3'-+7$'547-%(00-,,+3')9%//0+,*&-13/(?#*%'-,* '+8".-*60@/,,.+ &%*2/1'-"-4812*-*+5+4".5'-%*/3!'0)*+*+1/.,&'/.'30.+;#2#94$)</+%-'-,,1'.'((+0(".,)0$53(1'7+20*%$,3)"8*&)(*'4%0.>*&1*(23(73-1:)*+%8&1,%--->);3+5Հ؀ۀ׀ր׀׀׀׀ʀހـՀԀـԀـԀـӀ؀׀Հ؁-9('.1.+-)?0';//$)&06/'0-9+&4':'-+-#0/,0*))#.32),#/3)+%6/.0:#4-6)6075,*19$%)$,67.9/30,*7*--4#0)*+)&/$3- (1+'+0),1&1&!/%&3'2)7*.*4$-$/282!>4'&)1*-4'/&&0-*)(10%-4&53(-!.*/8)/7*+0/,++&")%1(-63*0$=!6-36*!--1*/+&'0"*/")!..+'),#/430-'('6/&#))+$)5+7-,#.&!%!3'&9'3++1&-) !1*/ -18#-+..3+=/-%9&-4-0'!@(822' ';3*01*.(+%,-7( &.'"(>+/.83B&'8/,5,3*&()(##*30'$3+)!.(*(/(%8721-/(/$6)$ '"*(7'+$%!$(+17--#<4<"'12* -* )'%*++"'/0:9('4%$,3): *0'%))+(+.5)((,*$//),&4<-01'.1+-0/.5;%3%+*3/+629#-,7&40@/+)5<-5:/+**ԀԀ׀ۀ׀ՀրՀӀɀՀԀӀԀԀ׀׀ՀՀӀ׀ӀՀӁ5,'G$+1*/)14-'& )<.A036,+)*%-)33.+9)3347)$%(* #1)5*;7)+*'* 0--$(&&9+-)3'212,*&(%3+/+-//('"-/.,()%0(3"1/ "",#!'$'323(,;..*,0&&+#8'+'%%/%!20-&4+/,;*0!').&3'*$,4,&$72;03&$#'%:'0(;0,/.3#2;-)<(1248#2#'9.3&'/*10%)7)/2)&/-/83*9*$5.)+.-%#!,'(0/ ,!9(33'*/"4)&2-++2-#++(*'3(/6'((/!-2/((,%1,)*$36+/"2/62:4-&1)&%47-!+**7/'3./,50&-) 6+(6//(+0/+<-'7)*1-)/134..%,/.$/ '(,7*8$/943/1/0*1*-#0/17'1!1 1'474(#)'2%&5/.!.:.")%)*-'(-'$3#/)'(-!*$++ )/$"7,8.,;7%*/1 %2(1!5.$;-,%"6/"// *0+'55/.*9+.0&36+*&)5*%53,.5,&&.%2/'8րڀـ؀׀׀Ҁ׀ՀʀӀـ׀ӀҀ؀׀ــՀҀՀՁ(67'%;,)-3-#-'1*!&9%.4%4,))%)/45%3(,'2.6$.>&'0#,(,31$-,;20511(/-"3*-,/,''427/*5/*90*/(76./#*/,.4)(%;#,+0$=;&*+*>'.%?&#!24(+(-372(+*)*.:00-3%)())0'+*0&0626'(!-#'3-3-,'++(/'-/4%:)4(*,527")()/*./-7/****/-#22*2&6/(+1/4323$'#$9+)!((/+--#-+*-%(51)$5)6/%21*,''1+%10.14+1( :-51())0 '121)50/0(?/) .151*)-)&'+6(3:$31#/7,1( #2-#$0-,5,'*0':!&)!2/ 06..# .0.5+5=)(5)047&(++0.%#.-9!*,5+1:'6$-)01)0.#.,-6"+*/,-/2(6+>(8&/+6<4,5-4+'- 01& 
$1#-)2/'23+7/'.-$'#31$5/10<*(.20$*:'/!29'!4"/10'!&+,:2*$/1.03(5,.--,5րڀրЀՀՀՀրԀŀۀԀ׀Ӏ׀րҀԀրԀՀ׀Ҁ؁3-.)*5%7;, >.2-*079!0+%/(&0%&1),+1-/#;%#1665+,,))+.0)%(*%&'3+'(/.(%.(+!+21-++"$*(10-.$,.,1#/6/)*!+/#7)(&,7"&5,&+(3%3(+*40*&,5+/.9.-:+'16&+$24>-360/9.=2*+24.%&) 1<%"/+/2/8;-&,59**24-'*0++206"#/1,&/+$.+/-#(:<)#0.+$*30&%-'.+4+.*25,(7&08%!822+-++5)87-'=80.7,&+6"*++$)'&)#333 **2-(/5.8)',':,'/3+#5- &+("/..'-+$9.3!(<&(3%/+!51(03+38-,0>/$-ԀӀڀր׀Հڀ׀րȀҀ؀ЀՀՀҀ׀Հ؀րԀـԀҁ/.4($$17/5'';+07.1:*$4,1%C&!79+-5*7$+.',,7+-$;/!#$53&%1$"/7.5+,1$-&/7)*0/''-.6 6.)#,#2*,/(,#2)(#.)(02/%1).*$1:25-19601)#'+,++22-,6-/3/%,9-3)%*-",3,1(2'-&-,1%)8*4(; &+:-),('*11*&5&:")#')$40(4+7/)61420%*+(),'8,2$4(5$%)&>4%1/3/+1"(-.,8%+2 $$%-++/!*'(2&)+3&)+(.-'13#*$;:0+$%=30(E+*/(3)/)2$)5'0+-0-'*'-$/'7&2.7.2/(14-#4)2"&-70;#+,)+5$-0713-/.8'/%!%,*,<.++=$.24)03!1")4'%4:)$)0''''9-&-(7,1/( '$,//* 5((%(/,-1*.+*/$/%/1(&"+-'1'*4'+*-*)01)!##!&/3*/6'*7..$*)/'-'/2"61-%07 " %&842*+/!&%)36$8$+10&'&$8Ӏ܀ـ׀ـՀӀր؀ŀՀրӀــ׀؀ڀրۀԀڀӀׁ#).&:!-8,*'..% *(&6/'/1'44,)+%1/,).)-81*(,.'4,+13+7)310')//0%*"5134.;)/-'"9/2+374(,'0)05$1")+.02+%170$("8!%%+.*,/20(,.#2',0-,+1$&4/"),$071**':'1&&2//!)1% ,$$-(&2'8"%1'..".+'*414)+//+)1.+/&!'&//40( .'#+*&&;(-%-*(%&3,210)-+2=20#"'4'-#.+#4)%.6,)33.//+-2-9+$$?*%.*-)7/3#+1(+&5+4$/#-567'5"/*//''';,=-/%-.22*'#)+"2;3)+$*0*6A)'*!+"##/@##345&%+/#>-)-,.%2)--#)(1,25*,//-+%/02&?14)-%-3 %18110+$!5)1;.&6(.92/)24/3-"*,/;(&):4,,1",$.;---C'#1$*$5/)3+%/$1'211%#2 (57'1-7!A32653/?+'.+.//$/**6(2($@*&,2-,31-/%Ԁր׀ӀՀ؀Հ׀؀̀׀܀рՀԀӀЀπڀՀ׀ՀԀց@46+',3-443/6&!:'281./!,57,).6-2>3<+%8+/01->56#).>(7(&1+2*-'",%(*73,..&1'/**"%('+/*('91($*-22,)4'%-3(-&/."/*/"*52('01, +* %'33'32.+,)-6+<.#5()+>+34/0-)>55+#1$6-86,()3!&=5+5)"+%7(1$2"69(11-:26!.<-/04&(%,&*--%1) *./))*'/+1*!+*,'-0$;%/%-$'*+$4%"+.+5**20,1//)0.35$"!<24-002$$$*!/+0//++0(''/'7+,).(6#+,%7, 
8!#,165-@/-4%!155"*'*:..'1+%68.'$("/,(4,!,20:(/))+9'17#24%-+1#?1'-0.-/)9)42*.)&(7!1.34-/+1,/$4,).*/6*-3!$3*2(+-%+3&'0=/5/'$60.010*-)#-+/(9*,7,:%+&)/5;032/$36;5;5'3('',/2,:/9:.=112(*$21&ԀԀ؀ԀԀ؀рӀՀŀـԀЀՀԀՀ׀Ԁ׀Ѐ؀؀рӁ59133'2?)-*3-5070%/05**3''=,+*7%%+-! 821**3(-'53/%(%7'/3$28(5>E")'/#.(-*+,*.%2 ! $/#4,(.*1.!-:)=*,00(0)4.)-*'($)&4*+'+14+6<+)2"3*0/-#7% )5#:/++/>5./,720-51+1!7/"*%&.++1*0+%3!,#+0 +%'%75(0,,#./.*&#$#;%/&&3'5&F1'*%-1/&4*/1)202()+1*4/),&01*&1-)1*/+72%31-5,C-1-8.%4'0/A?9+<7)/*1+)%'-:20,*94(/.!/.-/*./&-:+'')'* )<,#,5-$-,>& 10+*9)01/36$+6/+%(..+.$&1$/./+&*2'+"(91+%02,)-*(/) 3.3+2.$=7((./3$4*%-%%**5 +'-(/09/6.-1,'.:(.-41&5$-30',5%*,,)*314328)( /-1 ()450%50$!/!50.0)+)45'&;157&0+1;&2.-6%')$(7/рԀՀڀՀԀЀՀӀĀӀԀӀ؀րр׀׀Ԁрր׀րՁ--4*+$.- 31;,5&2&'*03,+/''0.&1$76)1*',2+/%3%*"))+5.(")74%+''3'&2.&/-,+3&63)4-(1/43"2/29"(0#:%8.3'9710,*8-+* #4(8 -3-5-.4+0'1"((/-6.(/&)/2: /.1(2)/0"1$(&-44,.%+'&30-2.+7!+$2).5#(1%#.,)*--,./1&1(,-0.($9,21$)!*/4*.-+-:'&3)(--/(7)200"(1//-"2%91/32/':(5)2)-$.'6-2)#;'4-+6+)'**0/1%&3- '4+2&)*+ 3#.&3.$$25!4'?';-%$1%'B#9*011$92.(.3-2%...600*(+883*)++18(+&3182:)'.0**4'&%/0/*,(6/)0$0)/!*',6$=14&))16)04//$07*)"-/-6.)4(#/; 51&&3%,0-0+50.*"4+2)5)3.'/*3&,/-4*&(.(%(/,0-(3-3+7(#97E'+/3&32>/-,-&)+/րЀڀـрր׀Ԁր܀ր؀ԀԀր׀Հ׀؀׀؀Հс..3.,6)-,.5.$"(( >/);1)&(.(/8'3-:-+80/7*25-*#%1.,'4%)(%0,#-6/-'))+#-/0,&))41+/%&(',1(-8+<,#$<2)+.''.'20%'' (-.,),1.3)83(89/-.+.3%+//43//-'6+17 /1/117'7421'"%'/-22.29+3-/#.9,#&3 "5*84-150('&'((%+(&+#,*.1.,03&+00(-4/##,6*(432($7+3,#-'(:6190!'?7"/(3*-$!0,$,07$.4.,)!'((#/202/--,0753+*07*+'-'+54+,#%(/&)-'.*)/,11&$05)">+16.4+0-;:(43'.<-+&0)'4, ;+=$4"/ +;'(/*-*938 %3'/#) 3#3/0%8$$1!/06:0-'&*)2(*+1,'' 0)0,&2005#!.2306#.,)6*,/(52!)#)740'61 +4-54(,':+'),++#02++*-!3(")+1-*-,'//):9+?-3%/4,,)* *0&؀ՀـӀӀҀԀր׀ǀՀр؀րӀـҀրҀ؀Ӏ؀Ԁԁ6*",):(185.+>!/+/.$0.0(,.%6*3.131-./-'9$-36%#0,.).00".(6,+)108!17411%$='51+1220($$#&%4$+3/8'0.%#0+"0*8*%&6*#;+*%'$',%"/+(73,:$.-'(1,)724/1/"-40-)9>%37%<,)*)"3.$+.-7/752)*41&)%=-++$43,8).4.#* 0$2*4&&+,$*"$%)6,/+'*1#-+))/80$,2$++71((03(2=,8, 
#-8((5++%2.:/"*7#++,8'/ *(-$'<#1%5,;;+-6#'3/#/%#3,$6;(;,5*04/&/*(%3#5(0510$/2,%) '*2+%"$-%,2/2=#1$/71296)+/5/)+/-9+8'$312'*#.6)'-*#3=/3!,*1.13)1$(++)+)1*%'&&/-/,%/+5?)-**2+)$+/-,/300& 1(/%2/%',,92.2-(0-*+6%9(;&5+&&3+%%2.32*&2,?77+71%#+'3+-/*4+;'0=.+$%.//3217/ۀ׀ӀՀـԀՀ؀׀ȀӀـ؀ր݀Ԁـр׀Ҁր؀Ԁ؁/&91-6/0*.$-(?/'+'/#36)3*,3(%0..--//(7#3 4*02+114!'&$0/54 :'$.-$-,+3/,5#0 02''6+/-4(++3/4.&3'(-'(*+#:-""00&(#* '7&,/'>"092/ .),-33",%1().9-,+*)#(% +#.-4!)('*2,$0//$*1)3-#5+,(-++!%*(4).%12--"''//6)/,)-63+0.3/+37,.93"(0'3)/54'-!1*/+,)$+%&/0/)@.+*'-':.)3,2%%../;3#*,#$%+'91.4)+/&1=+(/,-1()2/=-%/,+)!/(9'/7*.,*-&.21('41%0!0-(5/%+#/**..2"-1-*303378)75'712,%5*)42"2//3;%'&(1(*,2+6;))- '!&51/*3(2+13(,"9+/."$/-%(/!'$'(-6347-4077/+6,!6!.00;2#*.#&9)()8,*0*67%%> *6,71(27(0?-.'$#(7"%8///78,'9$ЀրՀ׀ۀ؀ۀڀӀˀ׀рـԀՀրڀ׀Ԁ׀׀ՀӀҁ/61+.:6+1/142)1+'/!0/3-04+/6:*/'%)"3,*,2D)1*-+**@!%21,/>2227'3/) %6-6#1+)4+0!5&+%2& 5/:)++'*)$.( *#$$//!08%#.!247/6)#/.#.*1''**-2*"6*%25-C2'+-) .)(7+1,/+&*-00$.4)%'*":'*.8*,#--'.0-'(1&3),.373/*)'&5*)0)"$5)+),5*()*5%,5$*+*,3)'/32)-#/+"7//!)2,#.!7/,63$*-7)0,1'$/1+&%.-%"*(&3(-*6/'23%)050/21#2'&'&3-%/8*3-%"+.4/'%--0#)2,'/$+#-#,$6'&**'8.0)+)2-@1.07/,90/+4+1$+."+"%6),,,/-.$''%$!2+.)"'+(%$/,"5/(1&/*0+#(.#+*+$1%=/')(,4".-7"#',")),65(.)#)+./?0.92%1+;2/(,)/+!2./'.7*/1;7-1&4-2("-%).6%'*=,-2#0*1,-#'&66/,.8؀ԀՀրր؀Ӏ׀ԀɀՀԀր؀؀Ӏ׀Ӏր؀ՀۀՀՁ/:-,)409-4*..#,/4&0+12##(1*)1'/0-'675324$00+&,'/*7"30 ,5+0/4++/270)6+)'',('1(-(*7-7*:&.1"(/,&2(.#;'/'%)& 667"/3$/5%*..$(+//*/1((1+5!7+%+#)..!/*('%5,-(.<>/1#2,%,3!0+/74*4*(!#+#31$-%'-0038%(/$-'+'-(,0+*"(1(/6//42.7&/-&27+)%1%)+*83@+(')% ')&+*'+*!,-,*2'5-:+!61,$1/.#+4'&1*)0)337@3=*+=$0253"2."%2,301*22'/.1#0"3+,7;$%4.3$<4׀ր׀؀׀րԀրـȀـր׀րԀ׀׀ԀӀ׀ڀ׀؀ف/(.3)$3(1/,...)+,362+:183%499$" &)-%')&0+19-500+# -!%-)1)##1#6,9)-(3+,12*"")0*&/.'++4?0*-//"406.-#3 0%+(1*//-')/'+'%:.)(/3*,0*)+721-7(+*!'30'0-)**+(%;*-15'-*3&1,#12-//440!11/&+-;33&-5$+1.(#-/)(92-7)(0+2#-/6.+601*.$01%.#4=9*0-,53$1,3*'$-2)$!4%'!++*"21;)#1(0;06+! 
+130&)(=#*/,+>4,'4.+/.0"8$91-%+./-*0*(-$--*!54)(.390)5%+/*<6**!22 ((7@1+,$)./%*-$9'5)'83,/+2)//'/)*'-3'&'%3--#%-(.(17$+.&#,0.5).'..-9 ,/*(&/.'3'3$($%131(/-5/(0,!29-*/2.! +/(/'#4&&/-.'!3,*&.-50%/ /(-/#-/&F4.-1.(70'*5.;('-+";*-':րڀ؀ր؀р׀ՀрŀԀـԀ׀Հ׀ـӀҀ؀׀ـӀف;").$-09'".'1 00487*$&*)(31()+%($1%/2#449'3*%+/,#%,;(.&9+"#&%/-./'4*&+1/2,;+%3+/$+,/%"+/.3-+:. 3#)="$4/)+0*4+.-<"0&',- &"*02320!)',*2'8*52259+5)4*&5#&(%,04.%(-5/)(,!(--$+-6+//#)-/,*./0*,/;$'.402001(.,1.-;00%/&.7:250*;&3%!5&9#9'*<09$*&+"'(4/1&7,().3.,4250/4***+'1"#'/&**'$130+'$70*>4+),6=>&%#2=(8,*')1;&#()'28(+'()-'50*/'(1"&$'1 ;%*.#31;*((6+@$"/+"-//-/&&&%407-'/'5.'(3)2-**%)*015,#/'''$11 ( 0.&".<5'0;(21*-(-2'*$)%'"2+$-"-+;$-*/**(#-2/42'2$)/0),'!@10)/,--'(()(, 7#,565.,77/-'2'6$:!;؀րՀ݀ـӀ׀ڀۀǀՀ؀؀Հ׀րӀӀրӀۀ׀ـՁ0*&4-1!/12>)/54&1.161(%34*5,02;4*2%#/;&+2(.03*1*=0!+,)"6'0)##'-0%:-+(',*0+/'42-4/4)+%)/69.*)2B/'./0/45/34+3)&,'('#%*1-(%+6-9%-.$%&&2!09(3$%0(-'1+"-/-17+5&**//1$!)7(&3!/)&"3&+5+0'.1(09 +-/#63*&3*)41-4/8+.&'(4%(+.,4.-,!('-'&-78- +/0//-3,16/*)+420*+:)1*!3,5((('5/(--.$'338/5'88*0!("*,$,(.+2$*,333-.&/*<'6%0&:.3"2%&#(!4"+@0352/('&%(3028%++?9-)5/5*)&%#(5.3.%+:6132*//3&+%)+/#+%*+9*'3",+<**29.".5-4,12'70/5%5('- <3'%3&7*--/52(,*<%2%'/,1+6:(.%((9%-1383112;-0+0*)'.)+*()+95/.37).&"02-?%,/,;:1//1.(:++25,ӀڀۀՀՀӀ׀Հ׀Ȁр׀ڀ؀ҀՀԀӀҀր׀ՀԀс2.1047/-070)+4$-1)-$=&,**50-0<"&-*#9*,1(/#:("$).%2/+21/".040,1'0.022(&02$)2-,,+.2444433..2/)&'6')4**6:$,3)8,-5%4#+")3.1"%%1($78/6..'2-'*-!-7.)+?/- 5-)&36*%*$1/"!.?(#:*%35.."-+,:7-+."/66'%"47*2"%46(*%.0$.(+8,7/-7..-5-.4;,.3#6'+"2,%&$,%7/,323/%10+5$&2 )&3+)/<4!13)7((/$+)('1&,6&(3!4C+22++0..+(', ,(-&;**/(3.-"(*4/6?88)"3,+??81&4+20 %'*!)&)+'3:#)677+(10)6;5%1:;3-#(*209A/--)"-,+08,.!2,<4(1.&(#-,6&* 60/71,#%7)*7:7'(/%(#*-"#245.1=/..41+6,-%/6.1&--'/&0#7ڀ؀Հ׀ՀӀրրրÀ؀ՀԀրӀ׀րӀ׀ۀۀڀ؀ہ08?!+/@570&-7)%3-1+,1%(,+-'!/1%#79+.&-8-%+3.4(482&/*!('3//(;,,3)7-)5&7/--)-/,+*82*6&(5''$--70-$&7*'!'8//*64&&*.)&**+/%5-&*-338,4%&!6!?4%)"(1+4*#$!),"*=0/"-#/56)'7.&&1)$39,3#+&=34)61<"8!((0,)81/$1*-&1*4+),3%.2 
-0-5.0.$)"*%B068:+-,%3(,14;0)%+(1,=-?(;(*%%!)4/24$%&$),;++'(**(%230,*1*"*+09**03/9- &.,%,-//#%)4"/+'65**)5&+5)%.%1#$235@#!%('3"14/$%%1#/%0,5623+6'#'&/+/-''6; @-1((+"2.-!,604)'1+$(**-"/-+.9*;1/(+1/13#.2)1(:,/:21),/-.))9րՀրҀԀԀӀրЀŀۀԀԀ݀ԀـڀҀԀڀՀҀրف'(5+I*%'0,!2(,#+#0*,$'-."$//'-+.''!'7,:'!,'.'8$@4';8!03*5.1 +*3 /(0!--6 ,5".!5 /3,.3#:5(9'#&3(%.9)+/&3!/,&6.9$&--<"'(*'/)')+,13#+,/"/)0*0.+. #(..+%%.%-//053%&8+'+4*+..)!,,$(1/,>-3&&-+,3%&"'6-*+9,'4+'(+",4.!3+ /-(.#7,-/#1+2%70+/0++,;143'*9+3)4(/04"/&-#(#*0- '6-,-$!;3,$=,-?'@+'!-+4'22'*(-,*3:#.*5'B1.-4/*+3/(69-.+/ -90&21/1' %1,%+)9/0,&0)%*8#)A8:).#42(&3)&,.)-&&%,(,410<11-"1*0'7. &"(;(&-(:/#0/'-1++2+8%)(%*.0//%!(463(&!#134.+./'/;/-/9(.1255'*'8+39-02+'3(02%2)576+0-B6/,&36-:(//1*.(%*&-56:-;+?րՀՀـ׀Հـր؀ʀ܀րՀ؀ӀۀӀӀЀրۀրրځ7-'/-)/+#&40&/;303"%/-80/*--8#1&&/.6'2+) ;47,', /34* 9-2(/,D,%,-!,:4$'1;.;*-.51/2+3*!&)(/+.'$,;452(,2+)/,/*,$'/('*;'* 8*/&2?57%51)9,#-0)*$512'%6%3942) -$%-)+((1A3-/,&1,!)$ 3#5)(%7*%$$-A+*$/B(:.)5,2?".)3-11$!36/-,4%-%1+;-//(5%(#-#0'1/67%,9':"///5,-+)$!. 40&/1./'&)7/%34),"-&:.3*(,"381'/,//!-+''121( &$.02*8& .8%/&22'.,-4'&;587$)&37;$(:58;2/ &08'-/)4((-#6*2$+4)-'..=-4-/'7/?+'1,))#5*061,+$'/('("-923.0.*/3,*10%+ +&.:)"),(*8*3&!(//8/1/("<$24)(1)$'+!/*'A/*752&%32'0))/+.%(01,**4231Ԁ׀ӀՀ؀Ӏр׀ڀȀЀӀҀۀۀ׀ـՀڀЀҀ؀؀ҁ.( )%+-1++"+7.&4131-#.-H/-32%+#.!-4/2%#')-5*3 953-.,/+*<) !10/.'$'/2,&.40)8+&.(2()6.+7/ 57".*$*(6/(%=*+.6"6,66.**+-(/0+(0'/217-*(-6,0)+*3++-/4+&&1:/4!*"1--350%!,+1&-/5!-)(046.%)'0;,+36:20+.*'0611A,/*3+&5-&.-+,%8$3#2(030-( /.5.,13(+)(5.&3A/+"4+-/%$7'1.*%"+/-)&7)% 1*+$),1/4-4)% .() -3$%+.)43/! 
0" '.#,%'0#/1)*),+*/3$+ 26&+,%!/<8-+,4+!,/+(*-2((:&3,+$/".01-"()">*0!$114(+%F$&/2713#2!- -)/35&%)$-'%$0*-1'!/+-()-/$(."/06+$.5'#'),24"!8++$+=2952?#2'(024),/9 ,;*(#4-7"!-,)82*5%3)'&-$:77!68+.),7,.@0,7ҀԀӀ׀ԀҀڀـπƀӀՀ׀ր׀ԀӀҀրـրрۀځ,-'-//-*51,$$(4./&,1+.(8 1;)#'80,*0N+8%/)!/$=,+71/%!4(2-&((.6/17'13*)*.;.>=$%+/2&)$*0./-(/#'/(/+4'#4%)55**+++4; 3%*2-0+2++)/--9%+&/46%�/.),54,.2!:+4"0,+2?+#-#0../+".''9.0'!(-#:"28'5)!<1%+)#70$.(%)"-2/8:,)4 %,)++/'5-:#&2',1+ $! %632#+-1,()0&.#%-)#&1/.#3#.*.-2/0'+5'4A),/0-'.0.$05?3*&',.-2<$+.'3#-0,-.$*54;&4*4,90*/),)0,3(/-+)9))-42)-2+4 %&0%&%7,1.:.5&*;ـӀπՀՀ܀ԀـրƀՀ؀рـڀ؀ۀـՀ׀րӀրՁ*04&.8'&8///0-++,34-<)8%,*.".*1"4%++/.")&*5:7&1+.,4(,*<,1-,3&74)75:6+4/*4ӀӀրՀڀր׀ҀրՀր؀؀׀р؀ՀـӀڀր؀ف%5<5,*1>%*;9/+/+.$%(6)*4+#1,-6(,123'%/-D-86#+)6*2/+.)),'22%#15+*512'./0-&6*#"%!)*5%23.%0"+=&$>$%7+52%)/7/13/2,'!#$*../)9/#3*+$&$-'6!#+172#!)51'+*4%2+2-+5)9105>29,%06,39243+34*!3"/+3%5+6''"++$'< 220=+)+-2).-75(&))121%) $/.4>-11,-+%)+23./*"(/7:2%(%*,,(8/-*/)01'41,(1<0/-2#(0*5*5)(5(*1((%,'#(+,/+-.*050B43(.0**7-*'"&6=18#+1/#/+8(!*/*7:'+20(-.0'++'0'=0"2$++07,."-),1"*%,!'%6!4*/.$)951!5:7+/,+)&#:+$*-)#&0%&'+'3)#,8-7/.+8/(4)$/)0,-1/(4,)1&&+0+/&.45&&" (2,(6!')++"7%:&+""$--$*6).-'3(+0&0.-&;5A/@,ۀ׀׀׀րր׀ԀӀǀԀ׀ڀрԀـ׀րӀՀ؀׀Հԁ&)()&>/1?*+%,,(4763';//$/)7..;*%//*2-+.5%./,-/&/6-3:"+ ./)$(1-),2111($)*0&&1$8<-1##./+/*/6#7+ -)+&*;62&//-57-0+'0.*05)'90++-%/$&0'0(&4*5##*+,-$+&&)).6)7-,+&*(1+))#/"47&4%$,',!,/424.&%*,13'57:0/03C4%4%*%-(%#,-9'%0!1-,20.5/$/0&,%.+/#(-.15..++3+"*;7&00%1,4%+..)471--/(-!*<)/%4)35,/,,6%8/;7&'*"/)A1)(-,+>'$--31.0(*",54''($=*:.(5(2'!%(-0,./2&553+ *1@"*23#/+)@%'%:*47"-).4)'/()-0+-,)0/)01005/31)8*'&/3-'--u8'(2)+!.(%.1,'7%3)3.171,<'(24/(-5***(3/#'('1*/143&-(($&,,:1%5/-93+:51,*5'./2.'$#3,%(3'/6.")!%-+2*1*0(7$75(3ۀ؀Հ؀Ӏۀـ׀Հǀր؀؀ڀ؀ЀڀӀڀՀՀـЀہ%($41&,.//4#% 0,3# .$'/0&+--$46/;3,6//A#/ (%($%+7-5)$)38)(".7%#:)73%,$-$))&/3,$(&/-&*;)'+ (+;)+,!(6*2/%'(%'+%#*/2.$'+1(+'40, :+.,02/72%%)5-5))"1--76>10 6/&-$-'4.)3+6'":%$#+6!9%8,.,0-&12" *-)2.9)032/8+*3)5290/-0+'.+.,--5(++(%-#50+.0!2+)+)-$ 
,/)%*%+11I!"*%)'+'!6++>2"$7/*;$%*,,'54+4'/%.;510$4*570+()5-=45'7!(+$14.(%:-M*(-'/%30D%7/)41*9''03%%,1*%0."-&=+%0*>./(+%0$$$*4557832.1$&.13..0))-%,(5*2'0&32>441&5712.:65-/#-03*1:-0րրр؀πՀՀրҀŀ׀ЀՀр׀ԀڀԀ؀ڀҀр׀΁,)05&56* "2(+7($,//,.63,+)/ &&'+0/+!*.746(.-1)/-(0-//*1;-9*%'3(3,2()E5'+3/>8-9&#-(7/ $'-4-053!(.647)*%$)+'-2**.+321+&8/*=+*#'-0((4",-!*%+3 -7/1,$& (+1#2+/7,-0%2&300),+*/@<79!+#)//)0,&3"/.6=92)/1*,/1,*+2;/,&#!26-7+48--3 (5%-?#59268++/60,)*#3/-3+72'!$."5-(&;/#+%0-(((-"3955('!)1'27-)-6*%1*#%,3+;1'7*-(9,0)/*.255(1)-/#+*!0!'&!!*'28(.64,2*#103&2($*.*&/+39++))'(%-,/+3/+*,.-95"65(223.,,+%.1/r:08(.*+#+02(.-"$8'+$2%:"0/+*+*1*',,"%)*),703-$-'.4#+*&$1+''9()4>(/6.((.7.'9)23, #,+$-2+'%1&,7-%%*-+2%)-;7րՀՀڀրրՀրՀǀրԀ΀Ҁ؀ԀڀՀӀ؀ـۀՀӁ*/-0/44+%1."$200/+*+$4,23842;+!*."!,).#3/63914..3(+',+* (3-70-32'+2++%))4-31&1/.,)7* 0%7,3.(%6&.(#)>)*5870&.+20-)(*)0(#)'*/(&6#.2*#*,'G/.+#+!&,%/0!(2%*'(%&$-+/&())$-0";32+''0)+)')(#-0/*.",%!'3)(+6.1/&())+ ,))#3&7/0,+5,:1$-('1/62+:9),7*3##+4'./')-0"*4-+>8,(10/+-.-.,.&)+*2*%035'+0#1%-(+6*3=30;(/75,/-,':'33%")5".83).%,*'58+..)A.#0--)2(#64,2,1'(#+-332$25/:/-1.-0/69+-01-9'3//-!'(37+9.*(21-<4%/-#10010) %$)9#-3-)+,)(1*59 ;/%7## ;**$(%5.; /)/3&2..&)+8.#".)6(4$705540%+63'8161<2%"53**#.-20./3=1-6*03)׀Հ׀׀Հ׀Ҁـ΀̀ҀЀ׀ԀրӀڀՀՀԀՀڀՀҁ81/-(23)7,92+4)7+*/)7-4=-(%*)'3-/7//.!-++#'%4*9,21+/*9,2(*%6%3+1*.0$'#0'',7//#*04)*&<'4.- 424+&/&+2#3>2*;2*,./("#*)'#"*.55-0='2!+42/<'%)!'1( ).2/!1)!0*'76)/2#7'&;-0')!#+*+4&&-()/.7'*#2%51'3/)46(&(+. 
/+7+632"')(8*.234+*,$*1,30''>)&.#+7/$"01'&.$"+.!0,+,-5)()(/:7/@'$(#&-/-'2,("-142+5*2!,622;%G0.;:/097*4*)0-**10$-*9!%) 3)5+3+,#$ )+.1405!*+B4&+'*,306+-;/$27./$")/''0)20(167(17+**,/2-*9#17-23""3(.1#4#3+).)'0*!27,<.+-+'-.6+Հ؀Ӏ؀ۀۀր؀ڀÀ؀ـրր׀؀р̀׀ԀӀրӀف8)-/)0#0+(,./)3#%(--..2/&)).(910(2026(3".70)#.6/3(1A .)1%3).'3;$@(-/&3.'<0+/;*6)'+/2#0/,+##4//5$-37%(/(((3"-+326(/+3)!-'75/&60)1/7&2'3)2,1-$+)%,1*2/1001//,+*-.2'%,30+/6*/3.+447-*7204.8((>./)402*))10-/.!"23(8' 3**(-*<)+$-*1826172;,+73-+%* 24#3$35/.'+30/')39'/),1-170'",%/6,.,%3160-260/-'87.-/(-2)/-,622/*%+213$+556* .7%-)'5%*9# 8+#(:4%8;)9)+/$%%%#+)(',&56'+>,/-'):;85@257<9-.%/7-8+/:.%*34)5;3.4),&7:)*#&1#04/)=;"')'01-0)!0$>'+#",2'(&5()('12"/+&/.;./&%8;(,0)./*% (*/+1)156.,'41=32&.0823*1'8,рҀ؀׀Հ׀ـр׀ƀـրڀՀրҀـրր׀Ҁـ؀ځ1/.,=*3+11('7%&%*+*&-))*(0'%0$4'664-6+.()+/-4*+*+/0'5#- 3'##0-,,(0B-8&7'/. *0#B94902/(6+''+1+61-/003!&*0*4.',*:3("(&=*0'",+5+3)217-$.*)'#.'#)-)& &9-,7))4,0*,1&*"-$/%B#*+95%-)7*33.4500/#4<*),2*.-2!)'%314*)#)-$+4+*#1&/32*-+36*/&,*2#!146*)244+3>%-.0/ %!1!6&++,%,&.4121.15/.)'.%31%&,-+)!=3/-3&5.1$%310* 7*&$&*714//!*'D,"/-(2.,/?/ *".1/))+/,0($#0%)1,/7.' 
-#4+./'-/.=7&.27246B87+512.6+0!.-3-+$(+!&/56 /'2./@/+*.#85+)3-#>+,/'61.2-0-#/1*0,$36'+(%'*$&8#0+2**28(+275*-26-3/#2'11060+*8+(2,:+'1;+0**#+րڀՀ׀ڀՀ؀׀ـÀրՀҀـ׀Ӏڀ׀Ӏڀ׀ـրځ+.,5+9%25;)-0(-8;.2/<&&13,')/.)-/&*/1(?*9+*3/,+',%(.8/)825*#1+(-,*,2+<'+4*/")131,2.+*(26,-/1'100)208***)(/8*71.2&6++2./(+$$(&)5"**.,9)8(+(.,4$(1#)/,.&1#/#,2-*1,0,0#.)*!,!-5025563+8):+$2*@%)&%('%4,5)1,%+!4),"$4.&37-67,.17,&.4,#32'8-$+*,01/-.!(:/9%/./,3/357$'*.(,6/#3)(,152"4!0)6.+,6;30'1)3+/+$ 65+#=&+(5+#0.F("$+&'((",+'2)(&7%/).0 9&/8$('$,1C*0'1(329.#+''/)'&*$+-,<%(('3'/2$!.0#,./1+'(**/%(9+%$+,' .%'0&2&'"-,,)# )6.06+7%7.01##./*5/%*..4.40-,3/13/3)79'%',''-,./-2!*"+-7'8490/,- +(+),)-&80'-#'./(7,&+49)*6/3?#/$41%%/3,/ *515)1 0./(275'-32,)(6'*3,#&80#*0+''/'-*&?*34.&&#"#,,#,**()/'3*,.9-.*) 2+/$%02/(?'6'7$/3<:&):--/1<;PETKBG^G871(=(/(C&$>.+/13$)&,11&/''(,%*+1+"/&'!+'#'.+//!%+)#4+)8/."1,--/6'+3/6*+81#4=+$51!/411'11,(&%!-=23,:/,/'12(-9,'1($2(4ۀۀڀՀրۀـЀۀ€րـՀրՀ؀ӀրԀۀ׀ՀЀف*5&3(1,;&#*.39(/)5&0(-/142. -1&+-&3/"1*+&$))(-2)%#*$3/6,('0$+$./4.5/)+'*(,,+2%-)3+6'+.*1,/+.5+'3<4!.'0))7'1-)1/(0-/,' +,(,.*2*)0##*'311. 
%#(')* -/:##)0+$+#-*/'+%3+>"#*(7'$.7**1!6$>,3///*#=1/-*8#13*71 1+,&$1%48!0'-12/)''6,,,7(3.15#+,#90!'%)/0.,09%%(- '%&3:1/.3//59)26-)"2*5*!+'/..)44-+!+08.;1-2-/-)''7%-*+/9/'#(410"'-(0*!+3"$-.0.052%.+$:'3)--&")./*,804*8+,.1"$5((%+16=AEMIaA]O\HK5=1+//=.1*+%3-.$$)3" .0.'2 2/("+7:/25-%&-(: 70+/7-12, 001-%9&/0,$-/*7$ ./<63!'1%7-+!##0)+,5651+#:629-+.(*+4.&/!&2,2ӀـՀրрӀӀ׀рɀӀҀҀ׀؀ހր؀݀׀ր׀׀ԁ".:-=-!,@12+'6&/;0-0#-/(' 252(+=.2"3>1))'*!'2+@-.4-(// &)36.-(!9'$839350)#:/,(+$//4'/$+"//)3,*)+32*&*+#/500!3%%#!)4$10.3=1"+()33&)$&"*$/&)%+5'&)*)+*;*'*4'#*$4'*/%,29+6+50:3)$6$,/.+/)%6(*(#/2!9#;"$%-3'-+..*:,7'8:00-(,/8%.$ "(2%&./,5%0.%(+*6(*3,6')/))-2*1#2/*+7'/50'02!"):41)/9/'3'3(/!(/11/,*0:"-&/%0**$/0,7 ##-)$2,,+/$/9*(#,.(/%/*#)*5/%*6&2- 8(&?)2'2-4.9*;46-.2+0;(-6)/D@X[\NYNE5Q272335%/%75.+)24;'*'221%5=%% -/1%1+"2),)24-2.$C%'*,0+&(*%4-&2(*-+4&*&-(-+72,?:$%9#-1("%2.+'%42 .1*/++0,5<)1$6/<ـր؀Ӏـ׀߀׀Ҁˀր׀ӀӀ؀؀ԀӀր؀ڀ؀ـՁ6%/54*&44/(.1)51!3718-"5#*%8,%(*)71,(!-4207(# -(-+- 1(-))4,6*31)(!+01)8"-;*,#4,,'#06/#9#0!47(%*,!!*-%3!6!#6)./')"2,/+%*55.42'47,&&-5 ))5&"#$486&0'895-%).3.87#(,$(.)3<.@'6%&,/#,-'3250)--4"%'/1..6*1"!/9-#'824&/3(%3$17*#,*//#4-",/(2%?- 8-!)#+#042)+#0+2?&5-5//)6'A.+:%. ..../04))29&6)/0*,66/,,/()+- 3*,2*)+*,.,/1/1),+/.%)'0"*)$2&/23"2)(*"03)393):-2 /%-6.7%00'-2-6486C:E@RONbT`QNR6;3&1" >7+++016%$'2%2815*+)'1*52,--00+,.4'1(6(#)7'0!! 
-77*-2#8,+0.+/+1$,"*!8!/+"..,-3(46.%.."0,/(27$/,-..3505)2+(݀ԀЀڀрՀՀ׀ՀĀԀ׀րՀπ׀؀ـրՀ׀؀ԀӁ+B2!#=6*/'*(2.8(%**8)7."/8 *'$."&98)32+.3 6+-/5!5,2!--6)'//#/%/+)##1%3##+0/5*)'8!.20/*9-&91#/12"&'%4,-+<#:)((*0',9$9%(/'*7)&1!+(.014+70'&11&2*#6,+$2,,'(%'!/(/)1*+-(2*!*16'1--/)-#.-*".&02-)/&,;,*'("/.2-6*).&05%51$.)35;/,) -34%#,:,$2-,3*-,+%0-&-4'*00!*032(24)'*)-3-"53*41'/('-0'22)26*+#)&).-=6'5(-6*&,/05'#%1'7"*!/*$,$/(+/."%-+-,.0()/&!6%0#'38.(8&6$$*&%.,6.&&-/./%73))4+F>4:;[OWMKTF4>51,/*8,7+,*5&)/-06,?*)(+/&93,92/*.)70&9-%+35!-(+*405/3$,.2--.,,-0:$2/),-/720,(.2"/ #%.#/)-)*$./%1018+)@3/2-7/3&.383؀р؀ҀрՀ׀ـՀÀՀրрՀՀӀрՀӀրـրրρ.1:>1(+02649&02-&6-+1186(88/'9)/!5"')8#2*,(*''-1$.)00!$9+%20'.500+0#)&'&.2-2)+#++0'&2"%)5$&&2!(18#7'(&,*:--'0*1!(5,,/'*(*-%."2.3-'/!*(',(-)/15*/*+#!.*0'(644"+,&.$(6*)6)33+*.6('9;(-,.+&2&4"04)(-//87%*".,1-0,<(&1.2#8% ;*:32*26%%*36$)4.+& 3(%41+797!%&14+6,:/.68%!,+)/*$-55*-2-('2' 0'.-2%61!'2.**,6,/)90)1+.2''*4(+7#25/+8+20(5"4")*-+/23--/,/%,4&*!3(!%/&2/&('*6%0-40:1/2"(:/8,701CAbII>>=+9*&=%45(5.$'9%/',*#.(0(4848,,,$1-**+-'%;.;)'*4'1/'%'6'5- ,1#,53++3/0+.05/'/1%5%5+,,6)),.-/1*01/2--7,-&*,*09&2;3)/)":*'ӀԀЀր݀րՀـրɀ؀ڀրր؀րր؀؀Հ׀Ӏ׀Ӂ-7,25,!+6+1$+3&/+%0.0/((*19:*-3'+./&0/ -.0228(1*012%#7+)$8/+<,0:./5!27-5/5)-):3,/236(5'!$+.6,1'38*/8+/+2&+((1&7+1%$,4%$/($-,72221"')*)'.*1%,+%*+2&)&2".(,,"&/-.-.3(4(/*."+-13,$%2 !"' -((+//4'/5;./),3*9/&://'$(),.2/,=-0!&04/40!/11+,)/:/33)+"2*881644/+<9+''#:5(/)&)%*)-+3:03&4&,&=++&'"(:.'"+49 /#+!1(24!7+0#&*2.04$'./*'0+(8%-+*/#)#',)!,7 *:)2,4+264%---+%0$7./2,/$/'%366/4I# AJTE=@9C069(/#(*=A'757*-0.)--2&*5240"2,%.61/-*+0/*-59.0*7$*8#;(7*-6.(01'7B+.4:)5B1"511(52%2׀ـրӀ܀ڀրԀ׀ƀۀրـ׀׀ՀҀԀ؀ڀ׀ۀـׁ=%04<)2!-15+)C/3&1$(%!%-+(6!8;!90)$6%%(-,3(-3'7(7-8$4(*2!-&63-+20&"&.(+-(++1;.4+.+%2-,7)'0&1-&%''".5!.2' ,,/.!(+$&(/*9/)-(-'3&(/6/))3*(450///'*%-5/.&2.2;(-,2*5*'3*.0/+*,%85-1<,,/-)!! 
-/9#6/14&1'/,!$+-&-A,)7,1/1.(96+/;+/* (&33);0+-.2&-(5)&4"-.:%%#.0:8# ")2'%7-"(5%.4/%2"+$2+*<-5/.32(.(9/ $%>)/.%,$+@/-/3.*747--,<*2)-,+8/ +)+)2&(!%3-0 /.-"*.$05),- 3((+'&.(+*'+29(+52157/8;:C*>F?;,@'5146(!4*+0#(!. &*7'3* /#3/%5:&%+<(/&,1*.,1$12&!1*55-/)55)&')'9..5+-'(-5''7#'4"+9%7090%3"'/&"1(65;1;A 4*)%)3(-:(4*6ۀۀـ؀ӀـҀ؀ӀǀԀۀрڀ؀ۀՀـԀڀـ׀׀ׁ-4..)*:=)04#%'3791+,'*3/87&*&5:+--!2-%5%+570/'4/*-")"*=/)!68)$*%+',9*0 6*$$09+%3*- 0'5%)2) $++%'#.,$6(+931('/3')/+3">-)-2$$1/1./8#0/+/- 1)$5/98/%)+62#.5/*35-03/*49-,%7$1#4)2:+.,2*/-2/-/)/+*.-1#13(-//'(%/)#1>'2..6&01@(-7!06&/+6/1/*$#(&'#%5*(6.-401*0341,22,1*$(4&,1&-'.5*,/5.2->+(76/1+"*&2#/14+* ,/:*21.-+&71/%..,,.2*/3+/,&5,&,-''&%/*$$;6+/,15 */)$)'&#8"3,1%+3(,12316, (284,6.33"52!":5+)0,').-!-:;3,!#33(-983"25+4'5//1)1-9813',0'+*(#0&0'0102$7!/!*)8/46$%(''%4''5&),(39,*"17"4" )-2/1(+(&.1;*4;*/.+8:7&*'181$/,.7*+6+&131(%"*/()/4*.;,10,$"1-0)2/7!.2'27%)1"7+$).,))*.1+3.(-2'"()$14994*-07#,0!0&,'(,"0-7(!2((&*3 0$0);/<#&+)(+(2)2#1+'-, ..,(*+#)3///3#+$"0',3#)#-,'+'0/'*-./,,3!3#08.+%)/7673<(-:--+%:(61//22-((&4,714,$"0.3/'/#2&0/)##)).#/6-:7;1:(;9%(5/37#5-00/3'61'0(:/.2+'&32- +'+:+/.*,220'+-'6"0*%,0.32/2$()2(;,1:%27(,4րπրԀـ׀Ҁ؀ӀˀҀҀՀՀ׀؀Ӏـ܀׀Հ׀׀Ձ5$--#9*'+)/0"33,/*-$0(320,,/7'71'3--"-1.#21)2-"<$2-9+/()++67(,+9,1(/::(""+3&*(:0+#1**7074'-#+&&5"*+0>(2-8(,+',$.24.67-&.1.>$1#!5()%&11+,1",15411.+6!(,'00*(1+0.?/+5,.%%/($',*1-*#-/+0*-",//6)3-+"6'4=,(%;*'%4,6'*:3"2.#7()0%')128/*7(52',(,0"-6*,06+/2+:-)%(-+'0,-0-8.314-/ 34.&*0= +'H'24')!%+.=&-).$, '+41$8=-5#2%.--.*.-)+,17.#-20/'4&*',)"1-4+&#&5''.6514"&6.3/)7-+*+56*&/36:4/5%1,6=2),46)7&3%7*////-#,0'.''%$)7/:$"0':1(%1+."'4#'905&;5-5!7'#)02 /505"6'1/4+(/099='/-3%23,65"0$*/-*&(=,;07%-2)%3&/./(Ԁ׀ՀـԀ׀ـԀҀˀ׀ԀՀـ׀Ѐـ݀׀׀Հ׀Հρ.4)(-1*377!$41/11),4177--*1)$)75;-7+=0()'%-3'12,+030%/6&!#,2%3%+%9$/ ,/,!2(.%5$5)-9)$:,,":-/%%?'2-+)#.3&5-%-3%5%-,&6),7&0/2,4*#'.+36).>6;$),+,,-)),90%$"//2+&%(.052+%0%*-'-'5:433*/-1#$/,<%).04+-0,'*5(,/!5*2&*8, /(/)+1!,0(*05%/55$0;8/*1. 
%8;(3+.8>,/6,#/,*.3),2)=='$..2,+"43(*),57&*%.,%9,-2--%3*1/&.7+/'3-(5$4!#0523(+/4;&11/20.6"417374(%*/32/0)(%$1$6+3.'&-/,128)$+-/133*=0)%)&4%51"8%,/"'')$11"/")*#3+2,* 1",**+41(& )%'7%"(1+7&#/.5.47'+/",*2../-4*-#-!(-',*-?""104"'6,33*1&36#,33(46720..1!7-*7.'01-,0.2#)6/%47/)/:>)*-",).,)%00,11+*8#10/,;/+*"$725%*21*2*$41(.3:6(4355$00/*2+.$$/&2,6$7,.3-*11/9&*.'3-)+61.%/%%7+Հ؀Հ؀ڀـπՀՀǀՀڀԀ΀րπӀـрր؀Ӏրҁ/,*A/*,8*4-0)0--6>10.7,30$55/-;0.*6 8,$0+"66,/1#+8/$+'8'(&/.((2(4*/(3-7+)/!27(-(=%+(/-2*";.%*.$5%/'24'/0--!7/!&;&4& **-+/',+")*5)/.!'.@.15-))-,/A#0',-$0-##0#151!5/%,1 -346+"%*247*,.3#9-*,/#/*:',2(%4-4!.,..,02613/ )-)%0!$3,36&)**'/*3(."'/')/9))*/$+ 1 ,:/ -$4%12,84++%.%"06$'3521&?&).&/%-""&$,-'+.3/. /,+-'.333/--&+.72,$$"/-&402124;+22.8 $%+(0++<2",13(-(2=14.54/+(0)-05$'0**$-)($26-)&4($%6,(+.!/&1, 7%,;4.-1!/13!.(-*)(+-713-%*;-+0-+-'/!&/+#.+3&1'(2,351'!0-5%*+&$,%&+*+/1-.',..2'D'62<-+/12745!*&2+0!/.!3#<10 !1*-.32/$#5&),# !4/7$%57'1$-/&872''%4$:6#%$5#&$&,+,'//(07*#/"*2+0/(*,3,7)&3&'5/5375-22-/521,7&0#*":08,1,;&(*+3'1:-7"*794%;)*%31.$24/!+$3,%)*:5.)&%%--9,7.#)84)(38:0!4+(%5$(+3'%%4,3/.6-';+%.(!53'%(8".%$3'#79%.9,6&)"1(2C+"%+*1/021?*),558;9&2+'+*,%1:0-()/&3+#.6%%)01),0$3/%*++#'%2.)'586%,%;)25*'-%/"48)$-&//)+!))/(0)0891.40+!+.7,517,*,+9.63'5:&3%/1#1.)%)3/=+)$ ؀Հ؀Հـր؀ڀـŀЀӀ؀Հ׀ӀԀр؀׀݀ـ؁)-49":+,$$.*$)*0+6%1'=%$3&)&.2'2-'3*+0-5'14-.:<,/-/.8!%7'.)0-.,#')11&/0;4'+:.4)$0$(%+7&%*/&#:,59$ %2%0620:-0+81'<%1%0*1##++5())"&-*$ 002'&5/4*(#.)-%!.23.).&0-,'."#9"&1!5/7#*21%7*?'("6)28#-0/'=2#5*/+*10"./35'-#'#+0&*1(8 /(82,-A/&9"1)+-/)(#,/,#0'62$ 8=820125-*݀ӀՀ׀ҀрՀՀԀŀՀ؀րҀҀ׀؀ր׀ڀԀ؀ڀ؁"738(!,.3;'!-/.-1(110321+&%$27('')%*%01&3,+!$01'%'+'/'$&%%;+0#) ,5.=44,("(+/*/",7 )):'(+%/1-(+)6)&)5230!797(",)0'"9&5,, +..#-#/)(- 5$/,4(%5)%-+-/?+)"2(1'*>$%*#23*!( ))7/'(1#(*(*22$'%.29.(-5%,+'3 $14*!735,167/0&0$#&75(1%-%2)-3+'"+-9%.1*-.3.!&)$$'.(1026:3-%3*#9&'- 68),*/9+0/8/4,%*%/!3)4-4.(+)/*-1,,)#$/!-'&3",!4&%;,*())//)):1&00&--,1(+5'#)-3&.).%)/$0%$)/&%/53,'%2,,/+,",)3 *27 *)',!,(0,--7,'4(.03!./:54+1'**-!)7#+259 7)/4A2352.-246+5553(.&09 
3ՀӀڀҀՀԀ׀׀րȀ؀؀րӀπۀҀӀҀրՀ؀׀Ձ-*5-0$)403$+A,.*)6,0().('..9 (22*.1,+-0,'0*7%*./)(7+/!'////.+&4))$1+1$5/'16$*!+*5-H+8).6#('/("'+"+'9():&/(2"1,"'+-5/-1'+!''#/'4$20;;*&"/%3&5#'""-2:''6-%,#('(2$'03)%44>&$)1'>73(+23)-*,/%%)'"C)),&8*:<)'%0-,)0;,2.0%,39-4++5"-0/(++)'/&:&/46)'=0,/5-02*70-(0#2-/,-;>.0 --+1,+3%,3'/+1+80/3!-6,$1(/$/-)+'&-"*),/)&!(9,#1)%&*,8#&,/&2,3)%)-%4&(5+&/4(213+$2*-3&60.-6.)(//3$'(:+/3/(;%.&7+/(.;8003$/,!*5-,/0&1D/0+2%$)3"16: 053400*07,8-5/--!;+!'&"1-)*(5//$5$$( 3)7.0&243(*/)$;'*"7-)0((C5)5."56+6<2%+473+)$8'+'؀ր׀׀ހԀր؀׀Ȁ؀׀׀؀ڀ׀ҀـԀ׀܀؀ҀӁ.&+>*#-*+5..,&002.).:0.9%--<-+-$; )/.'22',.9/5,'.$%95%- ,(.*5=2(/!42?2#'&/,'+,5#5'',&922.3,'4$+29.:=.&1'0;)','/$/,&-'/1&$.."''.(!)/'.*(5-,(*3< '1%-9/)2#/+4=-'/1-;',.78%3''-5%+;,-426'.5*)-3*'!4.*57+.13*..(#"/$(,()4(0:2( --).%3,#"/*,)()1/2".,5:16((/-441-'#4+9..2;)+33&-(*$%&+0,90)/&.+,#,*-/'./)$1<,-#;(+%$#+-%,;45, ,*+'+>$%)&3)<.)4/<%($-3#,'2.**5+5++$+&/!%$.-&.(-/3"&"&!4881+.4..,/-5#2,"7*389%"5402(-'*'*%+6-%+527!8"+#+2'+/5(,'/3&..%+*&-&0&,((/5/64347=.3,'1"%%')0/+7(%$3+**)%5?*"/,*,34*"$%)0&5(2(&#)'0*)7,"/''(17*")/63)('--',5*(,'0.+).'#)&,/,$!74>/2,4+53'/.$7:26# '-1 21$34.?">/'-3$-='1(.-)4-*)16!./932&)854."6%(,*)/'+,1(.2$%*)&,1+8/+/86'+2-'.&+.404*Ԁ׀Ԁ؀ԀӀ׀܀׀ɀԀҀ؀Ѐـ׀Ӏڀـ׀׀րՀԁG+%8+*6#-*'0/41501/9&-14<2-4)!14)%6+3:0*1/ !%#5(*2+,&-/#*)$/443-9?,/*%E41&,,#5+),!:)&-,,01''4,/%)+'$8*44/+/-=/)/+0.& -1-$2 1,47(2)*6 61B'1/$3*52!,0*53&+"8"!(-3$/7.470;)%%(1+/#3$4/"''/8*&7-4*-"*41'',$'.3)1+0.%&651"2(+7,:&4*0*)>8'4.)%76/!-4"")&1,/<6')(03()-%(''*'53)-'1./:1)--8%.+6333(%9/.%'%:#.0.('+;))%-9+-(8-*=2//14/,2++/+$/'/0*473&#;/%%4.*1.)-)3%1 (2#),7-:4:02:+-2*(;/3-!-1.6..2)+%('+9&),(%/'#1)0-(.3-5/1(*, ")336,0+(-/7/0427+70*(',/'61-5##17325."'+4:1(%#05C2903-0, 64.*&1+7,91):'9&(0(,ڀԀԀ܀Ԁ؀ӀـπÀԀՀӀڀ؀Ӏ׀ՀՀրրӀՀ؁338)'$,'9?+&'0))'3(*&;/+*:)4.'!0!5//%*(1/#,)6&*327.:+#//2%-$,(4%-5$8.(,&**-+#2)*.0915/2(4%%(4&*./(44+4+'0$%%4C..<),-*(20 &%//*./+1+&9% ##,)),-%94'140-!7+,# )$),7.494/8+-))%,')')9-'2,0+#%-0-+)#/(-$.1/20'.-?5,'""/%*.0%&2('!0'3", 3/"/6)4*.) 
2#<))1,.+-/*/)8"/+1/+4+."1$))2)/3(+A+1+.'.-.+//,8(5$3*07!*/2)'/-!+:#),-#5,,$*."&)10.-03!+//,(5&-+.-/03,877&13&*(+6$4+.160-(5%44" /"0'++#'-%%( "/?7-.&27.-/(.7(-,++2.*:(-*,4%).=#+#3'$9---"$)2<-!&/%2407,16>/'-/+':;.1 /.%+93*%0&)7B/:%1:*).48)*/9/+78,&,60+׀ր׀؀րրӀـҀŀӀـՀ׀Հ܀ՀՀҀ؀ՀۀրՁ47*//3/*<-7./)('32).74%3$ +0/**2 2&)6%'*90##,$97 001''#-%.1%#0*1*.$2)&+(1"15/)""86+(+!1%,:4'('60$+2:01,'+*#)(A,5((+,#4'.1%<,+*0'2%',#+/#>1+6#+,.&%32$,4-.-') ='026.1#2'.)13%!"1.&('(%;#'6%:/"+0(142*&565#&-/8&&/-.2%4&/H'32+++"285+(2/<88.+)847(/0*/)8&2186*1+%*5+1'6.&+'@+;7):/7-4%6$76./&2'$".25-:**4#%/935!.+-*+3.&A%1!31%B$1,%"('3/%'8+453=,93,390/(1*+.?#(1*21+&+*,32&74)&,2'0-(&,,(/--1.4'13,(#=../.+(,(-+0/9(,0"1!51/$$3.$(431+-@'(,(&-+ *&.*-0 /7,(1+- ,)&-;)6.7=7.57/)'1%;+") (/7(.0-'3$+,/!5%-1.-/Ԁـۀـ؀рԀӀӀـڀӀЀՀ؀ۀр݀Ԁ׀Ѐـځ0116/8'1(.7)0(*9;$)53)/##*01023!31/) ?/-)6%(8,&+',/-2/-8*$+&-0/)'3+=,$1223,72/+7+&9 !55$/.0.+394+,9++611*%#-/($5++)!(%6(24"2(-(7+/&-747*(147,.*-!6*))('(5,)*2-/"3$..(;:/)"6/+-1#%%+-&,,-%-$,$'- '#$+2'*/262*53E-(+)4/-0/'+/(((5.116*).13(3:+)*..0'%$*4+2%2&+'9?($++1,/#)/(). 2(+.,-#313'#78/:")08&1#3.):*(*++&+8(8/73(1'+))-3(3+/'-5$//8),.;,03%0.'-2,$):-"@!3-)%.%6"0--341&26$86%)6:7)27;0:/))2-3#+/3/(&!%'+%632*2& ",$+$/'')047,+3')*2'##+#.86*-#-.2#2/*',+(D'0(3+5%*-.2'+,?6,*2,08':24-'-1'-0/=)+-),'&73ԀҀ؀܀Հր׀׀ـˀՀ׀Ԁ׀׀׀ԀڀـڀрՀՀҁ%':()4+3112&%"21,)(;2-305(/,(5%':%%-3,-10+=',#.>-,!57-&12-/(-++98451*(359%524+6&%),9+34#50'+&*26-+"59).3(/6.%6&/'.$-/$0)?:*2.)320%.&0-/(2625++.(/)03/$+%-3-*+0%5/$-+6$)"2.1"*-'%7$-+'0)))-")#& 4(41/60//5)6',/40'#&0':*7.)4-12<14.))&&'*704)-9)6)341 /4&$.1139-..-+*3%/1+6'';(/")/#$8 (137.=*+3$30"1++./3*103A4%&+4.-%5)'9.81,'52-*#,/1,4;,13, ,)#0')4'-+0)4#9'$&;/:.9(58))(*-$&,+*2.,)#4!$%-"5*7',/++-')+33/+#60E' 5$6'4+/ 24;C'((%5+/1$+0/&5,$"=3/C,/4<3)-#4 7+858/4%ՀӀրрр؀ҀԀՀŀ׀րՀ׀΀π܀ր׀Ӏ݀рԀځ562,#66%*.!0%#)+)/64152+0/))=)+++16*6+1'.(465,-5;1%,,26-%*'.-.&&//*6#910&1+-%&$'/%8)5.1*&1,)$2'+-$607#53#;5.3@3*,--&)01/*/02.3&;A%+)0.0$($1*'#*8!(+*0+.-+($+)*9)+1./9)+6+6(/ 233+4$*",4++$-$,5'$12/. 
*3.,9-//7$6 03/,$+&#%01.24/20,!*&80(,28*0(3--&%!+&.89#,*(#/8.&<3%-1/0%$)*+(,0), 6(3/)+52+*:'$')%5,;'#%&&)&+--$1*(6;)'"$+/"1#)7=$32,&#'&(.-!".'//'.,'/!)02-.03*%+12>)&(+.0''+,-85-(.$"*&')")'+*1*&.&3(-**&)+(18$-1$(1/3"(-(*0-//!+0'4#0-10*3.0$>#;,,*')&'%/3&(-(+8.(&0);2/)1/)7*2:)//;-( 5419&19)142-16$5ЀԀ׀ԀՀՀۀՀ܀ŀрҀ؀Ӏ؀ــۀހЀՀՀրҁ/,//<&#"51(;.)1.'E/:--/.3)6(-#$"1-1(4//+59/7-/'*/%."*,((7.+/,25;/):1(3-)%)$)0/2012)+*+23('(%.0-&-) (%45&,&*"93-4;1,+4. $'835.#&//17)--&*7%+(26%1-3*-&+094.:$%(#!7/-''$#-241)-0"+7(/2*21"./+&:%.*"+'&(#*)/5&)*+1$:2%&-06554'#*--)/*%*$()&05%.%)%&1&&:+3+-20%*/50(4.7(8)-.#4(&,,&*/!-**'5,&81*3..%"):*('%4(4'54*-1&+*7!3!#)$-300'&910+#7*&&32'%+'1((1=-6/B/0$2'710))(2 -+,$2+9,4%<29-7;&& $2)+(*/60*0.! #+,+/84.'(%6032-&*6//%3*-1!4&++$")3)1&'9 .3/++.-0/6+1$/955+'2/*701,.+-3)#8,85*-0<3$*1-0-6;.2-9(10'րր׀׀րՀππڀƀԀ؀րـЀ؀׀܀ЀՀԀـڀہ7.+-%--,7-,24))(&60511#)/23'&-08&('2)-'9069).)52+-*27+70/-#1/#/8+2,.'1/+5'%./0*./(0:*#*5(,)#!&.3+/ )1/2+,*/,%6'+/9.-/*1:"06/-((2--&)*, (76+.:+#''$#+/ '+?+/!1"%)$)+' +++'/5+$'/0/!0$"""*-.&46/9)(6:.&0-9)+).-1,1-5'+.$;/74+ 1&0+3)7)(/),''-(#1)!3(2$*'1 :1&,">)(2"#,&*/45"'(0%*5)!8'.2'.4235&&(4,2->)4'&4(-2+#92.-:')"/625),/3(264(-*)095)-).5,",%7)!*("0-16%/"5)80/43.*/%04%4').1*+<,'/7#"!4%%4)+.83<)&'*,,%);),/3-2.()/7(!&%).2'+$(57229!13&#.%7'+5..40- 8055")**-.10)4*"'+763*18"'.$7.37/'1!&/.84(-#',8-1)׀׀π܀Ѐ؀Ӏ؀ӀǀՀԀրր׀πڀ׀؀׀րՀԀف2'76)#+$(*1+.)/-,8.-+.+-&34#-#6+5-&&;'%;4*/$/30;=.6"2):-+)5&./9)$+1'9(&6410'';&)')7#(/%(1)%#)++2,,4133("33&!)2')!./#40-+)5,+#++5'37,1+*!..21!,($*1-7*..5&--,/&&0;-:)7%5(';)&23//4'*E1+74,%A3,-,5/, >+&)0)D+1(.44%&!.(./A%-*:#;7 ' &"6+12'2*+86!(+-*'+,)27.-+(+1?"5'200-4&-&%":.60049&93.,.)*0#5B59./3,)-3"+$((!.%'")*3$$790*,+(/$0-*(%55)42,@./-3)"/+/"'>5)(<-%%*-&1$//')/*-/$./&53((+)!3.9(6804'# ,%10$7,9,*/(25+,'+*2@12/2(.'6"01-2=*0**)+*53/-.,/ 
35."(3/+4;9-<*,,)*)7/"/.1.2%*422.3'(95;/&21-0.1*3&50-Ҁ؀ـ׀׀Հڀڀۀʀ׀׀؀ҀӀۀրـҀՀ׀ЀԀف,%0/(4#0583%."&20,%&1+0/'"&337+1+#746%3+,1,8'#%1;"*-$1(0+/;();)+%+2+9$11+&(-.*10,&<&3'-"%-&!"15-%.3%1&%6+6!83..&3%32(7'2)2&(*52*)6&4/6'/,6')#(1%&/&*-73+ &'/3*)4-(-25*'%#/%,'34($.-%#2342+6&*3*")1/1(/3+74&)1/-))1(-(-5+-0*2323*( -!+1+-57.22-#/+/1#),5%/-,/)%%(15(""&1*'13%-,)1.07(-.++68A0%*-75+-, )4'(0- + (#00( 0.537*7/16(+.,1+501*,4.6;21&A-,.$1 14&&#"->.**2,711$5,.1+,&027*77%$;)),+2-/85+"1'%-7%/,3'1% +!6(7(4$/1512,׀Ҁ׀׀ـ؀րӀԀ€ԀӀԀՀڀրӀԀـ׀ހԀڀف+2/*,+-(,*:$13,/36?)55'/-%&')%&/-2--94)-**.$/,($1+)3+'-).+2%$+264)#4)8'-1**0:5-,,3('#2)1+-8'7&-7,-)!)823.2!-47,/.,5--=5,'6/(%'/=!5@/4&.49&+(*$-?!)""!&))46 )$-.(+%&/%@2/2,/,"%1,%7.1<-+-.+'+$-%(5..-!)$'3/'2+61$(.3.#$,")&,*+":&,*5*:'&#)5$-+&$6)' -(01/1/5.0,'0$'**2)-&/)3-+")"2-2#9129'*74/'&*(,+40--2%%+9#(<+!-, 8+A<53%02.-5('50,9/823102,%%;"$*-.*,0(66%,&1,<),/08(*( +"'-/)0'3)2409..7/>8".)(7(!.+$+)''!0)*1-$40-40-'+("**2.-1;1%/-3'2($+(, -2+'02/4.)/'.45)+4.-(4-+:).0.'".+3(-)5.$)&5&73&&1#"*-,$3,*7/9+-4ՀڀԀ؀؀ـրӀڀɀ׀ڀ׀߀Ԁ׀ԀـۀԀрۀـՁ0'*13)#*&)+2.),<26-;6'3+,12+().-"47#+642-(39<13,%$2-*-&'5@9&+('6/5"/0706!3 08&!+3,&+#&63,0&'#2,(*7#+23$.%(/2)-.1/5.+*6&1"(2/",.4$-40-../+2+!1,-" .*1)(/+5!.,-/.50)-..,/(!2B%92.'5$;!'.5),5-'(3%1/6.1,/5/'6206.5-"%83.,()*'4/431,0#,/*'2% ,$-12*+33.("#+.='*8.0*4.)**-6'/84)3/1--&//0/5.24(2*-,2 %'*-6")2/1--302,"/3$1--*+59.'/027%:./*./'/-.#((&5:3&$8D/1(-)51.49856'"2,'+/!7+)/9**&(:+()+%*/)&(0'--+4+84!,).&5+2**-,%5./.,&.*)*,5+31+:2%*--,#*(1)9+4*/-�/=.)/68++9)0+6233-*40-.532A610/* 3/.>21(2.-2-93Հրۀ؀рӀҀ܀ҀǀԀրр݀ـր׀ӀۀԀՀրՀ؁?+(,'444?/+*#/5&,77,)11!/5&2!**5 4-57+.A///27,61+=+3&-17+&+'*'-))5&-.7&3/(2;474*/6&5,1(66-=/2,89.6:0,'1/.3'&*, //*$(+. 2*'1/.5#)9+3*9+<'20;*F9+30/*75+,5$,0)*11! 
+;+,4)),(0%.'$"5/2)+:7167/5%&#%(8!'2#9*).+.3631/1*/&:75#1B"+'357H3!!5&3%-.#,.7-$&$%4',@1%"2/*7%01?.!.#0))./&+1$(3225.5',>+0420#/'.70+*+'&3/01(*6%-*8!/.3-' 1.1,1%505:-#47%)+'-,4/#2.+1*7.-&4-136/42.+*(-+16+/,<9-0$28*-*.$/+,$# ,*,.+.+=/+%#1,(7+538&-* &6#/1+8.3)1%528+($4( 6%'0)<4,;'3,').24/!2((2/!-+433+/-./3...+3,(-$4*9.41..5660.)532:*;.8+#׀րՀـԀրՀـրǀҀـԀ׀ڀԀ؀݀րڀ؀׀Հځ=4//64*4/,=6/,7-*';4(*7$0*2//771)3$)06,57'0'502+/8&2:'=/3-'9,5++,*;-300/,*8 ,01/*. :/6-%+0((.5>/9)**0#32/+1((;1,//9-!/%.28*//)/.20(+/# %(2.()+)'31=8(!;-)>5,.,,;-6&0$22),92%.*-',0)='/,+&&-2$5-(&,&$.)$)#!#&**+4&%5(,1'$:6-1..'2(#6(#(*;04?1+4);1-$ .56-% )!/-0+% ,255,' %1120.#*)?4#,9,1#0$+,/38,+-:./:2/ 2-:$/6/!2/-)+.+/#<+'3;0.4 '8.'1/!-+-&!(#>3-//9:-6-+2)(/(29*()&2++2(-&-/*.'0-''"(1%+#986)"2+0#13/+-)+?%289)(0-7)/*>-;.;2:7593.0&2#1%*'0&',*/4$1(,,#7'4,0.61+5 (*8.5-'0:*.'.,+'&.531/302&*2%./*/&Հր׀Հ׀ـր؀ـĀـՀրـڀԀӀԀրրЀրـՁ2/>//.;-4+0$/:+%14%)/208>56611-(%"/5<*+)0+,)6764(&/'$.!>%"/!-'-'-#5/1,'/+(.#0&47;,(/('&5+)4(,!&/++,%*.&./5*&/0/9"(:'(3&95(-(2*+- *%-)./*/61(&00(5)'!(1-"(*$#.99.-%+,+-4#*0+"3)/,#41%./+0*2 1$&#./)0&).+2 )7($!%,#051"/52:+0-&)('#63)"5'//(+,0&-+&29.&+);9)*'45)6+0$%2'(-12$<)1*#,/$2#$7 '/#/-.*2++-?++"1(06")1&*.,(4-**(."#8%%-*40544*3*(53-+))(4:'///0)*))/2-28*&83,:*''-*)&-0>!+4-"(91(/2(*5B*5//;.2:10)"1.*($3%-#/+-'- 75&''//8##%0%1!D.:/8961+A#086230'/824-86-*13(0)3' ;((4'++(($,--656(%)*'+3#=-5,#7$#,0%/,'35*:011(9+/,/=&28-,-/8(&/0).:+#&&*%301)'#3"#'%401&)=2((9*!(/C-)9)0:/54,5*33'73-9)7/9655׀րՀ׀ڀրڀՀՀÀ׀ՀЀՀԀրڀԀـڀҀ׀Ӏԁ.,?72F61654!,+/$.(*/+%(17)B1.#*+%12-=*3../-+-4.561/1*"!093//8+0/1<,/='1)42=-/'#)%."*&.=@//2%9+'*/(5%7/*.'++%-9&-85-443/2**)"*+4590.256;-24%=),4&'+/)+0%-<#/ ++:./85,-)3,21(312032,(%244/'4*$)!034.&*.&.8 4=%+8"73,'2,#3/*&. 
'3&"+4:6)0-0&53=(#*7/'1$#*,869$/:4')$/42&%)A,$#&1)A))"(/*-+-/&)/%/04,6;:**13 #)/(/+.99:1%+$5*-')/*)2@,+)3/ %,)@(-7,,&09>4&'/$#15$/7::+3(,0)/&'35#:853+,0*%138(?1#")%45..&)%:--)/&..5*0*/&5.#,!0.1-7,'%35+1"0&2!+/&6-))/34+6$':,(/5"15+$&.'(11B/*20$0*+3%0#8(482.!114?-(1(9+1(89ԀՀրՀ܀րրЀԀɀрӀӀՀπՀ݀Հـ؀ـӀـс15%23(+@$69:1/@7,-$-(-..-')'744/+"/+56>%++*-+!..4+-+9.,*,71*(,9!,,+0-&!-)0'#&93;2D 72/3+,"6. 52//*45,+4+&+!20-/&/5'5,*!5%39(50%7*26+-*1531//*1.55')3%0 #&0,"+/)<%24'--2'0*,,-915208.%#3'-1&$%0$5/++*3&*$'!60-(*'/)))2*.5)*6-*5$%,53'&/*/2$!9/-!4-(3(1'(1,-*2$5(##0,-,/%(1*/!2*&#/'6-+$()9./+35650"+'**.=*///5+,-")'42*&.7",*1#$.&0..-0".!/'5/&)33(.+.3*$4---(2E-5537-5)('50"/)A+(#(*2.#41/2&28$3>+-6*+2'-'.0*7-.*-221<.,.#2"(&#-0 )4*+,/+261$-:##*!/'0,./153320/,/03'*4,!+!2;-13./)+83/6.3+8&-60(.6**6=)3;*2,81ԀۀՀـڀӀڀ׀ЀǀӀــՀڀ׀πـӀۀ܀܀ڀՁ53/6*5+906/':%9//;.>*92)1<(*6.!*/!)++,%(/;%-:)%)#-1+"":&)%0!-61*/)&&+.(((.34/3--./*-(4%167//1,1)1#3*5+8#1&')/3' /.)1)13*3.1!:'.4&+/$'%+/++*2-/5',',%(/%)!-)0,!(*/2"*+,!5.(%52)//-1,2!/;,3(0%''*.)62'6173%),*%%-..*&+"(*+:".04#1 2($%16%$-63&/(52/005&(6-/*-8":+19*5/'#6,%'-('/&3*(4'(3+6!660-)%/2#.05*!!,(-(;20$,+#%6'26,'.*+)%''-8:@" /%12/-.':)276.,*/2=223%77>207,9,&4.4*3(+)*/1,/1% &;# +;-(-.;-0/"452*16;;+%&%4#55!07$3*,")-6&52$.5..$3+3-55+02'6*-0#.+&3%1*5%<5"1.#.% /**+*9-930!.&7+.2()2&,,&2)-#69#/2;,78ـҀπҀրր׀ՀրȀ׀ڀـՀ׀׀ҀрրԀԀڀրׁ&0%/3300,)?,600.&('/+%,#/6// #62&63)-,1-$)9:3%!$31@,(2"-"<3*.,8&/0,2/;1/"+622&63%B+)-/,5/0)97-(4;*!9*5,&72*-64-.#,&7/2;3/4365!/)/&22&3-2%=#.+(,2,#&,(4/,0/'#)5$,$960,, 2)&*284@,36%1636>"+//+(+!2;0&+-2'32/+ ,)%-'+9&,."#0%)3'5"4(/#6-)9537+:'+-$+$&./*('1(5++," )+5.!#"4//,3+(3+1/#)-**/84"*+-07//'0@)()*0'0=8&,*))71$#:4-2#+6..)$'*))3.'+%,%/%4/,(((-86+%)*-*!(+23./33)(5 +*&(/85%'.#(;.),).+*,42,'"%.2#.$!/%$40&/)+0)-(8&1'91()3/-8/*6*9*)#%.'-,06/6-*22,1 =3,18+*(&4<--*-)1.3+&9>">.-910*.,.*64-+&'322+388&ۀՀ؀ր׀ـҀۀрƀ׀ـրր׀׀؀Ҁ׀؀ـ݀ӀӁ91#**=//41(/50/39.(041)2%+)25.0%5**4)5)-.6+0;/5(",*%'$106910')%=,10+1*5%5'(/'1'/37%0/*,+ 
+-/-%6%'.-3;%*3&99=113&,(.366#01*'0>=*,%/$++/$&&!"2,-&84/7>%+$*$331/(+)4'+7&."(&(1?.2++!((0'&"0.+1*,0-*+1#:':$-7-'0'&"2"+1,/2 6(&1&-/-80:.'1/+$3/5+5*:)**%%*460/1@/9)13--0// %-662+*5*,1(!)%.6%-72=*,1(1.*'+5/,:#--01)+0(+19/,#)1/+& ''-3:1!'1&,(9//$6--,.(05332')+'1.2>2+(%,0+%%+-,6'94/25%!2;&-(%5/%&(>4(71/+-7;&-92.#+&*(#/33/57/-+0-/)(0'&.03/%&A2--)&:*!.7+&!.+,0+,'*=519,,0,)"*2$&9+,=3;+4. -1,8,&2*+'2./ 3'.,"/1ԀڀրހԀ׀ڀҀـƀրր؀Ӏ݀׀ހ׀׀ր؀ԀӀс)856-:::9)5# 3/93/'*!-/+%*1'13@)"5)('&1%1=.1#(2/1.55)0)&*2 '',0,/=+!&;4%/&5,=)+C0+C$+&++(-/'7*1++)5(/(4#'&1,"*5725,%/-&=(*(*)&.*#%3"..+001*58"60/&4/5-*,,'3',%)32',8/''3'22'"&-!0&5$00%/%8%+,*3".)/0)1"1( ,+80#-+'.(8#/38(+)#/.4)%,0 ",13-"9',.+-*/441&/-,&+13&-5--4"///+"!( #31,1-/++)-/110%/$/$'%51!9%35,"0($84(/71((#"40)+*0.%-*6-(46492*&:-%1<6-00!/$&"*"+882$-0.+<)-%-5:$)62!-7+,'%+,///.-*0>18.'++$ )2,''.1'3-5#/+-..617$4+!,(,1*61+79"+:#!A2,.$%84,0*#*2:2.64,%+..573+.1132+0*,41.73>4*49-/..(*.31,ԀҀـӀҀԀـՀـ€ڀۀπր؀ــр܀ـ؀Ӏрׁ2!):$/)&/* .&*$,&.'.6**4,2"(.+(#7'.&**,3*&31%4.('1'7'.02'%,'<,8*+1('.(+*),+2$%.'0%"5-352,,,/'=-+///)6%.,%&)3-41111335*/1.,,'5*2-#"*'&/1#.&++&/1#(-'6,%/(347/(4:,4)(%0/9,2("6,*-.0*)*,(1-":4-0()*2/5,<1.-5.)$/$+5+6+/)/'7/0(02-0.1-+/+ 23(/$,+1"%1)1+15",0)7%5*2+!.)"3D0Հ؀ӀԀ׀րــրĀՀـԀ؀Ԁހ؀׀׀ЀԀրـׁ3053:!@/2)5*3)'-#7-(20)%2%/6)/*&-!83B'-:3$%;,4(%(-*8.-%%4-(&2*7/(3,40+.1.+3+/7:!'$(5.4).?0';81")-&.()=9*7*2D30$.+.$+3)*6)"(73&65#0%.744#'$*(3*.+%64,*.*'+(("3(-#1(/( **12' +231)9>(/)05/6%.$)2'2/:0.*4+4!24 (*45*&//.17/-/- #/+(.0/81%&83'6+1C!*(30))$+2*%*6-02(*#.1+),$5-)7*+)*0")&&5&/.--)#+*0'6!;*00,(!-"556'57,+:ȉ++$,/$730(&3!9'-/1*02+*..10!/2<,,-.$5:/8-7)+/+/,15&4*'4.+1#%/"(4,-,(*$.(0%,20,2..9+$03.!.1-1.*"2A/%3/4%1,-;-#'2+20:#)*3.(+6///3.2-#=/*!)5-<3-4-,19:$501(7-$(&'*433-(47./3!8!?) 
*-1&,2րـ؀ր׀ՀԀ܀ՀȀۀڀ؀րԀۀՀـ؀Ӏ؀рπځ7*5;-5,/1-,?%,56)".%.7'+)(F1+,+%42+=/3,"=74-+'/5):(33*,+/(7%9)/'3"/&.#3&+921!3+1+=/.4'9(1*+-)'/693$7(/,&:/?%08/&+9*+$03+)?264+%0%7/0'"1D+1334/+1--'(3 *0$-)/3:"-$4#-.44 +/.--/5-:**6-"-(="%/'1+*3,*-.+--*,()5/*+/595 +-8'$$."*$,3+!!.16/-9--'8#.%.&71&..'+(3*/./++1.+)#3.)-22/141,,,/+505(.8"$3'#((7 *%220),7)4$)# .1&&-,81*/1),&:)(13;/%/+/482".-:(/-*7/'A4/ ,+0,3)58/(2201--3#(-A&11!:0 B711$36-*..,#@,?(5)813"*2:04&13-'/7:.2B+*54*&.)'8--41$0057/(0.%,(+()2+33&4:?2,341&7)/"$70/&*!5'".83,15")#"0.00--#ր׀ـЀӀԀۀրڀÀր؀ҀЀڀр؀׀Հ׀ՀӀՀف+/>;%0?%6%+9.2;:$'+3*",'8*+7644,#/=,''0'1*8'+4040-0077,.!-;'-(---3.+0--5,"+/'6+3/-%30%02+6'1$/..($)34'4)#7-14..(/")!"0/$$(?0?*2)(2!,(-"841=%%.2, *)%8/,*(%4*5A(1*'/6%$%4.-4>%04/1%/1(*-),0-!8*22>&2+"<.6)+%+05'-'*.*(262*3/'"5-/$'-<&36-3-+/+7#3)0&.1%",()/./#/%-('!2 +,#,53+117#4+/1+'09,01).*&*1+<1%*/0*,7)%$"%,")$-$1(-2.!.#"6/(*02*&.,)52'$3&*&-4$,%-2/#;(-"3(7)+8+(.1812-!)?2*8-,3"04'++9-1%),#*0')*2,&%4#91+620-;)%#%6+$/5.--424"&,2'-)8:64*4%0,'(29+'2! $634,2#0:*<3,+)5C-#%47'*(,/5 *1572(3(1/4ՀԀ׀ڀӀՀՀԀՀŀ׀Ԁ؀׀ـԀЀրӀՀՀրրԁ$7>33(&+)+'1-)2-5<&-:#-)+&1-=:,*3)5).3/6,7&-6+-(:45$ 4:;-/?7(=5(/$1"-.-,+%:"&"13"7.,3445)1*3'"6/0'.8";;-%-%35*+(+4**'$2&*+'/"!3,!2$/'!/',+16,*5,,+%+"")-1#01)3-/)01,(,/(+6-#/!.$/.7*-*.1$/".#0+2-('&(9-*6A%":))0 #5')B(*/'12/).,08*.1.&$-'*#7.58)-$?*0%2%+>-./(0*.21-(1,/832%!/6+& !,-?-#'+112)%)/>+7'.6*.1)$9#+,*+/-59-+'/3)0$-,'39.*',/7*+1,.'%)528!'/&)1/-;!!9/4')&6&"( ./*(-/(5#/5%%+-(58()2$'7*(3*82,%1*)-*+32%.25D!.%((:0'8,$$",02%&33&!1./ '"&1$.-$/3,;$)(!,!.;5$B!%<7+00+3,3167.(-"!/-21);33,14<)5%,&+,րր؀ـ؀ӀڀҀԀɀـ؀ـڀրՀԀӀӀ؀؀Ҁԁ30)61I,9)-1;,<206$/4-<,0-$).1)2/21-%7$-/#-(-+)( 28(9-#*1.-,*)@#1*#9.,&**7 ) **1/-09""5(.4/),'30&)1")&..+1+'*%3' -/1,1+')&1;1#.8(15%62)*/')%)%)+)$.1)-%(.*3'',+61&;,)4'1*>(-&6/(.9$,2/'88$<00.*.6*,+)'-40,1$+'/46-6,0,$$)%3-10.5-)%%2*0+.76B0&,($7:'&2'#)&).-.0*". 
+9)1+7117'%2-&'.&3&)+$0(;%&'/)6$-) $361+89-&0$4.-32'%2/3!'81140/&)-+8!/(&*)2)2 '6!$%&)+0"6=,*6732,+*%,15'1+7(&)*069-2'3,)42*'5,/'(2>-.7)1"2%5.2$)/23'+-/3*- 30''34--/65(@&'//*0/*'-$5)&4!-0,)-0/3&3).#1;4.#./'-24700,$-/--!*9#?.1&47*/%0ـҀӀрҀԀҀ׀ՀɀӀԀҀՀ׀؀ـրրԀـՀ܀ف/+82%2;5%.)-I/8$(//2/:/4)#1):"51)/4,$,&9+&.*2,)3541/'$+")1$,:/;7#"+&*//3**5-&/),2%3',/' +1+'%46*39')61;%+121,#+&/55526',+/6+5.=10%93#%+%25+./<7,-$))"%#2 ,>"2%+!11%7%303.-/13")%2-%7!'+52-/$?(,)%,)-013)):&"8450=#-7'),**/)280,/%16!'055&3#("(2,(@*61$4%.).'%0&%!(2/&55*//3"3(43,8#8!$/-,+%5643+,<$&'%6-2+*";,8'+6!1($%/1716.*(4%**$)&94)2/- ,%,)./2(4!$/43%-;'.14'0;22%(*-/31*-(**.&7.11/+/:&*%/53"-31%0#,3&&1&& .'),%!)6+ ,-.8(/./,53'--++3 #5**-%$24346+)4&#(5%.7.6'#;7.'+42#(B5.*9&%90'%#<31)66-3)/+5'!ՀـڀԀҀـˀـԀˀՀ܀Հ׀ـրԀրـրԀ؀Հԁ545/-'(5/,,9051=.=&*)1/*373.+<-*&+3-+*391%+":'/)$$"6+80,7&-2+'):0'44/!4+)13/*3#"4+-)--=/-,7#2/8!)0/1/5%2!.)*')5,1./8$),45%>%$4/+1-+6/'+-/'2+),=(51#3/13'')"2+&.#,!#1,5.5#+(<+41- .)913*/0-2)4,-(74*2),-'@5%)'330<%#.2&<*&3(!3&/.3%?*47;+/0 0:&')$$+/77(--2)&%=&/357'%'*"'-!''/#,<*2.3&*"%, 0&21"0"/.;.051",-7+)+02",/#"*4+*.1*/4(*$/2--*//-)(//((/*,).,-('1/1&1.+/4*&-%00+,=!8)4--165-./3,.0,33+!(0!*((,//+,76%5, 4.*+*%&)&05;*)--!-.730(($41%7%0/.,)-4+++%9++,+%.:/*-51.*9'!$), -'+4@7,//))+-,51$#:?9/393,/ڀӀـ؀׀ԀހπـɀրրۀـԀۀۀ؀ӀۀՀՀՀ؁-05:+&)6+9/(3$5%5/,'%)A)00 5'+*#2-6'!"0(. /(2&:3+$#9))6*;()*-*.)+/03+2-A/%+!4*)<"-,*-&.*8/!.(7(/&* .,3+-545.".,3:*)(/+-'7&412*-2<105/3&)(4-$7,./02-: +/53-1%::5#+,%3.,2(87*-,1$*+8:*/&/1&*)24'+'/0@-+'(;..!C()7,)%2./):!4"-&--8+0*'10)':4228914-1/'+-:+:7)/*A0.9/0&'+!071.%-30)-%17#-+&+05.(&/+,,+-#+5)-6&7+2. .&*.3"-/-87&#3*21'431%52,$//.#),3+02.(#0(,5*/ #-77-(60120;%.,/103(1(*/-#.59/=)0.,+(-,60/(&03$! 
-&.!-*.',&%53.&6':%*1640-*)34!1)+,3!;'474',)--(<'5/ *2&4%&.'-3,!8*&(2,=46""4,;--7>*@537&.8,܀Ԁրـ׀݀Ҁ׀ҀԀրԀЀـՀ؀؀ـՀҀ׀Ԁց3./((,%0(.6)>),;%&%41=)'K5""/6'5/7)));&10(1"&/((+35%3.:)--<+/)3)1)$,0'-'.'*+&-*1*1),))1"-1+$--#0(+(/<.443,)+*0'&!$36)%.5&:,<4+*58/542239$%(0-#"7($$$'5/)21.7)%2//9%,1,,0&.1(3'(%+C)()!)')-,,*'"%%1/'3)72++,'7'#:/.-%-1..'3,*554'-,0+1.31:$".-&0.2+*)3'.* +-:*%*&,3(+'("4(/.-)@32 %1-++/8-/+*$;1(%'(*7./,-.+,+'&3%-+&2)2)!))+0,+'-(<)')4*,,5'/,23'6&9+4/+*;*33+2440&:)#845A.3'.)')**1)573#$6+2-+$8.36-0 )-+)*4#*/2"46!;&.%(5/1++&!0.#/(**37%-%0.4860&)5012"3))++,516$5@63-+5&1:3-1+5&=293/,3+/41$63=(;&'-'8,-2%#4ӀՀրԀـ׀׀ـрǀڀڀ؀Հ׀րԀր׀րπԀՀց"4<52=0+&-40,/%4,' 7&#+-$*%'3/+,2>('8$1,'>:30*21*-/3240--6.-0 50=(@4-4. ,00.1/2$36!'1121-'%!.5<'+5*!/5+/+/&-.'7(,58*,)-,*63'1#12$0&.11!1)'* %&&,0,5,6+/+)* 9.&7,&:)-'-+0;$./,+!2' 0).4'+416/4.%@$/%%-1'4)1/-+70--59,-63#1,23+)4/.,(2&!2-@/-9)1-''() %;(2")2)&0$0'5.-5$1).'))-3*1%=1#8./+-6%#9#624$/+,*&,0+6'4+(6//211).5$+?*,")5&//(+4+%:'(22:.3.7%7$#/5: 3-+21,(1-+/+/*$/,/9)/3A923*6-1+/*9+'/B!/1+4++)2:'"/*0),'%4+"*3/!,--7*)1%)"4,29/"40%&",++4+)/68+/1-1$4:7/5.)30(&&-:7/33=+7+/4++()3.'1.6.#8*/630D327(ԀـڀրՀ׀ՀڀĀ׀ՀՀـՀՀր׀ՀҀۀ׀ـׁ(#;3:73-?,101/<,',..53'#B)!*)32474'8& ,-*6$)5('.86"-/*)7'9$--+15-*50&+()"#&-,)44+-.4/4((05/#562).3))$'(.*<,%3-18)5)6(-'+-*9(63)-0#"5-42+,.4$/'/0(7-)-'(',B6+0+63%/(!/'(+&*$'.>(-0/6,!+)),)1/(+-'.+128,0&3"% %-5(,;/(,5#(%&+)#*3//%*27+)&/*0*/)',%72(9($4//#6'*,)'(1'/ 9&2-%528.)0->*589(-.37)4%0$-#3.443(,-( 3&-3<#3%/87%)17'2'*,"*!./+()#4%..'%+$+*$!.711)!'()-).'%+)7;-'6(+,-&%'7-.$*/-60.&)-5114$0*4-$5('(-)37**18/.*%/-,+#(1#=+.#40 ""*+.1,8088(/,/$4/,-*(75/3.$,!,,'33#0,/$''03-&&+*./%)0'+%/-*)/(14݀ۀՀр܀؀րۀӀŀрӀԀЀՀԀـ׀؀ـԀ׀րف7+%:%)3./(+67)%0!-3(+3,7'&+*0()...3<2/7//1--'&&3&''/,,%7(#)=070/-#-/+'')1+7=*'*C*//+* :/3&)&/1'+/.,+3>+(=-,,20)/.0401$,+.&.3$/7.3/#',0/#1340>8%)//!%.&)3&%-**4#0/&2!7)% 4/.#%)*(87043>*84==1-2,60177*36..*'-+3+15(/%*//,#+5#83*9,)8.%)(5>#*52$1 /.+*/6,4(+,+,1$+!*$4'-)(,33*$-(,-+5: ++./(0).,1,!20*/'0)4*"33-*!* 
1463%*/+-B8+4,,%$-3%(9,7.4..3%#%$A-/&4')!1#+'-)(*/6*#5061*,-&.+3/6&7)*)31/6&+?--+*28*,(3'*7/$/+'):2,;#/#+,38'+&642&9(522%#4/#7'.)1$%,%/-,-''")-43+9:-60+<)09/1#&9&='<%=.5410/>:$"$42.5:<0(1)5׀Ԁ؀ԀӀ׀ԀրҀŀ׀ՀՀӀـր؀ՀӀڀ؀Հրҁ:-*3-*6,.(5%14+7+01.8+1*C,0,%''+4*(,,/.)0'-'<,('#.#,%27"798/57#9 2",-2'$73,/161951)+*(3/06)'*3,2."60&'10/90.)1/1)3*6'5+.40*& 63-/333,&312"7+3432.%.1(,/(4(4%:+:*8:)5*+(0+*8+-2,)9.3-*,)/7%' /+,--7 (4*:.21,?1.,(69F+"-,$+:.;8&;;2"+,):2(7'2+/."3%3;39)I-!%-/!'*'#-/$<2&+."*-!+B&.4-,(#)(-)3'.413+(3//1',/"'((/6*.03 %0-,*10%1"'+/*+)%*.5*53/172/5(B-(,%039,3',91:/-4 8'3,37)$10;%3024."",-2,5)'/)/1+'=.&+++- /2%*##6%''+*+(7'%)(43&=+1+,20-11*90)02'3-/),),+-&; +9$,(377*+(82---2*=1/.4/*-/'9!7+/0/&02B"&2+,<׀ۀՀՀҀ܀р؀ЀŀӀ݀ՀՀՀـڀۀڀր؀ԀԀ݁,/&7$0+.24+:./7/144&)44/6-30.++116%-)(+$;).0*(/+'#'-75'+-(""!02/&,6&+$1/1,,/1+/10-+.+3313>B+,/';.0**#&-2#, 46.-5(/8$<-4$+.8*6&1)+*+2)-/3?1'6&1,=/!'+*2/+))2,/5%1:(')(2'33,3/*0,,&!00+-,8+/4'5,$%6+4+%!)8/ &+*4.7#*+)$*%3)/0//$3%,32/##)04+"'02&)<:(;4(.'546(+4)5*-%'*7)5)!$(2(4"5*8","+23*//.-5*4%+$:+622/#,&&-&21"'%+-:".*&'),/#1(8' -0).8*-1&//5,0 3'-44)7/- '#-:,(',/*/,&5')3'*!6,%-,/&)7*+0( "/&3.")4*<1#2/.&(-6&(/'/&.@4%180,&'-!#-(52*)8%+.-."10;).#46//)/-1*4*4-16*(''-0510(7&/(),.7.-0=%+,+644$<;-4ր׀׀ـӀրۀـӀĀրۀ؀Հր׀׀؀ـ׀ڀՀـՁ8++-()>&7+:,29;1-80,56.#257146-29'/<-+#2..2,-/#1+-)-18()5/-5+30(%.#*)(>*4#)(1.:>/4)-7+)"/2$#3,#-(5//62*'5$'.*%((/+(3?)+$-7*/.0.B-+':58'02.140*3/(+'$-&,)./.80)1*09#%30-$1#='.$63%1#**-.)-/184+#(%77##/)1+0))--,30)9)24&"5'83&*1-'/;/)$+(*&+(*9&+0,3,8-(.8//!8)+,833/1)%7/-2084);,$0<-''/3((.+(.4-+*)2+'/"'-+*,E*%2*8*%$!5#*C45'%+&'')2$)49(43(0-5$1'+-(3!(4!,/325'''-.#*+;(3:5-)"/3-%7!)22')*"$+&8<2/,"0151324''6,,3-'() +2*1700.-++5;--+3-"&,9*(3;, +*01%50-&2!50.03+<"+.-*32&0+5!/*)/,+06:%/$,*2)3%-):<3-8)1%.,ր׀؀ր׀؀ԀԀҀĀՀր؀׀Ҁ׀ـ؀Ԁ؀؀Հ؀ف*7005',5-00*/33'5(*+,+60112)0,561*.()7$/6.305L%43-1!6)7*,+)77#2(-2."2-.(/-(5-"-'3#)$5:59-,##*3*<6".13/./1,2).*#$+.,43/'/' "+"0!1')!!(%%)5+1,..0(),0'/(<,2/""/!!16*0,20,7325'.+-+&'5$,2725"31-,)3(5 
12%-/,/6%032$.#36&:#-3+*.),)/C?'$.(%,/6105>%/(52"#.16'+A-3+/$.(0-$7-+'//(&&">/$)?*/*.'!',5'.3/**.(&.',-7(,5127028&(%+'364/,*#&&52&7$11*0+.1%%2!0+/,&27*.(.5!'6-&,);*(,-(7,5"(&(.,.--750'5786)2%3A+%. ()-6&$-#'0/9'.);3++/1$*,;4!),')*01;/)%;.23)7!+-,&(%/+30)#0'(%!624*- :*%>%5.(!-(15(+2A-8:*/&).7)=(,+:H,5.3--3*028ҀՀӀՀــ܀ڀҀÀӀۀ؀ۀӀՀր؀׀ـ؀ӀЀځ//4,,.5$52(;9/,''0&),,)+&//#0,*=6,(96116%'++%'/*/,),+)//15))5 +%0$054$.)%()=#/.,-&/$(1,;%+(<05&*4*-+?&*+5/*$0+7'*,4*&,=51%./)*21.)///#,3)-(#2'3,!#23*1/.+0.!+1$5+%60;2-+'*&-3,3+%(0.*6.))&0'#&*+14-'&$460%+0*/$/)0)1#,&)02""6(*!4-)--@6.2$%5+(.1+*()#,-!'C$440/./(2C+(!1,+,*6)0';-.06,.)(+6.002"*141?)./-1.%0(%56,-6 $),73F3%*12/0-451#,072)-6''*+//%3-3%*%(243/&";)4/-(%*03.))"-..0#"//+4(+6)/,$-%1&/+6.-).+'')(*4.6+'*+'=%,%(2(+1/(%0+1.(/#+01%50)/ 533$)&/--)-*0.*187&)401>%-3+A-9=1'.%1--4/1+'-.6+921'34+րπ׀׀ڀՀҀـր̀րՀӀ܀΀րՀՀԀڀԀՀ׀с#;='94%1*05'+-/*121#942/-./1*+0+6"&./-*.&*+"6$('/%138'47%'&#/"563(*38*+7((;.*$)'-%+2/6?,/(37'9%;06!8'#3+'0,.%/#&."/(*),,,')123.*0),+,,%&',2%**1+./.#3,0,9?/0'1?"8(/&'+2+(+*.:)9/&,72(3=6+*1!*.+%#'/'*!0:0)&*!0"-"&+.+=4.*+1)/'4+,+;)..,!2=#%%$4+&//+##14!(:4.+3'.""3!#(.,).-(*#1=01&&104&5/-510%2+527+7*.-8!(*+#*$.#+2/0:?95 ,<2%,4/1"('6;8.)++ ,9--+C.(/$843+= ,/*7/)71."+*(-3+5,*0)1,&'*!.'&%, 7:/.8)%%60(&06-20/,)(*2-&/0.(5&-61&)7+(".43=/9&3)<).)36/&((!7+..+$661/%!/1D$('3&9.+$/5*6)*-4)*(23)#72.%;,+170;ԀրՀ׀ۀրـӀ؀ǀـڀހՀ׀؀׀ЀՀҀڀՀրց !'1*27"/.5-2"!*7!+''0/A,*(1)/;;2/#/.5C2(//323,3<21 (.,33(/(--7'/'.*5*&2(0./.+*3)"D1(.$:-147)!+1,)&/"'"+$&4*43""(%=+9115"+1/+010%-1(#2-4B4+'44*#")&.$1;.03 -..!4.3%))(8().3/$88'#36%()5"!-&*3)5/*0& )'&2(%*!.05"$';5 )-/'(B5,-/2''2!/00!.,*/+.,3903&.'-, "+-2,-/*"*./&**+*.- .0:-0++4+&<1)2'#'/+4046,2#$$%/0*063,$0+$06)-()/.. 
(/-5-(9953(=12F&&6.+/3+1132+5$%.)(!,;/-+(!06"!-5-,-$!.1()&2*)2*')1/!#*5+4/*(2*/7:3*2*$*('*$()"'5*)-0'-9/+3#0-'4%01:3!"*03.,(&)#%.5*50(/,!9/"7-.53+68.(,/'0/.&=&-61,21>;:1*1("/0ր׀܀ۀՀ؀ـҀɀրԀԀ؀Հـ؀ՀӀրՀڀՀԁ:-.60+537+//++2/&176.(/6@74*.73+01,93#/81',*-4-&)1*<'*#'5)-3,2--&7,0)2-91(2554/.8+0$)*, :4)-)#2+0.(7+4"070/+/)""%.,&,,++1360&"'0'0,1(.*271#479(/1,$,/14-"59/'1-.'0%0#/+,,46)4*4#)$-1%*#+ -/.))*+&1(-&,&*+,>('.8.&2*#+/%+,12&$),0*),)+-)#<-$',%7!%,0A#;42)-*:#,5)8.+$2/3-+),60?).3-,-014-.,*",.&&-)'4$,.*$1+,''?/7/,(+1,5-(6,+.#611(4+01:+,0//&#!'#!/,5)..0&.").;03)91./)/3))%(%//+*,34+1.+8,49'/*--,,...',#95:/-7($3'%)/+(0-&'70*-61;.*/3%'5.)3(-620).728(*'.=-1845($"(/)-3>4:+(25')/*)/;95/7-7=1)+;-&4*;'1؀րրڀ΀Ҁ׀ӀՀ̀׀ڀ׀׀׀ՀҀԀ׀ҀրՀـہ5-).+#2-0-0'6%*!/7(+')!'0.3;3.-6$2--)*/'/-2%/ )0(<67-.--+74/-+0)1*A017>7.+"'(1)1#41110"0+3;,*-$.@;,=+3%9%9&2110:$-'-+1$& +30#$+-1-// 3"7$+#-3&-:"+;.0')1%"'4.2+)0;5",*.05#*">(".0'2-'+3"3*-"7'/1.#+13)-2!+)*3!',%&*:/>3%&,*,%'0/(+-0;*1'***%&/53)+/24.74+&-$&+-5)1&!-0'-=1=4*58-26*!)<'-;,0/*4+28-'(6&27/,)/1*$2$&%)#/)/'+/-)&+614&0-/,/7"( 4+'&+#-/0-%,/!)--;63:**,#8<(&.?--9-%6-,5*.!(#2'-12/2,*6-1,91.09&2# 2%&'/2+#$.)$81$1%** )(>(!)4)34 &5'$5('6.'')--2%+ )4*/'%'()37)+'")1'7%6A.)#%1 7* *-*5()/1#2&!04*'Հ׀ـӀ׀րՀҀՀʀԀրӀ܀Ҁ׀׀ՀрՀҀրـ܁26-12-%/'=7+'8$)49*'%+3,,279.+"/5"712$*0?100&/'7$423=25'35$94+ 7-)%:--59>%/571#-$.*:/.!$.+/)30&4.)(#37'0'17&10%3+&0&2-00&&'0+;%/6*//2#+&%"+-#*'1/$;#25$/0!/1,./17%#,("@%.+62(,#//+-:3',.-6+'*71)' 008'&-2;)7%#33+#.7)0)*5#$( /*#'(-5%C*+:.&-5/85$.$,3)98&*!)+'8&-$42&*2--**"/20-//-*1*:,$0*,+,''--28%#-$0,&2&70,()-/)3+--'81&.#.&&B6-','-$69,+&(&/-).2A!#"1(/).#-0*$&7,(&+4("))(*06-'&0:)!13'*8--+*/89,425#&0/%).,+22.**%"&,#)("641&1+56(,#!%+-(;)/:,$:'&2/2//',*-2.46++6&&.-*7,%%%';0876.-.6$2*-160'=*/7,ۀ׀׀؀ڀՀՀӀ؀ՀЀӀԀҀր׀րـ׀ـրրہ/4B74467//(=%74:9=*67.'/A'+'063,'(55+(.A2"=4&B+%5/(2,.(/(474 -0-3/>330'+0,*&+,/,9,5<%.0.-'1/"01,'025*95+=6+$%?3 232*-0++/$542,.+,@*$.5&29,'5),& ,-1+0-%+0)*.,-6)%)"/)+.% '$$"2)260);$*-)3' .)3$) 
+"(523"#3+;-)/'$2+-:4%*)+"#/-)040"2:&15#$&$$./03/!))5(/.+=)1,A&/,1'$7*:*//120#.4$(*/$*&5+.26-6-7+--90(/)//%%*5+)&2(+2*6*6*//%&*'++%-37(()+1.+2*%+$*-&-+1'3(:$60(0.6$3$8-.619<,+))//");)*"81#)$1)+-$'*+-2-)/0'2<+./39;0":+( +*),9)+(1'()*+/*21+--&#*-0 ,7(,.2(<&0.05+%+,,-B/,193),,.(4$+()1&22,)25?..8.,,7*,*00!2"0-ҀӀրӀ׀׀րրـɀ׀ۀԀՀ׀ҀՀЀҀՀҀӀ؀ԁ+#.2%(9/71+&-/0,(-0678&44/%4,:$&//1&=47#313100,.78+-$1/!0,<*)"21)+(&4-)49)9(5*(+%+/<-+902020$:.11+/)!)*!,$-%2+/$.2?/35'#55+&40*&7+-.'15/55';)()41)+#,%?+)30*+,)-(3/%08,"'3/+/$%'$'#(/"--6!/1,%:0*+&*--0*$"*&15-46+!./)/0 !./,3/(0:0,2.&'0232'/"4171+*-21/:2/10-/,%354.8/.8!)4*(&(2"2+.?8!*1/+46/%#1100).+&*+741144&%4,,*.E+(./-)*,%/4:%.%6%1/.'-26,)5!-%1'-.+*0('5/+-()&*03>+;)('3,(*+#)/'&-.210,&.1-3/'5/#"*%*7&$/;$5,+,*.//)8(2$'()1<#/,07(00&&-4//.)3,44-55 . *)6>(94/$-%)4,,(21.:,(7+5D2+-49/8+.7*.,)5րҀր׀ـрـӀՀŀրӀ؀ԀӀ݀؀рրӀӀ؀Հց4412;&225<'1/3/'!73./1-,&(1=:,.06(;!-;/&.4-&)74#*+'&1,1*':)&#&")(405:''4)--5)50)(0=%/+0,*"1?11)<)90)%-)69//36+1*-0*'=,11,:. /+((&1)+8*,8.33(&/'!,$$3+8,#,,#(*'&&--2*4)/*$/5'-172/=(,,&-$#)7&/+/+5-#&($&)..2"=0+/42+3%)4++2)/&3## %*E1;-.1+7 (*'88)-'4<-.3&/4#48:)'=4,//3+*3-)-/1"0-,(.)$"6'32(#13)3'-,'-$43*"2&,(-,51&7%-74/&$1.)2/:+,&+1 5%7((,/3,(/(!/2*%),4,0151).$/4 7>&'2.3($%/!0-1)"1*,# "2+1( './&-27/-3$C+461#-),3"'6(-,.(930.&+ 0/-6%5%-50('73(,-*/B-,.!230/3)-)5*3,52.1./5.*(.,4.0627,5/'*/591969ՀۀـՀـЀӀՀ׀ۀЀ؀؀Ҁ؀Ӏ׀Հ؀ՀӀրс,51;371'.2.+5+,+2/%'32?(6''*D/)/)--5&0<404+674,0-0*%3",(-0--!-11(.2#0)2#>/$/3)+32/-!1-@*"%/-,!**'')&#*3(,+0&,+ *-(/3& ,56)/1529'#+'1&-&.'+./3+(1"()1'24%/%5(+4,63./&*:(."0#7%75%/')'.,-),&$9)(*/%).(+,1G+%'+(/#)5+,76.+.*())"0,7.21,00-+',,2-)&34#1.,)4.+/$-"(4-,+/*547.,%%+=1/,'+191/#,*.(,/*8'//*1&*5)%*++$;: 00/(/''#'*8,32!'-1/";)6* +/0/'!3,/7-'+0-.)+*.4.1#&0$&5*+&6%1+80/5+1'#41-1-*'.-!-*0&/$/,85().+6,(&*'2$5)"5$&--./"-'-&+.,$=/*410.,1&,()',32"%+729$&002(307&:41-) -203A=9/0/+/>)+).:5591*:D/,314(/9ԀӀрՀӀӀՀӀ׀ŀӀ؀π؀Հրـ؀ـҀрـրׁ2-5,1"15"&4!=6,16)$);0,21061A0/.(/;4$90/.' 
4+#3702,2.+4>3+1+)(%**$4/**(',)#*-&2-2-*+-;++0'<.*/30-+,&+70 *.#(#88-&4,+&&-&(@*0-*66. 7*1,#)3./,;/-#1*&!#(*0%)#3/1,48/2.47-9#'4.3-2/-/+86-#$(4$!6+2%1.+(#$(*(43-*(%50%1081,+3&+*3))2','3(9&9;2)'++%4/.)-'(-"&1)4*"$3$0,42+82,7'(5$,';*4&)-()'07,*/&.6)*.+ 3%7+&62(75/,.+/1*+9;"+)&-< )+95%:%$+/.#-21)!)11/09&++.+*:< 74/,'3+&$4#.2(0)/0."''&)"(/2/))//*-$)7 4,4%-)44/2!/3+'0(*/1+,./0-/&8"7)*-,)0.285(,885A)%6743404&,,';/'&05*5:7/0/32270$/*9)--.,20/;+5/98,60ڀـՀπՀ؀ڀ׀Ӏƀ׀Հ׀ـ׀Ӏр׀ڀـҀրՀف+2;<(1.6+/4$1#45$=-7*,.'.045.!&%#,/")+'3,02;2)//,/8/3B3&*:2,45&52/782.(%5)7(/0"'"-729+&4$@.1;4'&"/.1'0&)6'$9%'328+4<%%%21952$-'01$++0(+*2$A6"+3""4(/%/+$)/.$932*+,:!)68"'%74*)4*+$ %'.-29&(((*$!70427)'2.3("//$:94(9)16.&&/-'25=(#*/5*1/+,-5/#5& #+,,8271,*-".*!+/$*&4;,'1(62&/01'/4&)(7 8/)'300!/+9 -53/60$' !,)+*)%2-))!"%-+1<2(24-%5,+(50;1,932-0#"/3#,/-4-,".2"+-3&+9*-,0!51&6,'*//.#(7(/-)024/"*&/-3)*/!38:36()A)!*2$#/+#''0%,&0+&,*&.$'./$*)5<(5-0* -//'8$72(.,/!+5/75) +3//)#.+-,;.5)%*/,34/'',0+1#A'/*22ӀۀڀՀրրـ׀׀ȀҀՀրԀԀԀҀրــրЀՀց>./7.+7&'">0+>,&-0,/897541)$:"%,+<1"1+.:))+/:$)$+,4*-(3.#*,(%**46&.++ ,*2-'1%*#/3"/1&3$8+4&(.03)8*(&(,-:,*$*-#18,)62(",6$>14&!(0'3472/*'2/)'%+#!$%983(-*11'801"*3;%!<'.+0&& 54+#:-.56/1403!2%4:8%5"/#+(+('2&00(,',*,841:4-&80/ 1,+& 2:/$$''*2+(-//2"#36&(22&21/3-#5+(*-=,-+&0*/+/,25&;$;%,-(-#4%24-2/( ',+-511-0"-$*,1*((/34/995$-,,.7',#1, (%-5)+%6?(5*)0)0.-).2)/&:2*--"'*,#/!6+)+15,%(=$)43'2,.'(?).54/'(.9))?/102./50+)9))$30"'7*4.' 
(8*-+-#*2*/)+"0&.04"58'))-.--3)5#?$ ,5!435%+/#$/;%5'%:#360/,-6C%&ـ׀׀ՀՀЀՀԀ׀ȀڀـԀր؀ـՀ؀р؀ـ׀Ѐց-(10777./9%49:105*=7459-+%3&(1+&42&D2(2 5!.44)-#/'++,-11+26=+*3)2/))/4*+2+;+$;.2)$1,795-%+2'*6'/3 #47-9%*)!(#3+--$82++*6-$#$$4*5'0.'+3+604,0/(+&)$2++*#)/'(/ - -52+)2()4#+.,/(/,1,(+3'/+.+44/&+3")&26"++*'$'49#;/$-'*0+/*!+45./#(#)+854,-4%4-$(; *#)-..:".2+!-/53%5(,-6(6''-.1%+3+.'/&"&*'.)7-#%/62$/22'(210*--3(5+ /,.-+18%4(!=3+- ''*/-/.$3)-&*/3%&"0)'740/5541-$4:-4-33#,044@;.'./1-%',;+/2%%+ +$0!.4-($&.2& ,:03!*.12$&="#,),)7&-484550)4.%<#19)#,#-&+39-+(+6*(5/*3*3-*'6)!+)&'B2<4,/-9+ 613?)4+13׀ـ׀ҀՀӀڀրրȀ׀ՀԀ׀؀ڀҀـ؀߀׀ـҀҁ<%5*/*)9,(6;)(47'>0)0'!1)'$)5-21.+-"//*<2.+33*'.80):352+/)<)-/7(.*@(0040%5*,7(,/4/**/1&&( 3/&&0.,+)1*2'*).(%16&1+/!&.!95#'/'+*-+0-(-'/.$7%'0*7--12:/$#-2+ /(2/-2+(?0/9*)++")3.0*+)9)4.&'6/0-5014&6*-).84"02&*19;+%2--,12&/)(,+9$:).-/*,(.&-3/.7),52*,$9*./5,),!(-,*(933*6.%722$'-4,,'+9:(0+,):7/-!/,"(.#,,71++.68.&&.8/ ,7%2,015(*$-7>62&6+/-/$28*')4)%1+'4)<3**/.3!5/&,*&*'-3'1'4*,+5;&)$%2)+&.-%/4-(/!#'$(-,.,52,@1)-.592'.-031#+%010:(+----+.*/&"'/,,*=+'$/G1'/4-)6+(24+-6-+-93*.45)490(*5+:,33.7Ӏ׀ҀӀԀۀ׀ӀۀȀՀҀԀր׀ԀրӀӀԀՀـ׀ځ75*.="%1#.&,%--64+-B,0:-+30+'(#-=%4(2/127,145'89-349**%1/&)*/.5)34/%/.-+&7(.,+,11#%-17+*,3%+ $$=--4$%)'A,5240%*)2/:%$$/',''((+-,!"4()9-%*/52+/*+/)0,$+('1//09+*9'(-12.*041(-+9)++&'//1*51%8+;.!71%1/)9,'1(,(11#&/!/+#;/+/18%2/-,'+$-%)4*0?,)*1/+%5)",#'-7.<.,+#<$0&)-.(*.47&/05*8$$+/& 8/350/*6(1/#-)02342(/-/#,,4',%,+',&.-64-&0/#.:!#(-4((,/)%)1/,>*'!&/)2*.8)(-,3-(8/&-;50=1(,/.(**/1%3/'7-23#5,2*.,A7042'*#28256)1&&(37/47%3-%/*)'&$,%*,)94(&&,"&$06,*-62#5'#(,-2(+051/ /*2,,30(.301//(/+-)!243,/ -"'/$*)$.3%,6-0'(<$( 65%**'/(,,8-%)1#).2()1/+8"&)7*;0&$1//3.$."*01+:-01 #2532/%4>0;2/0/1(;!1*$--)'.,/('4$%(&.+"(0-,-'+$/5,17)'22.#0*3=&6#4:(':+2/*+4'++!&.'..,&-7-1.,,$*7-&..% +/(3*).50%53A%,;00/30%,/*$3/2#,/2-3#*3$//#8&+!,5(.(.+)&31"+*!/+,#*4%#)/45/'-*#,4*8( 
+1"3').870/*+,'/-11//1*9)2!+'(-"+637./)րрՀ׀ԀڀـԀـ؀ԀۀԀـ؀ـ؀ՀҀրրԀׁ:71&@'8/77*+,$1#31)7-4(=,3/86.+'(-&4,2%%$4+&.(&%6/)0?*?,%4*A9-2#05)7#:"(2193/1!/1.'1217 6)*05-./16>..($-1:(/:1381*(2$10, 3-(74 42="'!;&+#2*# 8*1/9&!$"*(:(*8)-&5 $=++()!4,-<+)2"%-/#-/5!'"-201(26(3,/).1+$-$2*521./56-.2)-"(:).(,)'(*+%%83/!=24-3+')1#'.*:)41/$)'$")/*%: 1&/-'1(-,3)0#&/)0+',-(),51(.1+&,0+5$8* 3.0,!$%1-$3)#''#5"'".,8)3255+>.-%"73,*465+)+'!0$0)-+&(*'0+<,(/0$!($&!)'94'/47.+&-)00-(+18%//.,/(00,/"677,,%;.(.)1$*)+2*-+),2,90%+8.+71'*31,3(70%,8)1)3'-,1,)0?8-04/-01/'045+(/3.42)&@ԀӀـԀπۀӀۀӀĀӀրـՀՀ׀׀؀ӀҀԀ׀Հց,/513%1(%0+"326-*6*'(2)1-)2*(.46#+*-,.-#00#$=2/+4)4(%.-=)-+..*-0*(1-4/76,07/+3*5"+/$ $.>*%6+4%(1)2+''/!".=".2!%1/6&<3'3+624'3&&+)"1&'1*'%-412,6257=3&65)$//#()2,&-<*$+.1"*-/'"*74%,2+;(&-//.*(0/+(8%&(.=*@'//,*;,-.26"1**)(% !7/!+2!..#15)2%=*0-,&)(!3)+*''1*& 09/215&%2-1)#5'/4*'+7-3/1:,&+@3+/32110*'/+)45$(,-++'/0"6!,4#.-#@131");8/-429)/,&&-!6.%//>0(/0%6.('4(-1#860(&&*$4*!%%&"/01.(+,(&A$6#.(($"7+1,0'4'2:34/-1+)6**--1/.+&)09'+*/--('43/5,11%&&,+1->7(3!!.-/2/#).1%#@'-)?.,,.+)/-(!-)+06,+ 87,2'Հ؀׀рՀπր׀ԀƀҀҀӀ׀ր׀ր׀؀Հ׀р׀ف741.*//!+!:/"%(+"&7*1+47)0,(&4&'93,71*-2*6.825/<!$2*&*3<+8*/,-;&!&B(*-5 +(91*25$+'/+*333 /B 31',/(8+"23#/(+4*+1),0)/)3C0**/&<&/2!15#5&6-$46*&//.&&/*&(3/)(+1+8(-&&4$,&=*8!3)2+;0=:7$./+-50+..1,.,0 )2.+D'1&05)31/67,/-,#,/0--11)*2$<)4$3$73'2').5.((4%)-&.-=1,=(*;,2"'; 1/3 85*5/64'':0,-'+ /1'!$#9-/&7*2(1.%+-+.'0';)8")13"*3((4"4(//(+&512/1.51.$(#<%+4$%$)7.=2&"(*.3%*/7)&#.5(9#)9*/&$."7*0*&.*,(6++*248"5/)),-'%-0(,.((6 #0+52:3-)0*+*'037"*)'*-&'.)'35,5+3-/(4%)/!!$0/10&J,2,399 !)/<53-0,*9*-(.4(+*$)2/+рրՀԀҀۀ؀؀րǀЀԀրԀ׀ـ܀ԀԀڀրՀ؀ց33%<+/21')2.8'6"1+-*1334++-1.%>+&/+3#8, !,#+')%-2(91***!5"%*-/,'3*-0(,-,2'.*9;/"/-5'')(82;,,)-;-'9/'&'$+34'33)'.607(-1+,7%/(,1 *#&"()%'4%!-0.*'-),3'+2."!043,$,-%:!*,+)2*-,%9(H $$1,..-'/)8.'22%*3.*2'(4/.9$&7!1='7('11&+#27"9-(.-36://+:"5.(6,)+2*2#-!#1&35,',$(#++++!2,5*,'31!., 
%*<,<.,33)+1-*)$-$3'-:(,8./%+1%&(3,%+8-2'%#-0)4#&.*/13"'*,*#0622$#.-'39080)/&(3:29&/-943)9$/(*$248.'(.1))*2%'5:6.31./122.&+3(!6)720%2567./).,);,.-/+&0-%.-95310,1,<#8-33")2/(&+1)8*)'*245,,/6"%(++09%2,2.+;9$30+(7*+(112560 0׀׀ր׀րրԀـՀʀրրրՀ؀ـՀҀـӀ׀ӀـӁ0-%.!31,,61+28+/%+3'&+0:+11>*4$)@-217'503'<'0620/"6,4/3(,./6/./5*,&234(?+3(.'(4-.0(9/+#3+!-0')3')21$8-)'+.-31*'7/137(02/32/&/&11#%,"/5$'47#3*7)3++*++6,+&3%22;,+9(3($5"1+#+' -',-(-+#,))%-*'+(,2-/1- +'!216"'()61$&-8-8)-/0:20*+"-'1.&!4/=" 2 +'9..#01&/53-+.(7**/-1-'2.1!4.*5*52?21'-/7;-'**,21*0*$'&-1/-1+/('#0(%&/%/(-(7,2(>.+.=%/+)1'3+2665:*-&2-.!*0(*#!+'("+ )-//$%*)/572%3'2/4,'+#1"'()--(:+-$**33'(4*)!3'240)5(2.#."?-'"/!7%$..9').1(&),)03(,('1*6!2215$932.>."%%7,7,0)-9/1(33.03352(''*,7//)3),/*59%0րՀԀـ܀ՀـڀӀƀ؀ՀڀԀ׀ҀڀԀ܀րӀӀՀہ/160/277%&84,,1''5&.0.+$..,--(.4).((4,//=*+(4+*(G1,+09A3!'+5,.(5-."'0(,:)/60555 5-0.1%6/5)*)40'.'%+/:0#-1;*!+=-2-+/4,'.,(/25'(/4&&&##;4%*8'3%5(&,!(1809&-$.*./)**30#916>00("3//-4,/3"A1,."-25"4/&1+&,9)&!.29/'%* +'.,#6+2%)-2/4;)$/%-5+0*3./(-*."&'07+3)""".*$*'($0(#-&+ -%57*,.+5,36%/6(/30639452'5.)=&+ 3+6/8&!)5471:24&6,-%1/+#,1*/31*---.%*)(')*31*2.713.22.*',-(,,#1%-':6(%-4$0#8.&--0<**+-.*-1($3/,()#'"+$30/-3'*847'+#-/&7#7-0))1',3+"0,-*1.$&/-0-*'((*&++-8%,+%--;/+#/-,/EC/.2-6-1(&50/8-;1#/<+7;.1<Հ׀րڀ׀ՀҀـ׀ʀՀـрր؀Ҁ܀܀րҀ׀׀րρ.28'548/#=*>,"".23))/)4/9#(*#1+-1/0.-':*(18*4'I'.#.70!)83$/1%2-50)-*2*9.,,36"1/.:):#*#*+134-*1-2))-B3!3*5+@35 ./672"0:(+*+06.&&%26+*2-,--.),-&7+%6+.03,+-/?'9/(1-0-+19"4+.5..(5(>$,:$;%&10/&*8%&8)81'*-1$8=(41&-$.%$265--()&-2%%)*'1-4$*(#,,&5%*1.' +)90,.()&D,(0+%+'5%&,+$:'1&/,9'+&$.*4)@3.>#0("-/6.&()-1!0,!&%-09$"1,$&20.3/&.=1'<+/.32((3!<40/")*86/)0+)+1."/!00))'-*)/1>**./%$-9**(94),127; 403!2?0/#7!0&4!)-.)-3/ $3&+2,2)-,7++'''*9''$!-;%&.1/&'*&.":2*2-3+31-, 2*." 1-5$/4 #7$(/2/*")7(&+/+3,/0&3+/+1'0+.-+..-&8&2+.,;'0)5*/#1D7,+.36-)0003΀рـڀ؀ۀ׀րҀĀҀրՀҀր݀ՀԀـ؀ՀـԀ؁7. 
-.,,(7(6-&$>-55:9?/.')002+!2 5149%+.*>/).;#0,#*.(252'+,621&*/+%2./#$(@0+&-3+'#).516#4,",--).6.:+9%0**',33*17#&'1 ,+#"1)3'")+9+3/0)* #2,*41.%/45,)2.(1;--:$9&)-2/%*20$&()3(/''*0;6-/3&/#(*(!04?/($+#5+1!/+?3,+2857*40.:'',:,)4%,,(//-((+*4*#6$)-9;54+%-70/1*7)''$-0&+,-' ' (//$.6./6/&+;8(2";*$/5)",$1%/'3,9%8*#1()0(;5%+2,!'2/*/&14*)3#%-$)8")32+5&*.*+"*32"05+*370(3)!-4,7'((2#')4)-(,)!3,,+70+%-6),+?1(./7.+2:$$*.-8#&/+(2)51-72&/(6:360%-:$$!--. 0(*-11'*1,(!+'3835"38;),2%1;(682-5ۀπԀـ؀Ԁ׀݀ՀŀՀрր׀Ҁ؀؀ـր؀ԀҀҀځ"+(02(44/-.5-/#/9*),.:/,',/7,')-/6* 5/)(##-&7.2 *$4,)B-:&6--%0/.+1$,-%6:=7200.$!0--#/-2,#6- ",10"**#**6.4/).."@:+)5.-*+/*,-*23)#84..#&+'-**)*$)7(/75="2'91%=51/++')/! +3.)!-$.,#(41+1,)5 94.71:"+*!+$/2#(*%&'$"."54('0.55/!/,*!$41+8*-%++-,2 )0+9,5"0*'.+','#-%26$%%<,?*.$6&1$/$.)$+1)-2(/1&,+($%+3.(+./-&1/96?62!52'.6(&90*3 -,;)#.2'-&&*4%/ +03;+*,+,'/;39)233'/.#&7")#-%,&6&07(/&+/6/-='.,),(5$4#)/+(*0<1%$8%%-+# (-")+.2*+.:- &5(7)5)*,,.-153250+%&//3/1/.9:68*A;,()'3,.,*+5+)#9$ '/3#'/5(/*&4*,&>'55-1+7&$+-ӀҀ׀ՀԀ΀ـӀـ̀؀ڀӀрӀ׀؀؀؀ՀڀҀڀӁ /*110*10*'18'+)&.3*20;71&83+)*!'.5,50+808$$.'.30,C/%0)17-B 5+#94+-.6-*)4.%,./5(112'(0#=2('+28$#//%)!/!&$2.,(43&+$(/,121"1*,35'&%*/,!6=&3!++.(15(/%(*6$(;,6'!*,/.$41.)2(.30+4%',2*14/,-+03&75-1!5+*$,6-2,$.)8&),/0+/)%*2-+10+101'1)6&'4*7)71'2*11'++.1+3$<+$,,.*3(217%#31.-/+!0-+(/'$2:$./01',!.(+8 . *0.-7.$,.'#/.0)0)',$%/!-7 /**+&!809'-""$-1/:6.@-0))'/,*5$.27(.)A#/1)2+.21;4.3 ,)%/27(/+,/8(/6*=%2&#)4,1.2$5;=)/-'(+.-;))-/)0*$;./271#9/3.&*-/%,5+/(.);/-'%-.%04(+#(0/1$)1#.39(8%7+3"8,3078+/605*'--)*',.38-:8+'؀րۀ׀׀ր؀ـڀƀր׀Հ؀؀׀ր܀؀ՀӀ׀ۀځ5.+13-..=%,2%"867/)(5-2%'*151";,1-3+69!(('%.13*-8&,5/"'-/0,32.8)(01(/99)..$1(%-!($0');0$ -)%.C1'0.&1,&/-;4?27)-3),%:(2);$+0.5.-#+!+3 /-%/''/$+6$&5)F+$'+31*#0,0*&-$'*.'(0**2224.,'-)732.--%36%: !%,/&5(*&&'(*')47-"1"1 4(1!010.#/9(+#-(=+($1'&64&-,5<&%'1/3%*27*/12+7&%*A20'0#)")0+%*6572(<4))<)/%55)*.'60.#'481,(,3/B'=.A.5$,)*"-2&0. 
"--/<&&&$,#51-&.'+-&()5+,)&-("&,/1)/'- 3.+./%(41($>;4%&77.--/(6&(5?'(+7*56+#6(4*/-2*+6081571.0G$0-74(+*.56,'3%*$%.%/0***)!679$)0#4*/)1(&*2/0*4)/%':#+#,+08/0142<27A3#Ҁ׀݀׀րՀրڀ׀€ڀ׀րրԀՀ؀ՀԀ׀ԀӀ܀ց03./+10*4/637'.-0$>/'+1+%%4)25842'1-0,)72-6-;/#%,.9'5$()*4)':7,1*"0-4.)((947$.0)/-%-"#53,.-6-8'.('063/1*01=/1'+).-$#).1+-*+/%*/%-*+2)'(1"(#$.+"%0-,7'#8'6)-!,)%1"'.-35-.(/(!7)&+$+-2#;&.-'..--''-*1).,3/$=/4(5**+)6% 9)=.,(-20.++(2 (+-,6+&(+*, ,+%0-/'1-2#3/#,+',./#53*('-3.'#82)+/-,-'3(354-(+2($.,).,$36.720,/*1.$//6&'03$67&/(425(!86 %5,80-?$*.-21-)2%9+4.2*,#1,*/-(+(/3"&/+4372"+7,0&."(3)*&5.&5-(%%*:.0?#(195-'*/6+*:2-;B#-7*:5-2.%9*):/+7;)7735,,*5'."53+-&*/217.;6+1-9"4(9׀Ԁ׀Ԁــ؀Հـƀـ׀׀׀Ԁڀр؀ڀրـՀԀׁ+$:(>&/3/)90/62%+;,'8)%--'/1+6,$%) 3-4'&9&(90.2=1--'6+>0('+ &-1:71('1&1.+,8,-'-7%2!'-/;,$**.+.0)+'-63()-2&//&023(#*4"A2&5(-13):545#&+!"%2*0"-43.2#2+&!'%+#,))6*+'4,*5%8.'*/%0A1-+,5+5.(9$2-,.,26/'/%&8'4.)%(,-*<3=-2(.'*("+3417',($2-/9'/#++,)+,)1#4,%16*,1)*5/%*#%$2*)2'1)%#1%1'-9.)-(0/2/(),('2$*2/)*/+36)2E0.'/.'%,A':0:%8($#.%-.'1+%(7>1/!+41%2 ).5$)$5)*4+&+(3*,,%B%%/'0*7%4;@3)6$&,**!,4,8+-.#+*533)!-6,2(&.%5%"13$!).616+'1-$"11'');00.)1-(>1-*+,!+7+26*/1)42+),!3((&$5!("3+*.(:14+'&/6,3'<.'--*44*1+)'//(5*44).1--//3/ %/1-163-)(49)$$4/.+3,8+7+5+*11/6'-0+**)4*$85-(,%29:,..0*,.,5$1))2+2"%$32(9&0/6,2'5'+*(('*3*/2#5,'5%22'3+*<03+%1117;/+34.!322 &).7/!2)"!&<"),8%*2:2('/&/72 %)/')*#*+'$5.%%)( 1-(()&5'0&,5)+1-7)."($26'6,)(5$/#,0'/1''-/()892(-*!!(0,,-33$+)/)<0,950-+"/Ԁр؀؀ҀՀրӀӀĀҀ׀р׀Ԁ׀ـր؀؀ՀӀ܀ہ)7+';.6,/+4862%010-/'0)K10401/0*)'%54+)3')66*4,/36-+)*$,)(-*'1(.16/-0**+/5%$61%(&-2485**>65'7+,2'.,920..%&'&'/08/8,/*0524-+(),&/%+%,-/$=3$4'&/2/1(14$(38+0-/)0%503%'-1/33- 50'%,/&(-4(.2."-''%#/))+++-!7+--7)4*33!(+2(#0*5(&"8 %)/)%4/0.1 3&---/&,$*)62/'(!5&,,(22*9-+--;.'1*0"--767//0 $2)72-2(&9+0!-4(!(3-)),..0.4**-'&'/+'0.-)#6+.12(*++&/-3$)/#*$7 *-7+:!/$3(.3.&+(' 
71//331/230$/*(4,318!3/&)?2$$/5/4/'17*51>%*>#%06/3,++/++;-&+)90*1:%*+**,+7,!2,8*,+%1*.%#865%,,++<ڀԀ׀׀׀Ҁր׀ӀƀπڀӀՀԀրڀрӀڀـԀЀف'*'"2.!>+'&25/*/20141&11)/:366'3,/*6(&;7235,17#81+)'31-$0,+/2/:.#/.#311;#-06-)6*2$('45-+*-0%-#8%19-%1/4-+-+31:+/5+'$,)0-.365)#)#12%<)-8-0,'8.)*"7#+ ./'9'*,/%3,/)%0+)&0"3-*+4,:/#+2/':((:.&*%/ (+'210=8+8./,35$!&(/'+2&-8#(-/4#44//0()4 :/;(&%/*1@*7%63/"/*4/5--#--&5%/,4*/-+.5/6%-#+,+"6+)3'021))&"1'-($,+6&62,$5/5&14&(%#,1.5*4+-*:- 7*%(+%+/,4#-#3)/01+,011$,$/&0/!-2-")&'.*'.,,#$(.'20321/#")., 7**,"'/?*.87++.%"454'32!/%%("14-9+/2,5-1-3-)).5/(*-(54>,17.$(:*16--+2((2=+/))+-?4&83-*..0)6!/123(045)/9./);.6'؀؀ڀՀـڀրրրÀ׀ՀҀҀۀ׀ڀԀրـۀҀԀҁ811*.*)/,3'3.25)608-3*1138,(*.(359'5#+.$1/8 /"0@1*')7+0=+0-#.,*-&%.1B3.((+(96--7*2.+3/%%%8#&)4%5.76%+.:1%6721-8-'0/,)4#039/43/:823+))9.%/160#%#1+715)$3 *4-2(.*/+,%:#' (0).&1,!)'()/4(12-.6'*6(,--76/85;#'--%/1*(,0,-0,/2&"/50$.<'*01,%-4'#-%1,+2-*2(5.(.!05-!3 7*56+-(-'$+''01$,)2-2'2/192+1%,41-**4+5)+$)1*(&-)-)-0.(*=(37/06")8*$4+)5$%4!00,0:&5./3+/1,%(,37+37(22:*+,'#1A1(0-7/4051(4B#1++-8-?'#,#()7276(+@'.=*.--/3#? 
%=.-5 /*-="4.&6--*9*$'&.)>0-3(0000+7+"&,8&(3/2$2$(-6'#=)00:%%42*L5/*5(3-*+/.0&$2067;)52Հ؀ـՀ׀ӀۀрրŀҀ׀ـ؀ـҀ׀ڀ׀؀Ӏـ݀ہ/-'(/5++4"622,)08,01(* /+:/(%2/+-;6.((1'4(93-7+<*703/51481)/81:/<-*$)(.&/(5$/(%)*+.>52/,%/(9-)&/.!8% $$/6,('+4/1/+*-21&*9&.22 )=%+%/,-",(.0-#46(:-$-8% :53*&)'.*2+"$,*'1++%'-0*/#5$/-1-'%+28*-)13./13%&*3/!5++60-$+:%)'56-:/.)%*++5)/7*)0"*.( (1-!0!(2$*%=*3/&,!+)-&7, +.(.(-/-4,,*:212*22*&+0(0B!(,),)0'8110)&6$43 1:1*7(9.;4&,+%*1/0+".(.%-",0)*20-')0)#*-*+-/%''1+3'*5(-)&'',(:62+" ((-;/$)1.'0/$##8/,+02*)H+&9352/+%$.4.01(( 2.76*1.*17(3/$.()+/6*%7%&/)&54+6364!)+5.-)-+%=".(852-$&!-):6+.*/#61"5&,+*7$/&)(%/Ԁ׀ـրҀـՀԀ؀ĀۀրрրՀ΀ӀԀրրՀـԀՁ4%3#162.<60:8-1+'*42,+$61!/5,:22)64(14',#*/0!5906- #;,7.)3/0023-(+015+(6//1 "/0/,57(589-*;%0.!'+2=-+,'.+&#-/& #9)))&4,8)5&,$>/.(/1&+&#/5-#(7(-)/@01;-*+-+24-(,#$&( 14.*/'&%#/2')0"/(-/(2+6-1 -74-0,,60.3&'@129/(0-(*-**0&!#3+3$;*(4-'!&!,8-*0- *0.+30-,2/1);5071(;) +:88,41!350(-($&*$0'8/(3$$*1+06/,"''/7-5(%'%0/(4//-/)%+*3+&%1*/-1/2+.43(%-'1 +/.-'#8194+-+,+-04723)!'7'!))9/$=-+4&"*2542/*-(*/81.'6"&/(85/)79-3++65797/0=46)1=0/,:+./6//6')8'1'&/+*50!)-'/#+/2%//+'"!#:03/-028-487&7#''5*)*/=/2',7..5/Ӏ׀րՀЀ؀ҀՀр׀рــԀՀՀՀ؀ڀԀـ؀ہ-:)-8;$*-7,,*2+1,09!)702;1.'0,&/5#+6-8+1+&/7(A.5."$./%/*.4)&0+&(-&2%6051$0##'9.22+88+#*70&7' 2-#,./:+-31.-/#$!32/9-1*94*,%06,'-#*2)&/2.+00((( 7'*+-5(.'%:.-#.$33!)%&$%2%*5)!3,06(&.)&+03,-,.; /4'*-(+4+,)3/$.882'0:0(/)*'!+5+30$59'550$*-&*&.1+-5+8+7%56%./,+#.*$)4(5%#*(:$,/-%0)/ 5&42*(;;,81.&(-1 &),0($&,509*@)),1/'3$)45/24++16:/66.<&0*1*,'/,-5.,1*/$1.!,81#7)1(5,13%%&-/4(-6&2",38*0#/#-+,+4*%&-+10/.70'6(88"*1)5'.+05=<2=4352,E/)+*1@7$),5(-,+#+&/512-!.4-1-/ '&,-4)#&%2#0++3659(3&.0/,6'*-4"')-+&C5?-3ۀـрҀҀՀ׀؀ӀŀـрӀ׀Հ׀рــՀӀՀӁ(../89&197192227.1-.23;&3$.')1'22$)-/10$*5(,$*#-6'--1(,+ 6>'90/('1*0$670%/.060/#,(2&*2-,3659 &/#B"()-,#(00%0&3,/)4/421))1$)59,+323++)2*.%%*91/5(+-3)+/$46'',0.*&-"20056-/,/***.90-3-67*&,1%+'53+-%&/$2))1%$./)/7-#'45,*)+#:,"-7#.1+,*&)9-.(5$47+,.**+:&/$-/0#'(,1'9)#%.%''94.11,2+#,3<2:#!73425465+$2(*%#413"C0) 
#;2+$)50%+0"<5/-22,/=;)-06/482.6/-)*'/83-34B/+0.C04741954*<ӀԀӀԀۀրрրԀˀӀր؀Ԁр؀Ԁր׀؀πՀԀہ9&-,,),7+',+*3)+6 $2;*"(.4!)4"%;(-+ )/(5*!'+'&#.-'%/#2"0(01(7# 5.$(6,#03.&!)=2*//4+(*.(-#1+<"0'&124).4'1&(451/+&-;8-31'%0').,"'*,-&$,3)//)20(+7&-(.(3($22,+/1%4-,%+3/'33(+:/+<,"23#6>8.&:)-;''*(9/0/-432+4&'+*-'**1#(,(-.,.&7(+,,&%1512*3#-57,+5$61)-?-.-.'0%4,3*7,/&%/*%#);-#24.6/4$1%*&,-'+/58&%6,"77*8-+"'(=$79#'7,4'#'#/(18'2&1,'$(2<. ,/5&:+3%#-1%-"&.23(#..) #152!01:C.0(,+2,,"9*7#0-)""*(..3(14,*0'+8.'(,0/.17(/1'7'C0C953670:*:?83:531-32&-'3%&#/-2)%$0718*).-9-567/9+/(+-3 481*&+1&&0,%60.-0.0!+,׀؀׀׀ր׀Հ׀ӀÀ׀Ԁ׀ՀրՀ׀ۀـڀڀրՀ΁@(./,$-')<%84,338'@3:--15.4,/& ,+32:!87-6>1-@727'34)))&(/+((*$1.%0+,)+)/5$)(1#2.1%&,21&/05/11+4<3*028,3/= ?*)2.,;-2()*."-1+,03*.+2/04),/-+)*52+)"5+?-/+(F#/20154$,*%-5-$4.*8.%$$ 78/-6.--6*0).#%53&);9%+/%+0*@$52,'$#+()-);"*4*3%6'+-053172'*11..'37*/1,&%:*.%5++)/*A7!!%3(20+#*(67,/!//.252)2475*,32)&4+3-0+#(0 $.-+,+&.)$+#''0.6;7-&3 (+:&,1$;-+(%%&*11,'//-11+,(':%'36)'.5 &65*= '*.,(2(34,2+)0'1B!1/-%3*/32.7.0-7)+)3$6.;*5E9-A;B2J=661D4:4<'%+,+-6)+"+-++2%*0;+;,4.3286'#''7*(.&+%/5,(+3,)2/=2,&&A(15'4$--ӀڀـӀЀӀԀ׀ԀÀ׀؀ӀՀҀրՀӀ׀؀ــڀՁ/3-333>,5(69+6.*(6,-/,5!9,,5#+120--#+49)-"/2,$,:/'&-/(;1246.%0)/.*((..&3/$.4(33.4)#0+'0$(-508" 838'#..*/5*$'1)+##-:31(2<(/)'*4<4*/+2+@#(%)($ #(054/1+*'-6%5+-17."+'+'-%*+$/4#.*+#'+%$1)&5$)7+%C*0',%'#+ *$3(6+0'-.#++(22*&)+"5*%1*50-&4+,/+#)('*$.7+-/7*.*&,'/;@"A)'8'+/,?1-:',2,/174*#/+43$'05"'/-%'.+/+,,0101>.$,01?)(&%( 42-4$/-,&&+;//5,30E&,-/.15"6"+#/,/%1"/%206-4!,2*-*./%-+1-&(5$*%;00/'62.'*'+30+34(*(.'*1/325A:59LEPC8J::<=/7061:2!1397/&&'17+(:-35+9,)?6,*7(.>'-.%**.3,121.(6$'0-0./,*+3);+'70-րԀӀр׀ӀՀԀ؀ˀ׀ҀـҀՀ׀Ѐـ׀؀Հ׀܀ف;&,4++3.,).7"0,/:+4+/,/-7(#*<0 :(739,605+)+-+5("/2*+. '4'7+*0--2/)(029;!64(7/>+8(*--);6'*"2-/*#-(84.8(!02$4*',+'%09+$ 0/:"//-1*/#3.--3)8+5/* 510'/5,8%!*(65*#5)6-33)$22,"(3,D#81"#41.:)200-42-&1/(3-B8137+.%#)/2%.'/.5'"!)2#&'72.,(&4 )/3;5-*"+(+.-,4,)4-).0'+02:+*//5/+/.,+,&-406.-*0&'*,/-+/2.!/#!$*(&(/,9+2-+)&&. 
1*-*-3;,'/(.)!( =+1>$98*+71<:.',64';/--('.'4/'.5.&)!78).03)!11%.!%5)&2+<7<%5.$%7//(0=-$2-=8"/',B9'7436E."'1-5% 3$+/$+,-&-"-#$#'*12(()9'+',6))/+0'0#'0(70-32!,/0*!.6/141--.%++&6$4)!9&+'89%*)/-6/!,$*$8#/*5-2/*1-:%*7&5./7#,/,://(#%&'4-0/7)+(%2+-5/-)/#/-#1/6%+."&/49&&/(+,-(")(2#0462,&-/!/*.%+1*:/47+,*2:#36%2-*1)4,0)(*/"7/.#2"=*-+%1.+2&4&+/'8##'/"B)621+*0-) 67*.)6**-)3&$-.+$6111,"(,.04,-/4#(/#0"-012##'/- "-2#,,*"("%/,&,:)4&6/(3)8410*0&-54/%4*-)*)#2#*,)$0*"()/.,+5*,2$2'#%2"5&/)//$")-*-&;$5*-,*1%(9,-+-*/3.20/)'(+%+*)'+&(3)!*..4&$1!,$4.! 5$#(6%%&-(%3+3''4%-4.$042-220 &//"(*&#("724"%!3)20+')69,5' '/5(2'%/3+9*(/"%7843B50.-0)#+1&22(4/&&//$/55+*/A&/)5&--(%.*4!.,&4(-0.7/2&66/*,+'.2.27+0..6$"&"5+3%%//%)$+/0$<8F+1,+ 1:*.4.%'#&&--%*(3%/12,710('+",1,7%..1:2'$%A)7,+('-!4(-+'4672."//,,./+=(872!2.5+- &7798/#12%0+/'(=(/$) +/(6/'*'*+!6"@%3/1*+1.-)(1".2")9!5##&0.0 '+;%+2/$0#///+603$4*-+"".2:;=(,.0/3&'-/0'1/)00)0-+*/93%'1,<2.!1/9,3.)-3#C#31-11,45,+9$&2(4#*5)+ 5-,#$".,.:7@0750-=FLOpr|sQTB674108-3",/,8!!('044-'*47*(6'43(052/6*+/0;/(058.*,(24/C$$;1-(@2.+Ԁ؀ՀۀԀ׀ҀـրĀڀ؀ڀрՀՀԀրրӀۀրՀԁ..492(-)22'$50-/&-*,*26)'9.3;&73.;5': 0010*3/3+-/85*-)#.'/1&6))/%2/"0!'3-0,41%0.4,43%9-/+891(+4)*:0.:'+*0-6 %,7?2,'.42,20-)/)10251+;.(26$,&'//".2/-483/8/).#=7*0%(%)'1#$.*"+4(,5,)0-.2/:--'#$07+0+/# ./7+:7")4'39$7)1#5)/-*1//'0!6$8(1"'%>&;921+* 2)73$(3 #'.-5@23+$$4,1/*#2+*,6 ,5)';2$/&-$-%*-(+&1.<6& *3,(-//*0 .'345,:*5(,4-2/)-2"'&)9))3&+%/0)!602- ,*90(83+>*$8'!4(*2(0:5-!+),,!$'.(.'$226/*/.0!9#%#(5()'1.)+08@7H;BjSlq[JPD?E:.42$*2/25*'->-31'*+,)-/00)943?/-4/-2/61)-()0(045.$8$27#?,64,+5,рրҀԀӀ׀ҀπրˀӀ׀րҀՀ؀րڀԀրՀԀՀҁ2%,2$0'#95:*0.&1/($@6/)060-2,/.*>0/$01'%*7?.'(&-,.!0+//%... 
(#5(15581+31'0/*3/-%2."',-+*(*",0(%2C#"#.+,.%5%181 '('+'%)3) (&%5&1+/'//!/A!71)=(.#6(&#(,*2/..87,'0#33=-)*1'24A*$34/(/!7.6"2+79(1$7'+#."5%3D,++1-28',+(3/))+-)-#4/2#5-,),%"2002)+0 /+%)1)"*+:3522*6-'.%#!007*:'-/3'569,5$$7!915%+.1019'&&*1%)1.&/%-#0*.'-/<&(--571*-)/*7+'1.')6,"'4,?)&+%2)"*1%2$;,(7(+/*7 (%2'%"--*)('(1%7/3'* (&$')/.*483-1"-383357&>/496JEWa{ā΁uVLOON7C/)3&-&(5'6$6-2:%))&7%=0.),1)10%)375!,&+(1432(-&.1/(2,24%02(//ՀڀҀҀՀրԀԀՀǀҀՀ׀Հ׀Հրۀ؀рڀڀрف*+0)"-2),*,3,;A'0)0%1 %/1-+5 .0(3,42)41-)0.*/08!0,. ;-9:.10,0%.5.1#=.)'*+*23+ -&%*@#,*+8*'%%'2-:#)4+",/37*9/*)2$(.%'-&''3,-.'4-'16'53%)-)-#5)((,/$()2.#!".-52:'37,*#"8,+3+'(/- )'?)26%&#+,'83* (&';&24,)+0(-4!"0-'43--';!/)%"(#(.+*#$3).)5+(+22-3'7',62,-3)+&*!%03#*-/,/*.4$*%#.5)&"%-,7(,%03)6*9!)2,//:.'5!*91-/*&$)6+4-',%.35;%/57@,>4C<%,/(-((#3)- 3-09&(-1#@/*41-B.200/3)*8*5',+86.&1(0.0*1$#*6+5'4&(7"($ +,3(*1(6DQTE^_Áˁˁt]RK:.9;0+E/:2.,D1%;6**(43.)=&*/50-()5203%#<1(66$0/5A9+7'5'8-7+/$#7)'=%׀׀ۀ؀ՀԀՀ؀ۀ؀ӀҀр׀؀ӀՀՀ׀Ѐ׀Ӏف2%21*#9,(66(*/@88///+41'#0-*$&"(%261'-1+*4150+5%8(51'/ 23<1%+#,$/.5)2,?)/!64*43!-+,1%$0&3,)+00/0*%1B5*0,5-5*)0".%-+//.%-"!/*#20!+&(A%,",)#'9,-./.";3-3//*#-4#(/1$$<%7.*.-6$2'*/*')418.(6(4,%*&+1046-0*031+)0)!(5&76$)<&6)0.0,":!16100+/2#5,..&&3/#-&.'*-43+.'/+.093548-,/'14"&16%-'&,59(<+%/'*6-("'-$3(+2',/27*2+2'%,2.1'"/+1.'-:7%.2:4%@2B,'**2.2/#&5*62A&%.-*01&.(*-%0/*/!3!%)#-!-#/53-/.-5!$=%6132.4-10&263()+729;WRi~ԁՁցʁjH@E8?3<54A6/76+-2/4*2$--2%'*5&;*-4+/.5*9<*-08)!=6)6*691'349+40")(28*؀ۀڀ׀рր׀ڀ؀ɀӀ؀׀؀ـՀӀՀـ؀ۀـր܁!80/7%2%24&2$!-)$8/.5-$56565,+1*+0+2)(+0&+80<0-+2*/91'/61630'),%+64,1,%$+,)+.6%0/&4&*/)*070+,')"%85)&*.445-7/&4%8') 99+;*.4&.<#21(-+1*3,(.+$-$,5*846(3,8,*%('(',#%* ));2-$4+1/92)010 5%,:%/*)*.*13+./*&%4"2)41)+/)% ;/2463/-+/+#1.<)(03/,*)#0+.)-%1(5)')3*,)#63.3:*//0.)5, 
0+72#'0*02*:#3/(2(!'*/.0#-3(-(*/A')-,+6,*)*%%+50.#//.)4<'*,7%,.<'))+0&,/'0&?2&*-&3%2630*(#-1(,+44#7.1**/&''''+&/0(0$')<-'015$48&$/',#.0A&/D8K=NMqy΁߁΁ρ́hx[aD69G+=48C),1+).915%%!:0@+("5*8139"4)*+./84*%,#/)!#%52&0:12<$(0+/3':׀Հр׀րԀ׀ـрˀԀ΀ՀԀӀր׀р׀ԀրހӀՁ0@.5)38;')09*'$$&*$&/00-7-)(' 32..''.'0)+2=!=6#:)12-47&+2-3/7.<*.:-/--')/("(0++)(6.'32.$#: +.#!.&/3#50*/1,.#7/0 (5*184&$(0#-1/3%/.<& **#8$700./466')03'.02..3'*20,***3-,&-#11'+4.&3%:8$*3"?0',,3,,24%2'*+*3&+'19,2/:%++4*&4!9'A0=%./26(18"/)8. +8)331(3)"*311;#&@).+ =(',-+&+)/)1&)*:)9!55(3/ 1/35-$---'")4%/'+4%-8/#2)0- ")"418.&,(+/2*5-:9648.30$#(+1/8*-+-*+/7(%8! 6-**5'/5:&5,)&:5,. 9*4 2(-+52-5/1'133ADCN?^mŁ܁dUP;IL8/642--5'2-7/0<+1$8&45-&6;&0-)/)1,*-8339*92.1//5(-864'/<%2'):?("׀׀րـڀԀـ׀ۀȀ΀ـπـ؀؀ۀ؀׀؀׀Հ܀с2-=30-+9%;"2/**0)(5(*7.$#/,/02**:$3-75&'<1,*::-2&&/12"8'-263/3%1$&8+7.-41.3$,.*(A,/$,51.,>%.*1-2/)94/'."$(,9+/!(-+55/&44(#2,%:'25:+-)* 5-*+(:#18*('1/*%21+,''-5'+4$' #74,(-(13,<17-+?-)1.0*&!)&+2-13),8%23$2(%/!-.&")**(&%&&(7'6"41,+-++&/0.7+0*+"+7.)1--$13'.0*."+"6+1&,0-2+0*(,0#+,)+%702"-"*(%*+/*54050$851/#,$.D(,?&'7'$'1/8,'.1.! )%*+4 2$;"A0-112)- +$4*/'@&%15&!4)1-6,,1,1-+/3%8972(!)27)6=:2'++/+2'+<%,=31+>(#&$6>80E3EGR]zpāD2MxdHD@DFI7-649"44%,7/04!9754-,9:-*2.)*1/:.2-/6')&'-*0&.1/!,,51*5302/%؀ــ؀Հـ׀րӀǀррՀրրրր؀Ԁ؀ր׀ՀӁ4)4+0#@2,<,'*40.-.$.30#!21+/.'%//<,###&&0/8$,4!3'%--6)/.60-G0=$+*174-21)0//3%)8;16+,,4+3%/5%4(.01.:$)/)...-4/*(95"1 '(248/.)1-*1/4-:.&%'//'//%3$',66105)&'?%#+! E/.#.2&1#%(4-&(+1/"$:/&0'.)7+++)11.*,/-00,1,'!2;1+,.-&")(/)0$(2'&*&'0()*8)+%*,-26,$,(&&#+,/*3*A2/3)/.(+"*8/&/+0,<0',21371'':14%--(7))+)&-,.+0+72/./*4--/'-3.':&%,.//#'22&(/)"0-$$75/34$1*+3")1")6#0*)/6'0'%)7&".',*2%.-2+)5(&&'5?"!->1-8;#7E-30&0260;*>DEXWdu߂FE􁒁{jUXS:B3186-+*+ -1)&()&.3+'5''*-'-1*&+(,72(+.6<$+0-2,9(4,/2,3.+?) 
))&)׀ҀـڀրۀՀӀ؀€؀ـӀڀՀԀҀҀӀրрӀ؀ׁ).4+(-9'/7<2/..&.)/.0#*2 */<,5-'*+39-,*>$:)-20#210.&*02*(&73)';.)((3<%400,6+,,-2'**5))!$5-/,77"8)#-,6-32'/'%.4313')0(;*(")1(2,"#"$**%)$'&,5"0+)#('70-%/9,*4-04<+/*/",0$& 7+"&7%5%5$13."*/**?>6*&."1E),981,751 ,03/$9%#*3#+&,/'*',''&&;;(+'&,&3/1-)#)($63-B4'.;(#%$!+/-.+'''-1% 0;>%1$ ''*8)'2,:0+..+()>./%)1/+6*%"(6#&7/!8//3)*)2,,+/,%+4(1':(9/5*%-%)&6''(/-5/ 2&%./-)-'$9/(5*;-115.444.-.1#9,-05D930/13$-.64*0.6/,)20B9/3EWTmZOJ`&k偡jJD>7?;1;<+@:*4()3+'#!-0+-%!59)47(*2$.003/-+$,94912(%%(:0!,0;2251/'%ۀՀ׀׀׀׀ڀـҀĀـրـ؀ր׀ЀԀ؀ЀՀր׀ց-<-(.9$,4-9#1.0-:"'&9,822,9%&3/&7&!;,%%(!4:#6.-8(?25/8>13)*/,'&0)0$(!*7'11*"+,"-+9*+12.3"+'3)8)/#'$0$)2/3&5&)'145-%):+,3-+)9#/.93%;5''<436'&824..#8$2.,+)1%/$0*:.*./-14'*2'#*+836/*(-*-.'2)/'0!*)/"+'&-+,)/)-+350:,12'5-5;)/##""."#1--#*'"+"42*7,3&"0$$,/*3*$+"%) 02(.%*02"#2-.,(10';(&0$%;+-5'%+)%3+,)41,-4'11!)5* +!&)/-#97)$'--7///--5($++/-(#*+#/+/5&'3(+%)1-5,.2-492/(&!+,+%&.))5//,-$)0?0+"+.'-15,')-/.,4:2,9*6-3?IFRPYǂ_KڄQn߂eaSCSA7-3:6//3(+0('9$//3'2(/&)"/&,5(**,-'&4#&6/(>0/74#30/)60- -41(*؀Ӏ݀ԀҀ؀؀Ӏ׀̀΀ـҀրـҀ؀؀րڀԀ׀πہ(.);(/+.51(1;+D2+&1.%A3.*0",)410-$#*,(03;--."-;,,/%&$.22--92-*#-:&,8/*5)!"5+)-#*0,28+7+,$$561!' )#+'//2$+-,%/,52&)/4+,D--(0-8,.)6/3'*"2 ",4--(2.0-*1%;)%1,+8#'0493-1-%.$)/5%(%-&$)1/-0'0)162-')-+%30(()!#,&-/%.*-&!(+&34%,1).*+'&)/0'5)++5'""*-%93*&('/+4&,0!(,/-()'4'$$5/)%+1+2/+74$$+,'.;66+5&/ 0)*,)"9$5*(0#")%'/7*3+'>+513;2,9,-'-3'7:2+6<.2,8+&*/0) 0#*0(,.1&)--*+".*4!0&!)<$+5(09/'49==# *88;*3B4-1/%+)/39.4>H=Fd|:6ۄzS [;ցpeWI;JA.7;3.!,+',$@2-1+.))30>-7/5)5'2,0)2/)2"-1%J1+.'--"4*1"3+(4233-ր΀ҀҀـՀրҀ΀ɀ΀ҀӀـ؀ـԀҀӀӀ׀րڀց+/5%1'532*,(/7$/'+40 -110'>.!4(847-0/':05I%)))9/4)6+:5+&&/0(*&//'0'%!%?3!)36091A"3+:-<",-'&)#'..,8"$.0'81,=2%)%. 3*/%-'408'0..) 
.3)+B52%%0190(65*50/2.')/%#.5%(02+'(").24!&(,.5&&5/-<4,8&!,*,><20'9+-6*)1.258302.$.9-)1/(,%).)(.",56(,;)0.+1(#.1+ )56)3%*'*'491%%2$//#-+0//&$('.'%&28**).0/"(%&('*7)-%3,2#&+,%)-/7-5+-315/$(**$*--1,:19((%.)0)/.*#;05'+0+=!/%7;5(,7')-/+:',(1/(';.3514(&/"/.-(02.&8('(-4-4$/1:,'/*4'1.-285-L:RctƂ_%mWbdMG/E'71)8:'?-7(316-/).%92/#*7*0&)8=93$,)$*812(3-0$)&+/09)$)'%-)-Հڀрр׀׀Ҁ؀݀ÀԀ׀Ѐրڀ׀׀׀Ҁـ؀Հ؀ځ'"35,'=/1-&7685-713)0.2?)'0*)035-5%('.*/9/--'#5&+*/0(-%-531='9+)'%3$!!/&03&$(,)/(+$)*31)/03:*.+,8''-2?%+4$,0$7+-'31#*9.)-'+/8*F"4%(1/-'6"2)2)20$.2(4/&(*(2+ <+*',+*)2'503$8,+(99-%231&5/61+/)5+)+9&14,0/%)6.2#/4%23).-,0,)09+.*+#.4+;.,%%++-1+5$*% +4")-/1$/'5,'(=3 %71:2;+#/0$*)+1-'/$.5/'/)&3*$&,4%,52*0%+'-47505"/)09,<013/1/68'04/%6-1%,3/8)/#'-"(':'%*, 0'70+5...#/6!2, %)/-(+*'/<#6%#%/1&051<,-1-&%.+,(3@64027.62EEe^vׂ,=턴['v}iWSQJB30E;.31*!->400'/%,++))"0-"*'(0*7'4&)0./;+./5/6,*0+.=.076)/4/րրӀԀӀ׀Հրրƀـ׀ڀր؀Ԁ܀ր׀πրڀԀ؁01*16+"'%536-2466;7+<+242'+(/@'&)3!53;710/1,(5):012$0#+($="9'/,-%-1:#"#+&&&*'@%,. .'>) $5,+24'46*,($.%)2,.)1#43+*&)6 -4/++#'66"'"(367)30-,-/*385)0*>8-/%;+)?!+&/*),3*5('*"/$(1)27-/4&#'940'.,*10$&8!.6*-'1&*%..1/.24+#0%2+036$3.-&%&3-4'#"-:+63%3,"51++#%1&+&)0='-.,)/%#(8(47%1++2/4=52'61"/$-5-4%9+9()**-82- 1-:6,+.)0-.()*3#--4..,00(/(4'7,/**4!/@-.9+*'('*1)46*3*+%7&',,-))+*-/')-''4/1)+421+%6+'9-,9=24,K4MNKZ\vzE`q[JJ#3:*(64:733$5%B%4'.87!+(+9(<7+7*$'4.02&(,0+$/-+9-66.%*&4-:-$/(,62/-*,+"$/.-,0%38 53%'+711),'8--!..)0#)2-3*&2)'28&&+7*+/%'//6,0($+-%*,'&(3*'#5$/!+9&'-+),--&1,*+?+G2/935'3(",'-6'(<+,+%+%42*"+%52//%.957%)*00& 7;.<7/('9#,("<$,2"-*5.#33"1*:$'='"!0/2%231(.,46.510/76/-*70&0#3:;;*""#%3&--'1+(0*45.+-341-;000)/*$(,/1/4 4%.0/.+-4,85,+*/%9%?$"0)--2,1/-%;16/;:<36(8P7OR]w0emnj=S끹weeG89G10C71?0671;)@#(-(67518+2-(35'.%5,*-9#3;/,*/.@7160'%9=8A'/"-ӀԀـ׀րԀـӀӀÀրՀ؀׀ԀӀـԀրӀ׀ՀԀف++*-./50&'1--*+6,(+16)12!1+$)13/-26'7085),.+1!%&00'702E1+.5+,09-3'2'2!0&8'3!/)$- 7$6)50+!)01'&*22:/'02/,+3)'%1(1'*$/./1++$/(<%,:1/).%'* 43-,*0$;.-+=/3,',4*&&*!(**16*/*),73#&5/&0%%9.((+2' >)7/)..'&1(5-193>,%/,,5 "'+"*+'*';*573&+//2(-+" 
/4&+2,8-0.5574)6#63$'!425=2/4!5240'$)++,&'2- 2),)(="-/&5/!4--!*:7-*77H,/*+4'.6/1(.5,*""050,*31/!'3,!7&(++1-*#3 46).(+9,-*723/7$00"&:/'*;8<-8/$+5)1.-%,$&1)%/&)- ,3.66<&AONn|2O /Rq1ˁdNH4B5=-92%->*3/=5+;5--6-2'6+/1311+4&/+-)36&(,30-/9(/.*(:-2$$01(,ՀրԀۀրՀրҀԀƀۀԀـՀՀ׀߀׀݀ՀЀ׀Ѐ؁3*)0'049+-9-9)/,-+))*6&:;323-03)&2,'))%+!6019%3:,1'(:.6*"+/>/006*/++-2/220/2+((<#*(,,- &)-(/3)+294/%%-+)95+$!6;)'#/1,..1/=)*/-#%&1#+(8261/')7.-*,#/*4:*8' #(&/4-4(18/+*!-/#,-."%:-+::512(!0,2%/,2& ,)0/>*(93)%5&(/*(/1.2!&5%$19+!,.-71')-*0&+3/(%4&17.-*"#4/+/('.44:7:%-#0//42/3-)+/(%,-.+*+6&$2/6.4-46@4'5(2-.=:E,6;323530+)*%3).7,/).:/2&/1E)372+-",317()'&'6)++0)&,-+#3./*")3%&51'0174)+"B()0*/0*&,.!-;2'4%+$3,>49C@7JvkҁF))B?h pcGM@NA7/.&:*+./25/5"3*&,-/&500(/&/.6:(9*6%*+.-,*<,!@83;&46:6.8&/(5)ـր׀ՀԀӀـԀҀŀԀրՀրҀҀ׀Հր׀Ԁ׀ڀ؁/ 537)55#18<0B-/60:/,2-2437(*9)*51/7'(".(3?*%($(.3+./2:*.:.5!.!**4+4"')',-&54+1-*2%)/-203/)-05*( +-(/.#$#+433',+&-,5*#-)#-<,9&+4(5)(1(//2,#0+/6"4)'/3),./.-(4#,1).%!#:52()+.972'&-%*,%-3,1"$4&@-/" //2+,1/5.3-/'.-3%&+(64++6319.'2//&9*,+ -D&+)(:,4"B/"&'+*=>',(53)'2'54.'&+(2/)4?�*&66(.!22)1)+1857(2"1,8-*&7.2A;CQ?869+*@,/#8/(:4...,%:0%)2#98+1*1",8(/*6(/.'',)'&-1/)*.-*&/).605/*2&0-&06+/*$7-:-404+57)0)7)+/'4'21/*?F.[Pbʁ+MrcځǁzjPK:@;249:-C.0)385-5?,.21*:*%*1/3/'1++/29%-2+/'2*4-566(?83(811!'׀ـҀ׀׀ڀ؀؀ՀŀЀӀ؀ҀڀԀԀ׀π݀ր؀ڀׁ12+#:7,'9*53,72-$2+77-56,A))496%$35*'8*;50037#-5/-"8.%.255*)+..)+'96 0-"-, 0-'+%0"308./%-1"06(,/81@;33-'./!3*4/,+4))$.,*2#4/0(*2.'-8&3,-1-&-'%(/0'&/14(<2'6+52)6))(" :-&8$,3%4('+)'.5-# *.)#3%!"20-('.+3,=$00-,&)3&,4217+)))+2/(.,D$$& %$;%.'5:33(++$6)*=89/*#4)#!-0=.'#,(0+//(**//502+.(,+*45.51#0<0&-+"(-3.012:7DE1>=/91-23-%(*7.54,$8(/-/4)+!+.!072&++0224?+*/+*554/.1*!/0''%/'*)%-&00#+7/-+'.',-+,-23)4..48,00$7<35LAIWiāڂ;Ձpr^QED:74>6.7.;)-.*'-&5):,/2#-=0/(//7',#%12#,3+#8$9.7-/)!1!-4%.:.120րրՀۀЀ؀ۀրـȀրр؀ԀӀԀӀ׀рـր׀Հׁ3#5046."$9(263'(0:9#),6.)10=20.0𖑾/"-0,9.(8.-;/4(=6552%-'+'61)/*/%1+1%3,:15/48*:(/1+71+1-.1%+-(5+)$./4%14++36!) 
<*,%2592, 4/',32"0%,#5,-(%/22/1!"/'/-/6'!:/( -,6#'6*8.,7%(*@"-,%?+#&1*&0-261= 2+)$-* 3+.('+()4+'+.0-/":"'.2)$'0/0",-)2)=(%#(*2(($'!-!95+%!7-#-2 -1!;+/*'9(A'0'/24;1)&+4*%('1'+,-*:-'%-'*2(7'8@?32><3E2112)*/)*6(<-;!0,3/0.62'3&<306/+""#'42))53(7<(5/7$3)+''+C2+'3+-&%6:58+,:@,-)(2236%)*0;/>://90,:L7!@+40C%5'7.>.5-16//;<-*(%5653*.*21+629(%1.)520')5)$-()-6'+"1+..'6'#54/415*2*'(6.>.&2+/4#/+);&!099704%".2-24%3#232)85#3.5+63/#/(-*.%*/'+%3*49(.*#-011-**-/!##8+!00*'%,<3# -..&/1)*#*)6*6-8(:1-,.:+8'"2*1'5'1: &+14,*--0--!C%+/5/8'-)7,1.&'),-'0/ #-(+')A";.3'+*//1.377.**7(&'401-0-8#)*3>3,-%*,%*.0.6,1+*567+.893//>14#9$05!+,-,)%D5,.9+60,.+1=-21'5%.(//+&*,*+0>+,1%27629"97-'-*.*'!++01**&%4.+1%12:@?&5'97,174+).7A:BD_\{p}f[J?AF.=*=31$E376/31&9#*<2*0+7#&*5'71,/&5/%43"'/)+--#&*4))/86&21/252ր׀ӀӀ׀ՀӀ؀ڀƀ׀ՀՀՀրԀڀՀ׀րـԀ׀ف5,27(401)45-(6)9665%+':-A9%+=12*'-"&')-+,5.5'&4+,162&+1$%3,$4,50%+833'1,1.1)/,.3%71$&*28-2.06/'0#),5$:,'*-/>0+-.-7>*)$+%(3'& 0/()4)#,+#7&,$,'='6&+&/'/5-!'3.28/0957)#++))(-:12/.6&211%(/(1+!-'-6*.#53!*-/() %///7'#',),%2-1'57'-1 3.4*2(-0!(/-0*#((',*/..45&() )7+.+08)65*'.6/3&2.%/,0),%*7+3('104<5/+#!..,% 0/4 4/34+8,2>61)A$=0&--*9),3.5.//%.+%5-#4((//(//53-*!/5.&!&',5%,-$(,7(58'%('(5%&4*-2%+'8()?7/309.14($,040/68<25P2@)..4,(+1'$'//)+7/)1/!?0+*ՀԀ׀΀Ԁ؀؀րրǀ׀ҀـրҀــԀԀԀՀҀրց-33"4'.408.2(.*.:11+480./+;46=?QCC_V\Y\]`knTYL?9QB'/6>*9%-.1,'6)%9148+..9>77*)62$'(';"6&+D20*1'88,1$)037%7-+*)12,3րۀ׀׀܀ڀۀԀ׀ÀԀ׀݀Ԁ؀׀ՀӀπۀրրـց%/*,5;.%4"&)+//61(2-:*(%/3+B%6;A0%9001-,&0 1/2/"2 .(1(-36,5.0%0,A.0*%&1/"7&<3281&*-,/*,+*%'413)7/(6"36/'!1/2,:))-/,/50".)-1.-/01/((1%%/",-12(0#,-(25)'3'8-!+5+-$0.'&,31/.,014 #+$-%37/&0.!,*1&32%$%72-.*0/13%'*1))()2*')0.+951%!6!;4!*#!6277;%94*-'+8)-1+6/- 4,').(-4(=>16#&7:#33(+/++,++1+1-1&*)/*/5#%((3E>/.(2.)9"+*21-+ .*,#---%95 03 .+/4)++$0+-87+"4,*%2*-72/;,6).9'#0.2.''5+',(%.+//'*+-+1,'8(1%1-*)4+-.!1/.+&=%!0/?1=I:RQLC?IZ_Y\R\MJ<9>)K?@2904*2+9+3205(,527<;/+(071-78-&)'02!25.-#4*2770(2@&0:'5+4'2+,0.ـڀՀր׀Հ׀؀܀ƀ؀Ӏ׀ــ܀рր؀ҀҀۀԀԁ:1/1('"2)5D3&%0*/62/68&5+-<*&&6*4.73+1:%+*-/6.,,*; 
35%*473'+5)'0''0---0#/'93)///%60$2.)3%0'/0/+,+-*2,'*.%*(07*%1%-'62/2*-* ;..(/+,$+4!*)+/3',/!%+'B&,#93'--..!54$0.2/,)1(.23,#89*!6+2/./(2&+)%,3/,:+,13''B'+,2,+60"<-5'745+/-'"5+'@'.& *4,5-4.)>8+*70//'='!+!21!3-+./9..+-&$()'5#'#*=5*-+.&'93!$*)% (,(;*3-4;-.*%/-';@,)#-%+3.8)$( )0)2+2%3/*)=(%040+/.0(&*+.&.*.***;/9+/.'=&)*,0627*5!&+-31#2*+%6,#*"/#*/1"&4/0#314403+0.34<5=*C/2I>DCS=:31-+-/5!)67/'0=*7*7,1C7*28,,+/#/؀Ӏ؀ڀԀԀԀҀՀǀ؀߀Ҁ׀׀ҀӀ׀؀ՀЀրՀ؁@/1.'+&3&)-%0'5=*'1-/-/&,?8/(18+(6>-/'/5/#,5)5'*..7*1-30+/<55#3'*612295)2*-)++*3&/++*$7-);)'@&$61)*0&/(,*+-(,$-44-330/-$.+-'9$6<-&2+0"+$+,)(/=2+; ((+#0+)723#'-/*&9.(*)&+!&#*(/6#,19=(%4"#,"+%&'&/.&'.(%2!%0-+;*9'=373.2)-(+.+5(!1599)72;)&46%+"4+56*8#5A+)$+6,9/6/(0.:3$-/56-#**1*9./#)&'!)52,.+%/25:/71%(-%.!#*0$++.6+,9$?*&,*>)4+C**+,*,-,0)19+3-&$' . *'3#3*3*&5),#0-'/6.5"05+-#2%14%!$4(1&-9%-)+%(./ ''03)'*+1".)/=5,%(0934EDEABT:>MB=:E/BA04A.9$!6*+)A.*40+)1-6'1$604-77$0+20,&'& @-.11.43'11(*.*068210/170׀րրԀـӀՀ؀׀ƀπր׀Ԁ؀ҀՀ׀ӀԀՀـՀҁ)/.&-,;7%--.0119$40'?,2);.0'+&63(201%05;-44%//"&4%-4:)7,<'065&504;8/,'6*4/((0#'+3+/)*6!2//8(*/5+.2)2)(5,00--0-)*14/3,3+*+853&00*%-())4((!2'1;&,( 7/:4:3!2'./00,0!5&'++A%%+715"%!%,%')/+"0.7#*4//-(<2A2"3.((206011"/340-.-%7-''(!7*/1;)/1('242# 8/*4./#0#/$)1*"2&)95&@30+./+%)/%4&'#',)3 $.-<$=0)2*&')9#*&(%'15+2/,4-%3#-.)'-6,",,&)')81-,'+%4$5+16*,47@#',7) 2*/,)*(0)4*+)1A,0+8,"$8,@?>3;=E%*B=3762.'<+,7336*9>+*-/@#14!=,025!//#2-%4)1-)*)'*181'-<1&43)2+6A.0'1׀׀؀׀ӀۀҀ؀ڀĀҀ׀Հ؀рڀۀ؀݀ր؀׀׀Ӂ;%4,E0&/<1(=261<4"/64+(,;-+31<4/2-!5"565,.(*)/!.5%-$+3(*-))'.+/--<&.-(.%#-5/*#'2/%+,70"/$6 1)31.)2/1>*2+/8250#%2*(=,'-2 '")1/6 (&21+2//.54+/52'-,#.&3+%-60"7(-8./'0*5;-..480//$&0 3/5''(0,%!1-/=10)1'8(#(#-4)<')3%-1)4) !2*1/)*5)+2-23'/45+;(-42)/%4&%(1)-."&9.96'.),"4(C83('+1/6()*****,7831-+3),'800'$5-//1(-).%+*3,'8%3:))$7#1)$062)#(7%+,)62..9$10/('82*/0/1---)?20/9''(,.;*-$2#'1!('&;/(/$2().93/32,3./-)1'6*.,55:/'-013529?<3;:191J6+;?>?320;$+*-),,;-)1.130+0C/2))',=&9+/*711(-72./,,1'3,8./31,).*."-)'.),''ڀ׀؀ҀҀԀՀ؀ԀŀԀ׀ՀӀ܀Հրڀۀ΀ՀԀـہ543++/(83+&3)55297/:2$5-&-*1+38)%& 
566-+283<#02'.-#+7')-,.4,95)'--)(,.'2&,&<-.<087'%*&21<(3&'(-;3-56***E)&+855.'1/+/.$.0212+1-48---;2(@/.90'8((/))--&,#<=+22*=10.'05(+3/97(.:1#+3#-+(/)*&&-,(%-4./%"&1/!5-'*0@:,.:)++"6(*/9--,5(02!++4+/,-'+ .#.&'B(-3*010'../(**4*'*/,5)'&>2.-!' *#-0/38/+(.&,21'%-(4+-2/,:7*--(4+/3;08'%0+(" *).7>$+.1.$%2><./#0/4*6+=,-1-(8%,-(&$83/!..5)()3+7,%2*)%+&7.'307,,0%(, ;+"0--& .)-2 :+20;B33205;2++#?)8E5@9?772)5=2;/($(64(++&=1+*&27583!287894%5'3!34>)4.4&**/+.)9+/6(5?)39>0',32#'24ـՀ؀րڀ܀րрӀǀՀڀӀـڀ؀Ӏ؀րـր݀Ԁс=-A-,1***+.-+'*3)2&*567-&&..46+/98=+<+8.+,>)*71.&*@733090.#/..<#5,9023?.+7'),76/0/#.#*-46--!15497+2)91+)-(3/)*-3/4*7!/'+'7-4*++$60(--**0+3":=)&$05*,;**:9'.3#/-//3-',6&&!#32.12!.1//*3%1-'4+/""4-1!'979'/1.14*3(.;40&!/"-)("81*/"*)%-1!120&<&.%<')%+*'40/=.&053*(4),-46)2)8#03'-15"-!13$,6552:&.$.4+"01*)**5#+.);,'30736-3.5;#')26(7-,!+*0$)+(!%5&0,10,*/6$.+ - 87'+"/+3+'17-.0/4$=*$.<1++#6$;3=8 +7(&1.;+:/8(=#),(/ ,5B6:&3<3.!/6::/>.:944:":0614,0",420,435/!)+;18%)6= .6%(0105(#-(.%9(70100#:)5%0-;=10+>1'Ԁ؀ԀӀڀ؀ՀՀրĀրӀԀ׀ـڀրـڀՀ؀Ӏۀځ/(;.3182-22<1$,'6/)-*/9/+34.3,!)-=9%(,7:'%3A) ,3,,')),((=.,8.)8-7%4./8'+,(/$(,/4+-4/,")(/1$'&(/#67+)5/3/2:$0/7;*:24/0$+0.+(("3;'421, 156(+7*./'2'-*81,*0"5'&+5)/+/!#.%%+-3)-,*),-;*&&)''9/$$!&5)(8+1&#)(&/',,*/4%+'80./%0*(41911(#$(5+,.-).9"2-&)#4/'1/+.03.1 3$+/;"30@@/'+2.:"(5.-5,16/%2+8/60/'-'14.,-0!9,(),:#/245"/*3)$/.(44639#-#(&: .#8-:6&%7.&23'67*30)&/.%,1,=-.$,"22),:#$3+'/2%6*,/2-/%.*-;--# '-/&+1.2/0734!<2$0,*%$?+,6;)(=54.9;D/+95/84/'32,/,60?-$9(#..716"(>.5)%95:!""2(&4<++*,/+7'$2)/<1!0+$//.9414(:+ــ؀ԀՀ؀܀ӀڀÀ؀ՀـӀـ׀ՀԀۀՀԀ׀րׁ2"0%3#6+0-+/,,-3-5%514.%.-+3)<%94-/*;/2//*.2#;&21'8/"4$*-4/742&!2%+9/#0+*$(&))+!#0 )047316//-50,#1278//(%11&.$$// ((,4%+",51&2083+*)..;-!$&$&/)#11-(',4#'.02*4(,,&/$1&*/3/$3',-':7(*2&90,+/8.+-+33%&3(.7-/"51,).*1$/+0,.,0+:-0*3&6?//C,5,!/%1'3/3'&)916'+7+*$335:P6,(/:3!')/*4#.+.+$-#-./7++-#$',:'53.0!6*(33; '5+-*9,,(()#/ 4101/H/"0(/'+$2/50519*92):/)&-&::5%-)%)$5$15-92+'0852-D4A6,+9+2-)+6*).#%+7)012&+.++'.#+7&6962/5-30)51B5.1445 *5/)45.1,B:18/243%"',,4?9*401.1#7:-& 
*+,'(&*4380+,3'9''+"5.02(:$-6D+F1*.5?׀ҀՀ׀Ԁ׀׀Ӏ׀ʀԀӀԀ؀׀ӀԀր׀р؀Ҁրԁ&(&-+.',2,!//'".3.'1.B''%(0'-370,/*,.00-0)51*3>;*$+)7-6&$3+(6430%$? &.'&+(5/#7'%.0+)5,16/-,'/6,33.(,/%*-9,,!",,.0'315*2-,-2.-$ 1) 1+<1$)1+2&31'+, 1,/-/.0$;&45!+ 316,1'%#.%4+#/)%2"%. (.9&&5)0.7!.&&*32/09-+,2.*03#./+3+:$1),7#;0-70)-6#.36 )8.1*(?4(+,@6')%631$ 5&%)/+/*5)A:)$#3&&)-*#(/2&73$2'3;59/0'/+'-% '2$+(,75%3=-( 51/2)+*4$-/&!7-2+1 1.--!/*0.-0&*+0+328!/(+14*' )1%5&+6 .<-7+'225-&/+/61",-51/,'&+8 --19+!/,5?50'7/-.'45,&-+!6/4,0-128/15$++2(/#-('5--0?)/+.7(3/"302&*&:.*'7/)-3<:/'-&0+2&-")3/7#0,5׀ՀԀ׀Ԁـ׀݀݀ƀ܀ՀـԀҀՀ܀׀Հ؀؀ՀԀځ#4,0%)*30.'/-0/11'57+0+0C'()&27)=4/4:,=)++')6<1(+(7+1)+0+- ',"/)(),36+-3-0;*68132#1)4.+1&,:;03''*,$'6'$)/0&"',0*-.%*)=-+*%(1(-05*)+;*&9 -(!?,!6/)#2.307/5*<!'*5%+-"(),+<+)/-1!,-3)9)'425*#4-68*'(;0- 1#+5(%+516(01.1,)1*D %*$32:1+/*)+-%@-!7)05-12'.034,(%0+6<-%)&)21+&#%%/&90)-1&=110$1,1?7#*C6,:*8D,("9%).'1.)-'-0%)-06/9)%504%#+8;2)%1*%30.3"310,3$5+5"#0;,0*;--'(*+%/1(-9/633.)4*/1/2#&$?+/01.)+.*/4#33313++:=$0,2;'65B*7901$/-4191-0%:>111H134.7&07/8*0),:-,9//*4&)/''+.1.& %)*--3-#,(3-*/6,0(!1+8'؀ԀӀӀՀր׀׀ـĀԀҀ׀րӀԀڀրҀԀԀԀހҁ4$4$+4.',5)43+;+/*,'(<('%:$9--9-1:<6#'1&3152(,&3)0)09219/2(20),8(21& 6'#+&.,3),,+272*79-!()3C7: ,-+,(045-5,,7,4-#4/8 /:3'+35;//1')44-$/1$*2%.5' *150+.'.>+))7/!,2.6*$'.'/!<333'&6..5.'!/8 47"+30+.279**)"*(173:1/-13,93'(&.(1+.+2)*1/'**''-!-;)01$;=+=1'')#:2-%..+ ----,-&"%)'1) 1)$(&%,6+< /5 -4%$-+/&2*->+#4((,#).21/',%6 /*0"$/'14!;()3.*'9(($6-1:267-4-%-/53&005"+4(5(#2.5(0#)!1.4/',3+");'-+/()0$3210"**"2)#&./0&+/+,(*17,<&4+7+!(--7;;/.+:'(';.01'74/19183+//60!3$,6!1,*1,#80;>0750.2%279!-711@(-4,5ԀπՀـڀՀــހɀՀԀԀր׀րՀӀՀԀ؀рրց9(421($'A/$(12 &;=/7-2&8 :'@2.)8/,'/!//73$C8*/.%,0791"*.+)+**'-/+4%6522*-,1)&12"207.1:'75.)&-)(*,%<17*-.**//3 +!0>&)/3/()-/$%'0)09020(##1/06#2)*:))-+(+#/)3+5(2@0(&+"*)9-&%'#2&&+*/; -#4./5!301*&1-#!1+"/%!#<2*%$%.7-:5(-1'2.&0)03)6/%%;&/889%'839+1;)1+ 038/2/54+.%352A((&+))2-$4#)5#/-'20 ).0)//1*(,:.++7*,++0)+#3#7 ,.*$-)/+$-0$,'(*)/%0*0+*$1B4%4#4(*53/(//%,)'27&%.5///2(/+#(3+6+ 
2)#);-51'&.3'&/-+2"+.6=).>"24)-,8..2 )268$?5%+,-2,%1-$3-$*,"4)(+50"*4*-%>"+))28)(2%1-5*02,3/'./3/3/8-.+(51(7)4-(1/15+3'*-+1ՀՀ؀րր׀؀ـڀƀԀ؀ԀԀрӀـۀր׀؀ހ׀ԁ$5%4267+(40-+8',#,12&-.'888&..3+*2%//,/++5+-,2&108(.(1)'/%$*+)),&+(&1012+*74%/%.#*3.70#&$6/64)-$?B;78=1$##<=6%$)1+*)//164-6.-1&+412')*)'+,2)2$!*&4D!/;&%./+=05;,:3/7+8---1&(#$/+,#( +%#7)&3/$(7'/,(0*.$'*10004 #, 56-)5-7+2#*17&*531'-?'&+,.52.#)2+,5-35'+$%$@ .4 -3.1),/.5(0/+*8'19)*..)-55&/ 4$(3,%%-774,(%5)/63 06+625(5(3/#9.9,1.$.%"*80<1!3,&!,+1/-<21,)21-*+!#/-($((%-1&0.-++5'.,,$'%0-7!1-)-&6$.*,00)%5(0!%04-.*25&$41*!+31 3.11:**&)-+;,.-,'*/'*6$-10/-$,"=/)4)+7.*-8,)?8%8-*+)1*#=3, 06)B502*/5,7:2#19%2-؀рր؀ԀԀ׀؀ԀŀЀՀԀԀӀ׀Հ؀ڀڀՀ׀؀ہ/*2)0+3&+(8-+'/*+/80'>5&)(7-61+30/,:-6$'?3%/(,0"+-"=.0-3'$*% -->%++-."%#+B,42/1,/0 2/4&81,#-# ('/'3&!4)*1'!&.!9#@2/*%--#&/6/.F':,%.-/3'5:354+-":---#&5/*=,$/")".),,20-1532+&=-).2+%.,&2+080(23(0$,, /3-*%+%8.//5*4,*1$=*+Հр׀ـԀՀՀ؀݀ɀـՀ׀ԀրӀӀۀ׀ԀՀ̀πԁ7*,7,+(,-!&,2.*(0'00:;$&.7#3/4!,6,,.2+)/C%/4*8++-#-(/3 +.,)9511*0+8(,46(+0+-3.#3#6//!)$80+.6%'=>,%24+,-.,.+001*3)%/#:&'.$--6$""7'5=.&+#/421(!&&30(,72,4,8/0/5(.../-2$%397(-.#'%*3'.+--2$/,8(7-(%'.,+",",008&.<-&4#7='-,+(0+.5.104(3$)3;0%**#,-33/(+?2340-4+51&5,201%/-9'+1&( ++7&%!%8#0,3(*+0&)'.*-:.40*)%)5;6932(%+5)1!//"&2*"6 5$3=-.6 +0&8?7./#3$-/.A6.)0-52.'4/./*.-,1++*/#,+;'*+1;3++'%-/)&.".1/3D.2'&4,57''*)*.28/)/-9.-7-/=-+6#.%6:53.#166-2+76;23/:1701.%4&%$//>/0-,(/'-+:7'8&32&-)6,. 
(+2)#1/+$#*,-0(08)$*0+,5)&136312523/+$&B5,-2)-*$1(3%*+3..(1/'+#/-%./$%2#!-/+5.1!21'()40)./%+9'$-7;&110/-/7-=&(+012/11 )7*#0.*./'-4,&(+2##/-,#2,-&02//-**25$-4;2<,944)-30"'-&)6$%'5+*162 5#&2203-2,+(*00..+,,''!9.81/15+)%*.9$0/,'/.1/&1'",,=$,-:-1(2/&1$2-,$/'1.3'3/)&.**..5%2%: /(%87+%+!&'/('#"+2,-4'%/140)4.$*/(1/*$+@"+0#1)1+#1<'.,--+%&'!2/*,>-3/0/'22&/):&5-!4$)4*/5360'.,%2//%1$63,/+2(4,.0.-72+#,,,2+-'2//=-,.$>,"*15,%2*5?=%!250;3*05<41!!π׀ՀڀրրՀڀӀĀԀԀԀрՀ߀րҀրՀۀ؀ـӁ3/(('30$)-<>/B$6/.42'//221194,5!07$5*/,00@695/'/./7.,(+."50(%<%2+./+.*,/5* )#00;%5.&5.3629;,*2*'-/(.(22 )0*14*+1)"*8880+/%)*/B.(-%!.<2-$1&5-.73%"3)++)30$/&(,/% 3-2;*))*%7%5.+*5%'0&3)%6(-,+6""/,,&".+-+2)22-2/#37$.'.-/;()415'":;-#0.&%0%)*+8/*$+!1"*'--+)172,'0+--,8-+'+').,,:.(+2)/+/(-97339+()+2!&46&3,! +11+)1-&1-$,9)2/2%263/'%9+59.*50-!15!/7'+'75/),1"2!-#-61'4)%-/+*12.,'/!-/'-'(0559-55&%(%*38/),-2--0)/>-.+18' 678(-9(+(.7%1')/+-:!#02&'176-",*1"3+0D,.';+$-48(&/'$**"1C,3 "3,),90(/3.98+05/1-89'))426(**̀ۀրـՀр׀׀Ҁŀրր׀׀؀ӀԀր׀܀΀ڀ؀Ӂ/=!>*B(5"))2'4)!49'+(28.106*80'&3(82/&9'3- %2*137))/%!5,<43)/3'*7/.;1082#2.757241"7/4(&1$1*+.1#4,%)%+%0*$75)(&,'3&>)6-:-0+(7',5#.3*"6&/,/3"4".+*+-'':8/1!5380/)/*6-(*'.4".$#4'$+&)60"7>(:0-/*,8'*"7+%/"*)$0#!/4(4/62%2.*:'#./*++.1"90/-)/5&<*4-),$-7'2'15#;+;%%/2%*.5+9-#+42#.6. 
00,(%1/$*54-'**/0%/)0).)-(+!(%'55/11+,'),2,./#//-$!#.0*(+--#429$/($"8,>)01)04+.='-/'1+.-3%'#1/*6 '/&*:)$9):85/)'(42,'2.213.+!.'$(&:10622# * 25.42:2*6"3,'./4 0&/+53**&-/,%/2-50-/1'*,/-)2,.4+2&#,@+-:'0!% (1A%).5*-(7#52*,)$׀ր؀׀݀׀ڀՀ׀ˀԀـԀـڀրրր׀րڀԀ؀ׁ+,4'3,&54$7.43'4/.)&-+-8((8,+)-)/"('0,2;:,4=#//$.+*22$$1!8(-=+$ -(3;157<.)6&4)+++572*.'.)5.1>2%0/)82336*34'4617)02.//5+,8#'3)').%,1+7' ",40)..(9*4F*15,+)*#54#,&%*769*%7%6+/!2'<1&$3/2+)*;6$-/636!+**&6-'-<6$+*#"0&2*-'-;'&5+=(030&0>'0(2+-3/46&64+#(/1(*6<"3."/4731,%02$8/36$3+3'+.#/2-3/!$&/%1/+:0.;.?*(<4*,+2'%#'/$/,/,*10*-9(().'%4;%93>.946&$./5,<,.6%#73.,6-'),/&*1*2(1.3*/?*),+,&,*4.7- -%/4-)*/++:)69&$,7/4#+.$($6(,07'0,+,/&>5&7;-0&2+*-<%71.9&'-,#092.&(.-,93/2,&15;35/,.-16.2(,/"C8+-+&3(3/(*/$+-.734*&1Ҁրـ؀ՀՀՀҀրĀ̀рۀ؀Հ؀׀؀Հ׀ր܀րہ1##/9/6(!.++@97,5044843;&7/.2=%3*6-58#;0/0)*/%-+-.,43(*#/%%-/,2D/%28;05**0#4$>%90$,=31+,%*/@*1,.*2*;.66,)%/-33).1820-'53*(0. /7'(46,/1+5-)&6&;2-!4;3%+&')#*(&'2>3:6%+*:()5!3,,1--0-%20:$.(0)'*#5'4%'.%.,41"3@!9&1$/%+841,)33)4+*4&$.+8/1-)9&2$%<++5'!$//6+20+!-*/ +(7',&Ol-&/%(*,52B-3(/3%(&-/7(58.($+#) /-E)**+'76"/54*"2%/ 7=*,A+-72>#'../7+52 2*'/,'9$##%1)%16,)$(+942)3),4.()$-&9;2)/.$24,1%)1/!/4/'%+52;22>3 *.%#&2;,/2,"-7&%,&3-A!E-.?2+258=3*%+/-8-,;*/*&(1+1..1&#"*-+,.03,/:%3%3.$$7);7C0',!050;".153؀׀׀Ӏ؀؀ԀՀրɀ؀܀ӀԀՀӀՀЀՀڀـڀـہ3"21)25)*/5&/+>;(/%;),%))-,3,/$601*31+))..,158*/+5):+4C21$=42)*1-.,+2/16+9*/&+/'-)444*3)9520=M(,(+)()4&*2'0.'+24&(4'//+%%,+#94+82-ՀҀр܀؀ۀրրـʀ؀ԀӀ݀؀ۀӀԀۀՀԀՀЀс*90.<(,6+$1,/440!-35096(173558H+30##3=5<1486&*,-)(#/9"$*/*3,6.3((:36)6*403(),)045-/-#*6)"/*)%.!+/3-(,'/(5,/32+%,*//*%"+-/(/<,96)5(54+'.81&,.#3:6.*;4&$,06;.17&0*& 505&;%--+)-2$42#2.052/-)86#)=7.7--:$791'5!".,+):'24;),1+#7/73-'/*3630/-8#&3&-1'!0*1*&':1.&.5-&3/+$+0<*#()/!'..35+3&8,2.&1&9+"#2');*')#.2)")(092/5/3+=4$*(034-79 &42/#'-1%+.*-."+/'+""2412*/!/; 4.6)*)7&,-9+* 
7+)3768805<5)#/+7/"1/+*17!//51/7%26$103*2!*)-*,)'(,+60,+>/836'#+)(71,#'72(6)#/4$252.2-%94)/,%/0%/.4!72(6+-)5",$+&)4/-#%3/(+=)ҀԀ׀׀ـ؀Հ׀ՀŀրԀҀ؀؀؀πրՀۀۀӀ׀ҁ.#4%6*6',)8*(?(+-/&;0)6'4$3!1:/1.:43)*;30490929:13/4&7005?$0.;&$("$8,1,-2/.56&'%1'1)#//) (-,3,+(5,6-/--<))3#,(0'1%#3%02')"/.)-3$A%'-#--&%)%)6$)'%/'45.'283:4"564)&/67/33'*:0"3--,(*-7%//'.2,%-.+-2+4).0.+.%,5'/45*4.*'#7,(/<0.&5*/58#%,'3,(( $5*4,1'*;)%6+))7/.!.%25/0<-0(B-$/2'(06:)-&1,+".-&'>11').-.&1%0&/<#62,'&55()--+3*5)(:.-#>)2/-48/,7(361*2&*.53'4+--0515'+0'78.*+*2%-%++6(;+5-"1;+%&)(%0:.*&*34.2.-,/,,%,.%9,'112.8$-13,?(55(3*(7+"%0-11/521--04&,!>%/3,*0)5.52+%*+/*(3)3+.$(*0%%%0*0'0$%(-9.0.9/6F%0130׀ԀЀրҀՀӀՀрրՀԀҀ׀ڀ؀րՀрۀـ؀ہ%%,5!,-+"24%6324,2':$*")+?2/,+,.1(//('0&+-.2&7>9,3*%*>++!/,-902#2(-,0--"53.86./-+3*0/&4156+173/(#17%&//'*24/)3')5&+'+?)'90%1(=F1,&1/(8+. 1431'098,+0=852&&0 %+)//-.=(+.%2&55?,557:2'&5.320($5$62)-,'.!&634%1&&-#0*6.2*13/)62+,*1-&4'.-'*2&%8''/,0(-50-51+0<%%5$#,%$$./2&$./*!$4%&0)-.///+61#5+(*#+531-80.-72 +'-$12*2*,-194)+*7*2&3.,*20+3(/',#/&7/0(-,%-,6,14')83)/#*'%01/;+:0((*;(.,5-&3..459'5!15.%4&0I)10,.#7 %5-'4/'(*.#(+4/95.'&3*-*6+)1(+1*. 2/.@+13+;(7(,*45:*)+/361'@0&3)4#*0936/(3193!#&38,,'2* /&;:#׀܀ـ׀׀րӀӀӀǀڀԀՀԀ׀ـՀՀـ׀׀ЀՀՁ,43(=0-/&)-/7*1/650.'+0-*)+;'0102/918(.):&61'.43#5-+,""&<34/(/&*$'<)/"3//./5+++4'++,3$*6*8+$7+%+;/,:77#//+%A'.,$4)/(*,03&.8)(.(.'+".&B) /;/1#-+*!/)*,+,-.*.'&1-2>&-36,-1)6.$46)-,60).*,3444286%&,.0(()7.(,5,/C,2+158/+( 2%22'#2 +1#,+".)(11/*40+.)//,)4?<..8.--8.14.$,1"$3,(-222-#781$'',3'45*-="+826#41,- 180&#)$*!1%3+1$6/8303/%$-+(>%/%91#.0+,6*0:.&/'%*&8303)./80//07.'-(6(,91#,5+*3)0.&29$.#97,86)%3$-*,2*,2,!7*+&'+/-3'/,5>1';<;(4/*41/(#,72#*2:3004/34;*0/4..(0+@+4A+&2625)05)/7)6(.(1:515)1 6)6,97"21$0,7,/ӀՀ׀ԀՀـррՀȀڀـڀр׀ՀҀӀՀ׀ـӀـ؁630-53-(02-1".+-56(.0)1:$)!'2,+10-*.6/.20.:2/5:,55.,<.="+/.'(-0#220):"-/9#4$-/(9+11,6&$!+ ;5*9='5*/A)!+1;%2**44(+3+8(9&5!032#-.1,)24*61-%7*-,$2%)4-+$22$'2.(/%416C. 
9#-1233/*&?''(7%;A#*'*)**';$**.9)427B65,(+#:3'9/A24(5/,)/.)/2534+%&$?56($0)6+2&*+/2)4'1-36(&)0/,''./')5-5-!741;%/%(4*).'-/9!02+417253/&410+!'28&$3+1/!1'4=-1/*9037#*&43*/:(0' +3"3*'#*.0140#/%B1(/%13#2*42,%)/+#7,+$*%*(/)-(+.9%4'9 .*/&&+;++3)''>#1,4-/(5#;)(-2&.41=(%+4#;7*/-,2-2*+$'$*)&.0**0+-) 315+0;3:5:+6+.)%*:4.63)(,7!2:5,-&7-0.1?,!%%7ՀӀ׀р׀ـԀـրŀրԀԀڀӀՀ؀׀рـԀ׀؀ԁ.$/8?1=/+1,4*$.567'*+$,1117:../745*00*)6<'.0(<-,&1/'-2+8'$#0&63#*)1.0/#+4C+&3-3=*2.40+!0-/9+24-4/6&"/:8(*>(/,/,+('/*,(1)%')$*'3' -ᜌ$*"#+*4'!7321.20$$(04,)*&,09)+*6$!5"*>5 20(3-7-*&(.-(45;-" 1(2,2+/%!($=&-(6*4/16+&30*1./+2$-3"3%.&'4$*3,,2*,+15(*+.;2$/.'(-.,.4/3#//*4+*1.330(+.0,(505->=3,&)!0"&&'73)6'),-/2=(0,9*,;**-*1+(6/32;0*2'1-23+*60/+%37.. A,*).!.(2''9.('#3,+$32++61-3)&))08/63/=8%%41%)2*50,(2.+(75,252%25)-7//94.(-.$6&/.,+*@0.%0*'53*3 :)&$18!61)+52/.429,./$8!7()4-4/++6*3.+.%+&2-)=5=3,+,:()6%-&)5322)+3$6,0)1.3'*!13+#*9+4*'+&**'7 96%.*'-(,3 &54-*1')1,.)+09&2,-'2&/,8-);'1%//'1!$+>6%010,1"6,2/*%//0#'>('-*./).:-!),--%*)80.+3(*&17,,0'9+2.-BրـԀڀԀӀހՀԀȀ׀Ҁ؀Ҁ׀рڀԀրԀՀրـ؁)0,,66+*%.*9)65/38002(,+*34"7)3)5'0,/12&*4/.5=('42/6721,%%/()(7:=1*/.2 /.35*&'%+%,3/-'1*4,+15-38!4,2/73&5/%&+2/.!+;/08.*+32)''#-48,6/+/2+>,%;.+'*+4'=10,,8+'5%!=17'81++.,=*+3;5-$"+51-4$689-)$(+4,'0*/=/--+3(,-'1..;.,28&)19'",7*-=,(4+1/+!,&.)'(2**:3481+*,%/,&0"1$&%)3#3'7 ,8/,+2)'5+5024"%(9%691//%))7(.%-9)54*)5%#3/%!1**616:#253//).""16.;)1/#+!*/1,/).1;4+0*-1'5+-:,'1++4&/*+A4.4)?4'3!5-)",)+/72,5..0&!.,6%'*'17 "'6,/1)3'9'1?&0.$*'*)15,)+%) %3( 5'+"-9101*..11;+7,0/+5%2903.&4-0*(8(11+9&73*.65&%+3.K233-,ҀԀՀՀԀ׀׀Հ؀ĀՀڀـԀ܀ՀԀ׀Ӏ܀ҀրڀԁA7%42)*+30+-(B9:-35-71-606%//$%8))//.,/<0+7$&**;;--3')$9'0/)%'1)'/(#+(++34/.-*%,.63*-!+2.034;5-;((!&'-*/10#/5,,%;%2-822',,/.+$1*;'!% /.*7%2-+' +:332//+/+*)-**,2%<'11/# 5/&1)1',21%'3,0+106(2,61##+.(+.$+311(9$+,6)/96.$2((!,2"6 1++00(1(/4-)*)6//$2.6/0+.+,*+,2(,6(-2,*+(%7+%*4')*/6)H#2925012564"(3%,63/8&50$,&;*%3&(4=1/3&7>34*=.2(>/ ,"/:)"3)-$--D).6",'% %.'!:--)+*;+*$)+=4*0+> 
$8&%07+/;$'3%-*--/,-*5"--3*1(5<13'5&$/3:5'',6+2-:0$//'6/-#((,/*8##%%/2&")012,$1/2/*+&.*!5'%1"-( *%,5?*<7110+73:5?.-6)%(K.7ۀր؀ӀԀ׀ӀՀՀÀրπҀ؀׀Ҁ׀Ԁ؀׀Ӏ؀Ҁց39+)/3+;+'$+97,4*31.86(:)*8/,9*207(';:7++''3(./3,#3"*&(74':+1,123,/(46;=-!+2*-$5./)45223-/?'2+81+1)142-)-*')&25,$6))-'-%+,71//-%+#-7-!.'// (7)!46-2./(4&-2/7+,9)!/6(1(-53-<#"0-(*%)$)%+$,0;&7/#6/+1ϫ'22*';#/+5-99"03.(2)"()0'/02-')+79&401901)7#8)*3**1)"09&5*'.%5,+&1,1+/'60)3+1/)$2*(/38(3,8/"02:*$*31-)+1-*> -+9&,)9*,-516!0)2,-4!+'+-526-21/7/1-52$0-39.-/5'2/:.&*&7*72 11 02)'7B-$6-?5//,30032E-=+$,*':*/*2/%2+,4.37/4-#++-%06&+6611=9")*/-132$&%2+&+/-140;#1/8,0#%40#733./,2*2,+60(75)(/,&1/2#7.$(,%:7)+47&&-340:61//46*-2(>#+$/"0"*3*3)3#%&1+7//(&'#1+050(,--%--(/*+0*'')1(3@%/-%",$/4;3&/2,(88+D,=2.&'/$0*-)/%---+#(+0=%( 5%5?2*.%#52*2,582-:0/'6267; 441#-6/451-$76;222-+#/)'51'0--)2*/*80152)8+*%) 1,7&6--,/,%$=$).63,"?.-- +1.2-$(1*129/%941/0/45-.,)5+&$1$2&)3$4;-7&11,)#2'./'&''12-/1'",5,7-/#'-*%3.##05ۀۀրրڀрՀڀ׀ǀ؀׀րրҀՀԀۀՀԀπӀ؀ԁ3&$50:3*,*56455,-4*/(9)%+<1/'+=14=42()&8&/0.1575'**/-0295113+-)63*-*/0'20)'->.02&4$%3-/6$#70-9,0&,%312*1+/),(,+&0+/#+/*++"020./'/+7.&.0',7-52"( '51#3;-:/1')21)9)*'?#7'+&/:(+4)/+)'91!,#*.)5+,).2$*+022:>)46'$%"-25/3+.%/7*<3&&(*09*3-5-/+&7,1-+"0/,-3%".+/-,*=)/$-*0(./%9+'*(8'*(*2)5'.&+;#2+*/:'(+%068')05@/<1$/->7#+-%- ,$.,2"/3!.-7$)0.&4.&3(E3;+(%--@7/-)9=75+058.+2).*-)-26 /2/-'-0)7+*,7,-)'460+,22.09+-'81/+B&3/53.//: +&+(-6/5(&!80.&(6%-40-%؀ҀրҀՀрـҀ؀ȀՀԀҀՀ܀؀րր׀ՀրՀՀׁ;)2+8&/)&).25%4)5-0(A ,)<*6:$&0<39'7*1+0$,2.76,/1/""0 51&'5,23,!0-*.0>3))3#,+''-,9)+$-/&'-)/19*2-)+9,4,/&!15*!56"%)8-*//#/-'**93*-'$/@-52.-+:0$/+2++,(-).-)0-)5./#+/4-#(35&01*-;;3")(0*5/;)('8%/70/$8*&.-6%%'1436!%-7*/-/&9)2,*26'+1=#///'&.&1*)C#**$+2-,%%0)+/')#31(- 0-12%1'4+.$4)4%-5(2731&*<,768,4482.,)1,7&3/ 1)(.#$21.9,3,1025& 7*#5+:-*(652+#*2+*+(,'"**5-E2 /%0:--/6-($ **$//,(.,71',&04)1*4* -7:-%5,)!-0+340/950'7-,':26'20.0+$-%$-,#<+/"1%1*?";/,"%.1/5#/%*%,3* ('4,+0(+4)&8/)/,5/)./.2+, 
7=%*17;;!+/@1$/736D3-(%$/+&*1&;)&8,(&68()6&&9++#*'43#)+-+0%*#77*1.$#!<'/)-7>00:351*%950--B+8.8;"6655$3A($:.133'0%*/-10.//0/0%21,-1:.8/.%*,.4$*%@9$(( $5./24$/.-)/?/5!/3'",+2-++1.--!3/*-/,)*6,/1/*3//4%*1;)#*/-3$1%.8$66*-$1115$242*+*(-$6."6,*&4 3/1):"'26,7#9,?!.$+*&0%1($.'2"(((=";3.48(8:/8*$7'?30$05,;1<1+-0')#'2/%.,(32-%"'505./0""1++4302'0*04*4,:%*7*!3/%."/(222+(1)-$+. -+)0+.+$6-63('2/0$(40),#&.%*$,*++$*93*4,93/.)0';502*76(*!3-5;/"++/+&>615#016)$((//)-/: /255)8>6րـՀր׀ՀـڀـǀՀՀЀۀ؀πҀ؀؀րՀӀр܁$-@$''*/**3,%+647:++5-,8-++2>+**488,')*,+!78'$*65)&--+/<)*'(,1&&"677:5100830,*35437315)(70+7)1+3+&;17+%$58-*//&425'/9*5;20,%2/60<%&+0+08%*;1-'/08-*3107:3(7):.)#++1&*5'/;&:,%,7(131.%$!--8%2=(,0",0,*.3./+''726-0%+6/!,#+)"(&.4/(+4.=3* $2'/>3',$2&/*--#92457/75*(.)(3.+$1&7/9+>7/-?-.)'93)068/;739")445,*/(!5++//"*(4(2-+ 2"73.50/,.)+&57''&,.+4-/""-&$$2,&-'-'%71#&7,6+/(0".55"(60,*"5$5*18,(+3#!/)"1%7+$.)-.1,.(-&,/2@022*0-$#/%#<.(-3(/#5<*12-+7-0*(;56&#+&&30.,70*:+-<02*1,/.20(+/.;10)',(602*,&7024,1(*-50+/9ـ׀ڀ؀ـՀ؀؀рȀրՀրՀՀՀـӀ׀ӀրՀЀՁ8.,+23@;*44/55*:2,?1*69)2?21+609-:;4<&.+5*--50)583')+'2&&5<6.(6.,9.+%)6$&&10.:,'+02.&?+1>3)/)1>)4(/1/%+)*!$'$++ /-->6.9//)/048;033*!+,!$"+):12,'0.22.$.+1$)+/&(6+4,8/*(2!+0)3+(*+55.62+'+*5#--&06,/$1*-)-1+'./4-1%-27A)3&-&#$(/59*3&;(+';/+3.?5*$/)+(-+1/1+,5%-$)47,+>:5:48*(,<01!.($.6)+.3)36&4/+4=/31&)?,3%*/#-11)8-%'03/..-))1-4+%5'51.134*-9*/0%@/107(11),)))9-,#-$+%07.*/1:+8/2(/+"./,7*0.!>+(;(.4!2)+&-+&0/5(/,'1 /,>#-)/57:)('(2--7%+./)((+6$'3'))+,4"6$(0*78?5.4'>&'##2.*0/+0)&рـрՀ׀ՀՀЀ׀ȀԀ׀Ѐ܀րррـрڀ׀׀ـՁ,,1&.&@' ''.$,3(+,)&4&)7ϒ-1(74//1+31-$ -96.*+*,-231/*,*-33.!,*+)46'5* )2&*/5-+44.(2./+32/19.4($):)/ /"'#/%-.4!*-)"%(+0,/:02,'%2/2(-1.3*/4)3-1#-0&0#2.,,.5+-,:1-$-'0/'7//297,3)*7'#,3(+&.)36#'86+=5%-0/,!50,))%09,75,,19+6$,5$'/"77<.=.%2,*.+80,3,502).-,I*+'*/&-./*%&8(!)7-270032(.1-4#:,5+,3//!0='11$/#&%%&'*!4-/'.8)/0-3'.+3-.6)2-($(./7#*)'5)/1+- %"646'./8-*.#,+-,.1;#7-$/6=,245."5'221/,"/,%=./#&?,4($217&(4'345'/'+ 
=*-7-).&8.1*@3+1"-/%(-*..+16--&+7*%'2696>7/%/)0-=,'61./!-*+<909,-/2'/!('43&719-//85$ـրրр׀׀܀׀ӀʀӀ׀Ӏڀ܀рԀڀӀҀڀ׀׀ځ(6,.3-63*#502-764.(24408,(1*586"&/0)#489,"(?@3+&;#**/5A;4(&49')5$,%89-579+,2./*06&)6- &&4./'2C*1(-(2*)&41 (1"#0-;7),,(90+/,.>'02+192'7!1*'!--,+#E.26,,0*09=.&)%$')2+-91*21$/10$-:2/$7'922 .(37+219%72/'31-/7+#*.*.4/'",+'711' 81,-7:5'140 +*.9#./ /162.2)1+(' 05-*++1&0:1!)1)3*039/.&%3*'*).12,&+22.54'/;-3/!(54-24*02.'=+5%/:93*+32, +*6,(/<)+=.+-.-'4.-(3/3,*7985-73+-3!#-:,93)/12/58!/)1),3-70#(00*39"(-$)))(%+-6.( (2*400+/06*)323+'+)%*(?%('))/:)9+08210./212'3,#6,/2)28<$24)1$73481,3',4<(0-.18#13+.($/!/8(.!3"%3+7-$77-4/+.)11#4 (.*)(!.'3'0#97<7-+;)50'%.&4B*4ր׀ـ؀րـрـπĀӀـ؀ӀӀۀՀ؀Հ׀ӀՀր؁5/!.(6#,31(*'7'02B':6+0:2+'//&'>0%:3473$12,'>2+'6/,+/815'3'.,30**++!11)E;00&90)9&=/%2#73*2+;91/#)5/"*+:%/,,C1**=%;)./,"8,2$,61&/3../)%-7 -.6/;*1&<&3&)'*+!/, 8&$.%%+)(7'15&6;20+7(*61,'57(&,50B&1*2"%8*&.,/-1.+,7/4.-%3%2$+2/ /(9*05:36)71.-7$(3(!)6'0''&"1(41(#5($)+1241;&+1316)#-/-*%&$-0:462%6/*(/6%#-(8102($445%&53462/1--/9*17%*."*2&/(-7&/&,!1/+**6-4,6#-/(2,/-'%/:#*597(-.:7%-/3B1031*J 1'02--0*/>(-+7*/0-+,%<*=+(7)&*5-.483--86952003)#42./-(,).13/:5(,$2,3,')B5. 
/-/++-7,.6+4-1153533@#<';<,64"7)%'',Հ׀ր׀ՀҀۀπ׀̀΀ـՀڀۀ܀ӀۀՀՀ؀؀ۀс-)7#&/'43$*35)/13./,&--4!*21.;/(21&.4:1+-#&7!!32$58%G6+3;//(14+'+++'(1+-2$).-+9+"?50*-' !+'B2*#57++8:%(+&24-57%2,.-77/08/.*.5%5,'40'5&-52-2.1-$0%+/.&1.-*/7!&2*2"**&$/-)&&.%)0!+"4(&%3).1/0+,),/-(8/$/+0).(/#.)4*'-,:081'9-,,-?)0&!'7-8?0-(%/3462'++(<(-'*-.>5!'%>5=0/()()(+9'!6./.0/0518.$?,+7'9/*+067/('0131+$/,,08'2,1,%-;4407-*9-)..(//0&-2.(/+)?/*2*%.)<,.*/6+-*&09*.,110.-98-))//+)%/2)7'1.*6*2/3.4660/2&%/4/184&94(-4,-0'4.'&*..',1.145--$&.&*2%?-4$/6*/ ?/ -"-(5,9.$-,1(+93*-203--)3,4-2,7(/<&++269&1)2)B3+׀ـڀڀـӀՀՀ؀πӀӀ׀րۀրҀр܀؀؀ӀԀӁ9,,#)40*7),'2.01&7+14:**+-,'*.10)#&7'3)025-)$)!633/%93%*:313(./*(,653%3011*-3(*'.//5/%3((/-/'&./!'+)2702:,'C-2:3 91:22;+20-./53*3!6..)'93?+ 50.2'23)(+0*,/(0!% <,4):031+"-<6('1'5-6,"42?67.1--) 5*/3*.61%$-+10@7((0151-"3/,)"&0()%"-,4:9/-377(9,/>2)676)+&&0#/*$)/28(2)4..*1,)0-1-1/3*1'/)<3E,%23%1-&-&55!'(O&=870(518((.,+-,04)''׀ԀՀ׀ـڀ׀րπÀـՀ׀ـ׀׀ـڀրҀ׀Հ؀Ӂ6)/+7/)70)4.+)'2+!+0)&+'52!,*1)4'5+3)/'51/1+/*3/!(/160@"13;./-*0!/1(*#1=:/01E://'./-*->'/"4(4+3-.)/*)9*$?.8#875*"'-,,-#8%9<1&?2#'&/-#&'0.('$)/7;/4*%-*"28-4#02110+94/5454+9(+'71.:5435&5/;8#/*02/#99'(595,92/%"/' /1,2.)-3#%)<31>>345&&;%!0)&4('#7,&5&'1*22+-!$%(2&)//(0-,5),)6!/="(,%%,1-;9?%*:4'3-,$50.+&'/./@2&%/."54%',4.$4,92$#;%.0)*;5(.7',1+*23:$5,!&,,-6#/B+,--,()%/+,,+(6*39"'6',%1/*04''=$+4'&D36:$72*2*+20'.56%/72%+8.815;"30#51*)8,(.,*;702*.,,");&000#,)5(##-2),((',>1%5+2+>0&*2*/&1-$&2)1-"#/+%10(50ـــӀՀ؀ՀԀҀڀـ׀ـ݀ڀ׀Ԁ׀րڀ؀܀Ӂ(/-7%$38(.:%,883+-*/54.(6-)04&-<9 -14)26.+/+B(45#7'3&/'(9&'1!8.8.)4025)<%05".,3026%5+103/),0+)),327&+-3-,)320.*(3851=/.'&,5$',.0/:!3,5-+-/++.9+- (/*3*3-=-4310..2/916)%77&+"-+,.,-*7'$&'.;./,&(20)771+;#0 --+.#-7'(%(+2%'9#+)(;@1. 
3'6)5')()#C3,85,7$-(#.17=*-<,-*+)(602",,1&-9;/0*.C4(.36@,$',+35#<+*9,%!4*8864(+#)-00.6.7*/.90/419(/131(#0.50/+5./6+-+*-/&)0773&'4 4/6';,+7-*>),/4+1-*:90/77%6'+61)+04+,1%533+"+<4$%,/1 )19,-6*261471''-)34+$-+)+/&/4+,39797-",-<2?381*+5(+)./&A)53(/+&/27/1+(44'9.$/9&ՀրҀр׀Հ؀ۀ׀̀׀Հۀ݀ԀـՀڀـҀـ܀؀ہ./9,;%/2'3020F)%9)/:7)5'3*#;2(1A.;!*&#*7;1-,%2%*$('/**.-2:06+:#07*38:6&6+-(644(/).N*440" 53 02/3(*4005,/21))2'*6--./,:-/)1.%(*.+)#-+0A#)5.&$(773)01&*-0:*>)'+6&.(4&%)/B9+5-72>9@AD4-'2>5/*('2&/-)!/)<<'760;#0:3/6/2/*:'221)@-212%#7<--/0C950-#,7&6'5.%&94#.)./+7-2.5(<-7/"+-""4-22.%*5631)'7A''!97'4!(34:)%5+(.0*&7/$(/"+,!/-$$)/(12/1(.,&/12-.&10;71.:,4$$8*4-;--1',,<46$21,9 75*,/278))8*3C,%0)+"&*)3+/(*7;%9961&5/5%8+4$-%15+ 160!"&1,-#-.15=;4/,-+.( *3+*9951,)5'1/&35+3/--/5+1&0108(./* 2<).60420' )625(+%14;($.(1-,5/2;/2#1+'!0%%35,9+(23$/*/)-5)+0)1(224/1D+7-8&1,(4',6.C,&'.93,,/Ԁ׀ـր؀րրڀԀƀ؀ՀրӀրЀ׀ր؀ԀۀԀրՁ20/72*47,>/.(().'.'2685,,/2)++$./3+9".*5+)<(4854/-'/0-.3'09,1204:22E%3.)0)2%5:**40)5*'-2+)34+#0"(/1/B5398=/81,9?&)+!<%*"8"50*(4*2,50)(!(:1;/',%*($ 1/%'*-/$,2'2*(0/-.0+!"2$/:)/1.1'(?"/(.9)6'*4=.1*2**5/)%/!60<1(%,+5+0*48+#.-/5!4%#1-%**6 )*.)4<054#+,)01)3-*+3&4)-9-/311'1/&)(,1&4*,!=4.7%4')378*1*4!-.>(2/%*'*:0.+050(-,*,6)&/-):79-(7'"//60".;6) .6-?45/5.1)21(?.0''#2&%*0/-69/&//*$/44(298)*! 9+&1&'9-,+.6,7,1+1=8/+. $&6$/8+*92D$-(*')0/30/&%**)6*)+3144<30),&7%($1(,-$1;106=9&7:5(4!,,/*4*(!*93'7/'(/;80/׀ՀҀــՀ׀ҀրǀԀՀـр׀рՀڀ׀Ԁڀ΀܀ہ6/%2252&++5/<&),:3*4;3A/(6<$+3#7/-5-5195+.5&1/40%!2,/.%3*2014,&4"43"+1,%#8$).7/423+#;4'!46.;+)"&2#0855B@@L039<33-*$?/+,);8'8+17!263A$*-*23/4/61,)<*"1%053#3335?.#B-''#/9$./.'-'(-0+&-0 =+616*1/7 '&+!)")43>75.9*3//*'/18&40.(-%*&1:$7*'.-!)D/"+)-/1.&)4360.+0,:=-9'(.+67//3$%47*ՀӀրπՀڀڀӀԀɀۀրـҀ܀πՀҀــрրԀӁ64.2/)44 $8%-1)2.9%7%//6548:,'3),.-1*.&'*3-+222/+. +/%,4'/*9%,2.(@'%)2+.7-2'/%4 3)"/8.),//+)&+!-+)/1/8FX:EAE5217)<87/-&.)/4'956%)+2%.2.3/0%*':&-#"/807'-*%7&+5)4D.61&<)(:3.91)./*5192;-.1/13,9+<'2%<068-.6#*(-6;2*58.(//)+1/-2/#34&03?0-/5(/6/9*%?49.00)++/'*)2,,=4';612-2/214'43%?0',1//.((6)! 
+&5"*86+-&6++1"+*/%)(&#+#.(31)-+6-1-( 70>&*$8+(>")55/6/,(*1/#5&/=5,#*"(8+)0#.')5651:"&&/;1+"$&)+ 9*/1/&*+690,/9*:)#3"*:)$;8)-.63/14%.--%?,!4$.-7.)-(,3:+,9"'.+*1540&20.<)&0$7%..5=+8,&1/1"4,-#3-1,.5/:<*,)/5(3;2)Հ؀׀ڀрՀ׀ڀـǀ܀׀׀ـ׀ՀـЀ׀׀ՀπЀӁ4.-6,;64.D/ ,3'>68>5.40*94A()&)62.73,5&+49520"/=3,7(3!2*''3'"7147-5,&%2%7@(-10 ,'4,0,9*-/(%!+.,7+(..?2;M;=DAE@5=+10.3;&/-.-.!(3+1-#+%-)B*.11:%0+*$281,6'(++ +-)+*.+1.31+=3/:'+$-D2-1;(>:1B'2'.;,*+1/'$044.1)(43.--(+./('+&,"54:?(8$+/4&-//*5/(639&/*,---3276+39';/)(,%4,2,:/<2%('.+)'/+6-3% %)$048&93-6(5)1%(41/+4.%/43+*-,(6)3*(,36+'00+/&.<)*''+)"##/%*,3&(3(26.+/..&+.3+25)('*73/(6+3'1'&-162*/8+#%865#2/$62&33&2&!3'%'-+5+%'+2&-0?--',-7*5>A,2,461+/-&/;5&)/,;(#.(,;7630,-879(8//003",0((+/@,0*3)%/2-!9.34+6/620#)ԀԀ׀Ԁ׀ՀրۀՀƀրԀՀր׀׀ҀـԀӀ؀ԀҀЁ(22;7'3&:/6<,(8!&7/=:2300$&(++.,7,<.4*5+-(430?6/3:,5$=65&3--,/&+&+:2$65*!!4D+-.$%038!+I*)/%$'0'2/(00GF=4@ML>JE:.<7:+1&.03%-'A"8&,0423.#7-.)8'%-67)+4%0,/2'.//*+.12/-B1%6*(/).7*#(/$'&*&6!-:/0"0-+*8";.7&2"+(0*51++2+,&=.(1(9/4;.+.(%.18!0(1,'-+,*(/5(3;).50+5','456../&97,;5"%,1?/4(/+/3+6A.334'+#-*;.,7+2)&(5*-%-3>2,(/8-,*8B*-)1-$,204/%:8(8-04&9+3*)87+"6*6/),&+/5.1--4/.#;%6&0910%%*/$2'/35"'42,3!&,07/4'*--&('51/:;440*7:--)/6-4$-2-9&$8(";)((#1(-&)/270/>+6353'8?12/2-*'/0'1+'D/%:6301++*5-49:*/&*14+4"*/+<(C؀ҀڀӀڀ׀ۀՀ׀ÀـӀҀ׀Հ׀ԀրրހՀՀԀԁ03774:4,3+30.$)E335/-7?".+-*(26!$08/#.()*466).$9:3/#B(A58=751'3./#0)25-/151)2,,!2213(/33&2'/'+13"#%69.1;NBHH=C=?:..#.*00>,324%9$**+5/0(85.*+&,$8&)0);(>,54*;0--&7,&5,10.%/-*+)2+3)*75,!-/*+31+!84)'41#10/1408&3*,7,>-':=(-#%04*07'+%42.7/43+10;.1#)8)156+/E&.-'9-,';3)/>$/557.,)/,!/4-0"471,-0,/=81--*+()/%3,2. 
7/''938,"5K/0)*)+3(55/142)-'(''-3.0@*/,-7%4/.')'+270(*/&52020)*2+,0,,/$-+0/<,3)(/*.<065(!-0=23362/$.-4.#47/232+%.#$2,360(*03?24-),(".<*+2+9(,:7,&+04.,6*0.&4/5B'/.*3(7321,.4+750$,#:)8(%/3Հ؀ۀր׀؀׀ӀՀŀрـԀ׀Հڀـڀ׀ր׀̀ՀՁ10/---<02)%6:&7**++%'73.*9/,9)10+*3-"@;=)/'7)3-#2=-7+'5-)7+27-'6+-*'-%+!0)%9'7(8$$%80085.0'8,+*16A>05>D//1+.,'.=)$-4%+%.4-*19'<(5.)5,/91-.9,/4/13<5-7.06017035?5/6./--3'+)-7632 2(-&9/7!<66&5.@=B=?II@C5@*8:&:;'*3 ;.:)#542,703-6'/-6**0830'53/2&(%9(),.(@/)-#D'+0+0/5%15867%,.+'0#*'#'C9(:4/>&-*).1.1,4-7)416-55)5*#8&.0*/%0'35,9(*3,902<%(/,23>+&-/4+-:/:*07#81575/6*,0+2)$,!%>2<2$030+**?-.00/**),6(*7%.&.5/.,*.0--580">"+2%,801#@/"159(-0//-34$,,5,C,0'&/#331+'2-)8307*%-)-&55-?$4>*4)5*+6+'4/%#333*/3#%36./,3/7&.$1'/5+/"+-+&,4*2!5/463"!.-%+/5/,*2402)609+-;89/:.'1-;2!435%0 /(1,3-80-+"32)'9%2'6+ՀҀ׀Ҁ܀ڀՀ׀րĀ؀؀ՀԀۀՀڀ׀ۀрրր΀Ձ&0/ 47.077)-4A/D:+/)281.(.2(,2&.$1318%+$312:5/(941%4'0+9+?5-64)**+,--6/>0&&.,(-"7/.*8 67-*:+(&F..6$%06,5#.,5//&1,%7+32/,;83!:2.#/'8,$/)75.5883>1/-283ԀЀ׀ЀԀԀԀ؀րˀӀրՀ׀׀Ԁۀրــ݀Ҁڀ؁32)!0@#44@0*'35301/.*))-,-/&.44/<5+12?(9+&+0@2,24.:&2%&132%.(1).60*9.:/A)#*'*+.,5//.8/16.206+&&'+?)=5,E%2H2+/:2492%,6.+>5/;(*09,:0*0#5()-6161%,0.'21%.-55+(-16'-$*8%3447%+;)-0(2,(+ ,F7-.+#1<:-7)8(0$7#/29+?.63*).*@%7&2435!16/$-$*,3-.C84..*+%83;:,.%/8/878.**%:&,&,,%:&;0&./$&00$61$48()*0+4,14*$!#(0+0+5$8&/62**1)(=%.<0!%7-(4-6.6657)"!,23$5*''0=44;#-A"=,'/57027620/)5%3!!*.116,1;2;%%1',/03/2/,,&1--'05'.-$ +.+6()7:126<#:/&;'76%-'.47;@1$*-4:4-4/:5.$B&2?1"3/!=".-,5'+$<.7./@&18"*+!+/32+>/+<<9<31*,=61#'&344'<؀րԀڀՀ׀Ӏ؀ր̀׀׀ڀ؀Հр׀Ҁ؀ӀрԀՀځ8&.+477#0()+$1+3'1%/6)-3,/1!#;/%8315'4.-+27)9.110-)2)**B3+5.8(.4*-&+4"'+0-5+)/(/):.,+/$2(0'3:)/14162 3,@,+&)3-6*<-$*)9 
/6'9023%,#(+5-"46'(-9/).85+7@:..2(#%0/2;*)E0,7)6&*')/+%3!(..&!$=8.4+*!+/0<79))0+*11,4-(+)4./33'#7.%1%-/-+0((&61;7=76-).+'5$3+-55+**$73-#$+/38/'3-&29!)7*B128)4%2).%#0+,)0*')$490469064?2=3*)0)%4.*5.2+-*1$)61-6&3*04.%52+9%()&**519+2759/C0)4*5"-"7()(0'2$.65/7#-'45,$!05%(-*+/96A//4/3'4,-=3(50%-,11*08,1-/=>%41*"428)20.*&5,+/*+6-/.27..67(*,21-11(7.%2("81*#@38+/)4.'-1.76')0+/.',(90&/4,'/)**#214 2..3*$)4:24%#%?,$,6*-44.7*(-!'6.0./0/+4)---36;/+74*$.6212/)4/&",)$$$3.3*0'06//,%+.&;-6--'),%98/30#(4*/7'54,/(%$*#%(5"-$0-(-'+79'22%)/(3-,;0(1-0"5-'//38&,#1+351(45-7-;.6$3: 3)3)'0#3@3'30(0'/ <>/3))).$6* )5)-15.1=+*'.%3)-73,((2'4/-9!,)0.#>85'01*/,)22/0.6*--99(#(4-5-,!2'/9'%7(3,/2",-))!.7,/66-6-*''-1&+(0)/-*:%+<0735.-$.*(1&0;3;8?//'&1!4&ــۀӀ׀ӀՀ؀рƀ؀؀րրـڀ؀Հր؀ҀՀـ߁,0(*716(1//#,",3-2?:&(-1,,3%'(6-/D0'@)5$3*,#'+)/,+(!*'31)#7--*0/'/+3/1;4,8/5/4'*..(122.#05'6-&+%/+<43!:49753%//9/-160827".+,/(/<2272)+<)35+,0(17)(02&/%4716%.)14-6.3.#51/+/,-9+09'250%,38&;4*-0#63*1.4$ 8,>+(3(/. ;*1758/+4,&-23,/'-#331$(;1/:/$-&"51=0&".'36.3*:0/./,6,(*7532/7454.#)/2-%).-*/ 4;'3,,2-#1*8&. !+.-22(-00,)0;6,504)21$./),1--60,1*../2/>((5*5/1'&034064,.#++/0$379+4,038-9/7.264)002+2'.1.14E-+,2"5**75?+,!+0)..+3003)10';16,+6+ 18+#1@4.6:6"(31()%+542,+&9,(/3$/73/-+:-"5/:/,.2./%,010//:6#-*74*ــրӀӀրրՀрŀԀҀրрՀـՀڀՀՀ׀ـՀց=6+-)4:A9/0/%&12)07;9#,7;:,3+'*%-+68+75-0801+&+45&.)%8%0<8,)/%0/3.9<3/0.++8/(@/.40'0-21;'5')68<2/0./1/ (=91*5;5)3/3506'0,'6%:('8-27,+9*;&+08(38/1&/,+45-4+'%#25>'5,/0=#$.('/2?4":4!059#(6%8.3.+:)**/$,11)+%5*$4.$/,),4&,$(+01-%,24.'3+154,-+(3."/0*::242&!)(+2379)0-'/+-+,0%.7(,://.1*.34&..:,7/0(/%=%G8#)'38+)0*(=&./#+)5-1*+(+31+7&"70-*'A,54874+/8')<$#6$',/<53'-,36$02)+;0=/23'8%$:29461)34)6>%/'*&-327,.!/'6,!3'7$&/:3+0/1>2)&'))1;5B12,0-6.-/0+1$(%-((6$14)+635/7.#+&#I-,,,.<:)@9")0%-(+$9"#4*&+C&12+<2.'+=<.4(рԀ׀րӀـ׀܀׀Ȁـ׀ӀԀ׀ԀڀڀӀӀրрـЁ5/(($// .,-%1-10'-77.3).72645"%*)(0-101<1/14*.(>,)#.*.5/'*:+5-+')502<;1+.<7*3)6201!().2:.++)/-5.6107-'15 44-4E%5'-&$,(1883,-((#+@/92%A'&"4./>!#/+).<)+-4*'+? 
51'24311%-6(*0#0(;-+31 )*)!673:'523**-/35/*0($'$%.450))%/9/310">-(.212>(/)4943,%"42.3*)('1+3),,&19+!&!."/,-7 ) <+2#+"%>/%&&+.6(:7/(+C3+%1/2:+61"5(+=8.-,-,,0/24($3-#24 -./2).008/,4,?3/5-2)&0,'( )8767/)#-&,-5$:28.;'"0+$)(**&)3')0:(-'%&*('/1(483)$5<+2!/+/.A1'5&41/4*+)C68,<$-.-*0,'+113% +31,%./+3.,2/6B2/1)-: 161.('A,2++11709#:/,>"/D9236,./.3.07'܀р؀րԀ؀؀׀׀ÀՀ׀րـՀހ؀׀ڀ׀Հр׀ҁ,0468053-135(%/;-+)0%-/1#*714312:,)52<.4'600-+!=:)68+1,,7).$/5;-'812$073*/1+)/146;A.43+-/;&-46+02.08'&%+3,.-.18#+/2&56%*8.-4(..)515.1++.10-,:-,$733/=&213(-.34;(5*G6+5'!?/05/,C#)(*&*9$)20723(0+2/').):/ +67"..-1/")1/!/' -/%-3,,/-+:/*8=-.9=7+*#:&/5(8+*&3/2F+('69-7".,?0-20+3.%(60293.*(1(.31,-%6>"<8+))'17//;/3"3*5'/54.54/,H62(&)+45+-245- 1+5''3!",*?2./+*'0,<20;$*&47+."$0",504%+7+/6066/17"--A#.,2--2)6+&6)/-)+A.4>$.6(-4;22''-5)(0#.%%&.,--*?@/?34-&2539<3",'5/1<#:/7.',(1,8915-B42)59$3/+7&$"--(233(/=%1-/րԀҀ܀׀ڀـ׀ԀŀՀـԀԀـ݀րӀրԀЀրрց*<>*'213258/71#5498/86--,2+62),;020--.1)'))5!9+).*""+<463#.0$B&64/),*;48',(2&+191-//9-.("2(".0-/#"36+:41@/+478:0/(.+/26,67& (4,2(/44',$9*4:4-:4#17.:.67+.6+33#&+;51'-+/33+07%/B77.22$9+(514,0ۀ؀րԀ؀Ԁۀ؀ӀŀހԀـڀԀր؀Հր؀ڀ؀րׁ20.&"/2&5&%1)).6%"(,/75 31)1;("%++--'1+11')*0)*"(-/+->.0#&.+-"*55-%4'+&41 ?/$!01+)4';'1%-/%)'1&)(5)/&,'('*#. 
',")2+/17=+95"2.*/ +0$-.!/6$3,..(%/,-+%5'7-.,)+5(41),/$&1(,'3*$0''-)&%$')03-:'8"4%%22+!)'"/&+,*2"1'/)"-.%<& /* .C+3/,&-1/+'0(!'3+/),&/<.&<.(/,%)(8-+,;0.5%/2"7/2#06'!!6'.=128)'B)!'"-0&2>+$,,!(+!#%:!!(/ *-& 4&'(*'$.)-3.%+0+1&*'3 /-("-&,61%-;6/0(+.,071)!$3$/2&-3.5;-'(/)-),*4/86+&#,'#'0*22((+5--35.$/!$B*(-613/%$$33)-*0,(."*11)0(+/(3."()./(1$9%-7+*.4,3#)7'()'%().58+"8(#$,(41#*.+6./5"-!3πЀӀրՀՀ؀؀؀Ȁ׀ՀՀ݀ՀҀҀԀՀ׀ҀـՀրހӀـڀڀــڀۀڀ݀܀ڀۀ܀׀ــڀ߀ـڀـۀـր݀ڀӀ׀ـ؀Ӏ؀׀׀؀ۀ܀ހ׀Հրۀڀۀــրڀ׀ـۀ܀ڀԀـր؀Հހ؀ــ؀ۀ؀؀׀܀ـӀ؀ـــ܀׀؀ӀԀ݀ڀـۀڀۀڀ܀׀ـــՀӀӀ׀ڀӀ׀ր׀ۀ׀ڀ݀؀ۀۀڀր׀Հ܀Ԁـڀ׀ԀڀӀۀۀ݀׀ـրڀـڀ׀ڀՀ׀׀݀׀׀؀׀ـڀ؀׀ڀ݀ր׀݀܀ــ׀րـ܀߀ۀ܀݀ۀրրހۀ݀׀؀׀ހـڀր׀׀Ӏ܀ـۀ؀݀݀ڀ܀݀؀ڀ׀ڀ݀܀Ӏ܀Ѐـ؀܀ــՀրۀۀ߀րۀۀ؀؀ހ׀׀րՀۀ׀؀ڀڀހ؀րրրڀـ݀؀׀ڀ܀؀Ѐۀրـ؀ۀ׀݀׀܀؀ڀ߀ր܀ۀԀڀրڀ؀ـ؀Հ݀Ӏۀ׀܀؀ـԀՀۀՀ؀ـր܀ՀՀڀڀ؀׀ـ߀׀ڀ܀݀ڀـ؀ــ߀ۀڀـ؀ـՀۀـ߀܀܀݀܀ـ݀݀ހــ׀ۀЀ׀ր׀ހ܀ۀ؀ր܀ـۀ׀ր׀ڀ߀ڀـ؀ۀڀ؀Ѐ݀ۀۀۀՀۀހـ݀؀ۀӀ׀׀߀Ҁրڀ؀րـ߀ހր؀ՀՀՀۀ׀ހՀـԀՀր݀׀ـրۀـــ׀ـрۀ߀ـހր׀Ѐۀ߀ـ݀րۀԀڀۀـՀـրۀۀۀӀۀ݀ڀـ؀ڀـ܀ـπـހڀ؀ڀـҀ܀׀ڀ܀ڀӀրـۀԀ݀؀؀ۀ߀ڀӀۀۀ׀׀܀րրՀ؀ڀ݀؀Ԁـ݀׀ـր׀ۀـր܀ۀ݀܀Հڀ׀ԀـڀҀـڀ܀ڀڀրۀ܀ــր؀րۀ׀߀ހ߀݀ـ܀܀ހՀ߀ـՀՀӀ؀Ҁр׀ҀƀԀրՀـӀـЀրـ؀րրՀ؀Ӏۀ݀؀܀ۀــ׀ڀ܀Հրۀۀ݀Հ݀݀ڀ׀ڀ׀Հހ؀ր؀ـހڀ؀݀݀ހڀ؀ـڀ܀ԀՀــ߀Հ܀ڀۀڀր܀ـ؀׀׀ـ݀݀ۀۀڀ׀؀܀Ӏ݀܀րڀڀ؀Հۀــ؀ڀ׀Հــۀڀ׀܀Ԁ׀ۀրӀـހ܀ӀـՀڀ܀ۀՀ؀ۀۀԀڀ܀߀ڀӀ݀ڀՀԀ؀ڀ؀րӀր߀րۀ؀րـ܀ڀۀــڀۀҀۀۀ׀׀ـ؀݀܀Հ܀ڀ؀ۀـۀ׀Ԁ܀׀Ӏ׀؀ـڀހ݀ـր߀Ӏـ܀؀ՀހـՀۀ؀؀ـ؀׀ހڀۀ؀Ԁրۀրـ׀ـՀۀԀ܀܀ۀ׀Հڀۀ߀׀ր؀Ԁހ׀܀ՀրҀڀހ݀݀߀ހҀۀڀۀ܀ۀۀрۀހ׀ۀڀ؀ހۀ؀׀ڀـڀ؀ۀՀ׀݀׀؀݀݀׀܀ڀڀրڀҀۀڀ׀܀Հрހـۀۀـۀـրހ׀ڀ׀ހ߀ڀՀڀ׀ڀ؀ހ׀؀ـ׀րـۀՀҀ׀ـҀڀـۀڀ݀ۀ܀ـۀــڀՀ܀ڀڀ׀ـۀր׀׀ـڀ܀ڀՀ݀ۀ؀݀р׀׀݀ڀӀ܀Հۀ؀؀׀؀ڀـӀրـڀӀހ܀ހ׀܀Հــ؀ԀـՀ؀ۀ݀؀ڀ׀րހ׀ڀۀـڀހۀ߀րـ׀ԀڀԀـ܀ـ݀ۀր؀؀׀؀Ӏ؀׀׀݀ۀــրրـ׀؀؀׀ހـ܀ڀۀ׀ڀڀـ܀Հހـրۀրۀڀۀրրڀր؀؀ۀހ׀݀ր؀׀؀ՀՀـ؀ڀڀڀ؀Ԁۀڀ׀ۀــــ߀ۀـ؀րۀـӀӀ׀ڀ߀ڀՀ܀րԀۀ݀ڀۀـۀۀ؀Հ؀ڀ؀ۀ؀؀׀ـՀ݀؀րۀ܀؀Հ܀Ӏ؀Հـ݀݀݀ۀրڀـրҀ׀Ӏ؀ՀÀՀԀڀրՀۀԀՀ׀ـԀӀր׀׀ــ׀܀܀ր؀р؀؀ӀԀҀ׀׀߀ۀՀـՀـހ݀؀݀׀׀ـ׀׀؀ހۀ߀׀׀ڀր؀ۀ׀߀Հ׀׀ڀ݀݀߀ڀڀրՀ؀ՀԀҀ׀ـۀՀԀՀ݀׀ڀ܀؀ۀۀ׀݀ր݀ۀրހڀր؀؀݀ۀ׀ڀր߀׀Հـڀڀ؀ۀڀۀր܀؀րՀۀրڀՀր؀؀ڀԀ؀ڀڀրۀ܀ր݀ڀ߀؀݀ـ܀؀ۀڀڀ݀܀ހՀۀـڀۀـԀ؀ڀԀ؀܀ـՀڀӀ׀ــۀ؀ۀۀڀڀۀ݀؀ڀրր؀ڀ݀܀׀րڀހրԀ׀ـހۀՀ؀ـۀ݀݀ր܀׀րӀۀ؀ڀۀڀ܀ۀ݀ڀ׀ր܀րՀڀ׀Ѐ߀߀ڀӀۀ׀܀ڀــ݀ҀـրހՀ݀ۀۀ܀ՀՀڀـ׀ـ܀ԀԀ׀ր؀ۀԀրӀր׀ـހـրՀــ؀܀؀݀ـ؀׀؀ڀڀ׀؀؀ـրր׀؀߀ڀ׀݀ڀـހ݀؀ՀۀՀۀ܀܀ހ؀ۀր׀߀؀ۀڀ؀Ԁـ׀ՀڀӀڀڀۀ܀ۀ׀ۀ݀݀܀؀׀ր܀ۀՀۀڀր؀׀րԀԀ߀ـ؀ـ؀ր݀ـ݀ۀ׀܀ـۀ؀ڀۀԀۀ݀ـڀۀ݀ڀՀ߀ڀۀ܀ۀ݀Ӏ܀րԀԀ׀ـހ׀׀րԀ׀ԀڀـրՀ݀߀܀ҀրӀ׀րڀҀڀۀ݀ـ؀ڀڀ݀ڀҀـ؀؀݀ڀۀ׀րـ׀׀ۀ؀ـۀڀ׀؀ڀՀۀ؀ــڀڀ׀Ԁ؀ڀ݀ـ׀؀؀݀؀ۀ܀݀ۀ؀݀ۀڀр׀ـՀ܀؀ـ܀݀Ԁրހ׀ԀӀـ׀ۀ׀ڀۀۀـՀ܀րր܀׀ր׀߀ـրۀـ׀؀ـր߀рڀրـڀ܀րՀۀـڀـ؀׀܀؀Ӏ؀րـ׀ՀڀڀՀՀЀ߀ҀʀԀրҀ׀Ӏ؀рӀԀЀӀ؀؀Ԁ׀Ҁ׀Ԁڀ׀܀݀݀Հ݀݀܀ۀڀր݀׀Հڀ؀ۀ
ڀ؀݀ހ݀׀؀ր܀ڀ׀ۀրր؀րހրҀ؀׀݀܀Հ܀ڀ׀݀ր߀ڀ׀׀ــۀۀۀրـۀۀڀހ؀ڀڀـՀ׀݀ڀЀـހـڀ܀ـۀԀۀۀހ݀Հ׀߀ހ׀݀݀؀ڀրـ܀׀׀ހ؀܀Հ܀ۀۀ׀؀݀ڀۀӀۀրـӀրՀՀۀ߀܀߀ـ܀ڀ؀ۀ݀׀ۀԀڀրՀ؀ـ׀܀؀ۀ׀ހ؀؀׀ـ׀ڀ׀ـҀۀ؀ۀۀ݀ـՀ݀ـ؀ހ܀߀ۀۀ߀ڀڀ׀ۀ܀ހــڀڀ׀ր؀ــ݀݀ڀ؀ڀրڀՀ܀׀Հ݀Ҁ׀݀ــԀՀ׀ـ׀ր׀ڀـՀ݀ۀ؀ـրր߀ڀՀހӀ׀ۀـ؀ـ݀׀ۀ܀׀ՀՀҀԀ؀ڀՀ؀րــڀ׀Հ݀ڀ؀ــ݀ڀ؀Ԁ׀րـӀڀۀӀ؀ڀـ܀Ҁ܀Հـ݀؀ۀԀڀـۀ؀րۀڀـڀՀۀހ؀߀ڀր׀׀ۀ܀߀ЀڀڀրԀӀـހــրـӀــҀــ߀ހ؀ۀրۀ׀؀݀؀ހ׀Հۀ܀ހ܀ـۀ׀܀ԀԀ߀րԀڀ׀ڀ݀ۀۀ׀݀׀ۀـހՀۀ؀Ԁր݀рڀր׀݀݀ۀԀހـ׀߀݀؀Հۀ׀ۀـڀրҀրրՀՀӀۀՀӀ؀ـۀ݀׀ր݀ۀڀ؀ՀڀրڀӀ׀׀ـ܀׀Ӏ؀ۀـ݀؀׀ـڀـրԀـҀ܀׀ހ׀Ԁۀ׀րـڀـހ؀݀րր׀ڀ؀ۀڀۀ܀݀ڀ؀ր݀ހ׀ڀրـڀۀ׀ۀր݀؀ڀ׀ۀ׀݀؀׀݀׀րـ܀ր܀ۀ؀܀ԀӀـ݀ހԀـ݀݀ڀހրڀ׀ڀ׀ހՀӀЀԀԀ؀րրՀȀҀՀـр؀׀ՀԀ؀рڀӀՀ׀܀׀ۀ؀؀ԀۀրԀ؀ހـ݀ـۀۀڀ؀ــހ׀Ҁ׀ހӀ؀ڀـۀрՀҀـՀڀـրـڀրـ܀؀ր߀Հـր؀߀ـۀՀـ؀܀Ԁ߀؀ހӀۀր׀݀݀ڀـ؀ـ܀ڀۀ܀׀ۀۀӀ׀Ҁڀ߀܀ـ׀ۀڀـ܀ـڀ׀ހــۀڀڀ܀ـހӀހ߀ــӀ؀׀Ӏۀހ݀րۀۀ߀ــــ܀׀ހր׀ۀۀ׀݀ހۀԀــ܀؀؀րր׀݀ـՀՀـ׀ր؀߀ڀ܀ڀ݀ڀۀـ݀׀րӀ׀݀ۀـ؀ڀ݀ހҀՀـ݀ۀۀހ܀ڀۀـۀۀ׀ڀـڀրՀۀ݀ӀڀԀՀ׀؀܀ـրՀހրހրހڀـڀ׀܀݀؀ڀހـ؀ـڀ߀ــ߀ۀ׀݀݀ـڀ؀ۀ݀ڀրЀۀ׀܀׀܀܀׀ՀӀ݀܀ـ؀؀Ӏ܀ڀ݀ـڀ׀ۀ؀ــԀ؀߀܀ՀԀԀՀ؀ހ݀׀؀րրӀ׀ـ׀߀؀؀Հ؀׀Ѐހހր܀׀܀؀ր׀ڀր׀߀ڀۀڀրڀՀ׀܀Ԁր׀Հــ׀ـ܀׀ڀЀۀـ܀؀׀؀ڀ܀ՀۀӀ؀܀ـۀڀ܀׀ـ׀ڀՀހՀՀۀ׀׀ր܀ۀӀڀހۀڀ؀ڀ؀ڀ݀׀݀ـۀ݀ڀـրـ׀׀؀Ԁ݀܀׀ـԀڀڀۀۀـ؀݀Ӏۀ܀ԀۀӀ؀Ԁڀـ؀ۀՀ؀؀׀րր׀ڀ߀ր؀݀׀ӀՀ܀؀ــҀڀՀրӀڀـــۀڀڀ܀Ҁـڀڀ׀݀ڀԀ׀ۀՀـڀ׀܀ـڀ؀ـ؀ــۀ܀ڀՀـԀڀ׀ۀ݀݀ՀՀ܀ـ׀ــ݀݀Ԁ߀׀ހ׀Ӏۀـ؀׀݀ڀՀ܀ـ݀ۀۀ݀ހހ؀ـۀڀۀր߀ՀڀڀՀـԀрՀӀÀր׀׀؀րр׀ҀՀӀҀـրԀ؀ӀــހրـՀ߀݀܀ۀ؀܀߀߀ހ݀ڀڀ܀Հڀ؀܀Հ؀؀ր؀܀ۀրڀՀހր׀ր݀րـՀԀԀ؀ۀڀԀӀڀր؀րр܀ۀՀـ߀؀ڀ؀ۀـЀԀ؀ۀրڀۀԀրրրЀڀրـրـՀڀ׀Ԁ؀܀ր܀ڀԀ؀ـހڀր܀׀ۀ؀݀ۀۀڀڀۀڀրрڀڀԀ܀؀Ӏـۀ܀ـ܀ـր߀ՀـՀۀڀـ݀ۀހڀـڀ׀ۀڀۀրۀ؀׀րـ܀܀րրڀڀ؀߀܀Հހ؀ـހـ߀׀؀ۀԀ܀ـ߀؀Հـ݀ۀրڀր׀׀ۀ؀׀Ԁڀ׀׀؀ـրހڀ׀؀Հ׀ހـ׀ــҀڀ܀ր܀Հ׀ۀڀۀڀ׀ڀԀۀԀހ؀ـ׀؀ۀԀۀӀۀӀՀՀۀ؀܀݀ـ؀ۀ߀ހڀ߀րڀ׀ـ؀ހـڀـ׀܀ـԀۀ׀Ԁڀـ݀րڀ؀Ӏڀۀـ߀܀ڀ׀ڀـڀ׀Ԁۀ׀׀Ԁ؀ۀրڀۀր܀ހހԀր܀܀ր݀׀߀׀րـۀـ׀׀ڀۀ׀ۀـ׀Ӏ߀ր؀Հ؀Հހڀۀ؀܀܀ۀ׀؀ڀـ؀ۀ׀ۀ׀݀߀Ҁ߀ۀـ܀׀܀Ԁ݀ۀۀ؀րـۀۀۀր؀؀Ӏ݀ހԀـڀۀր݀րـ߀ڀ݀Հހـր݀ڀրހҀ܀ۀ؀׀րրՀ߀ڀ܀ۀ݀ڀՀـڀ؀؀؀ՀրӀ؀܀ՀӀ؀րՀ߀؀ۀԀ؀؀܀րՀր׀ۀՀ׀ր݀ۀՀ؀߀؀؀ـ׀Հրـ׀؀ڀـ܀Հ׀ۀ׀؀݀ހրހހ܀܀Հ׀ۀڀۀՀӀۀ׀ހր؀׀ڀ׀Հـڀр܀ۀր؀ڀـ׀ـ؀րـ݀րՀڀۀ݀ՀՀـր݀րրрՀ׀׀׀ԀπҀԀʀԀۀԀӀ؀׀ҀԀ׀ՀҀ׀րԀۀۀ݀ڀ׀Ӏ܀ހ݀܀ڀ܀րԀـڀڀڀ݀ڀ݀׀݀؀ڀހր׀׀ـۀЀڀ݀׀؀ۀՀ߀ـԀۀـۀڀ؀؀ր؀ۀـրڀ݀Հހ܀րۀ݀ۀ׀ـ؀ۀـۀԀـڀ׀ڀ׀؀ՀڀۀԀ܀ր܀׀ڀ߀ــրۀ׀ڀ׀Հրڀۀրրր؀ـۀ׀܀؀ڀـހրՀۀ߀ۀ߀ۀڀ׀ۀ݀Ӏ݀ր؀Հހ݀ــ؀܀׀Ӏڀڀ׀Ҁ܀ՀڀՀـــր؀Ԁۀ؀܀܀ڀ׀ހ܀؀؀րۀ؀ڀ؀ۀ׀Ӏڀ؀ۀ؀؀ـۀـ܀ڀڀ݀ր݀׀Հڀ؀؀ۀڀ؀ۀԀր؀Հۀڀ݀ۀ׀׀׀׀؀؀؀׀ڀ܀܀ԀՀۀӀۀ؀Ӏ݀߀݀ۀ׀ڀ׀׀ՀԀڀـրـۀـۀــ܀ۀۀր߀؀ۀڀ؀ـ׀؀ـۀրՀ׀߀؀ހۀ׀ڀ؀ڀڀـ؀؀Ԁــ؀ـ׀ՀԀԀڀۀ܀ހ؀׀Ԁڀ܀ۀ݀ӀــԀ׀݀؀ӀӀ݀ڀ߀׀Հـ؀ހրՀ׀ҀـՀ׀݀Հـ؀ՀՀ܀Ԁۀրڀڀ׀׀Հ׀ـۀԀۀՀ܀ۀ؀݀݀݀܀݀ـۀ׀ۀ܀܀ــ܀ۀ؀Հ؀؀ـۀрՀ݀׀ۀրՀ׀׀܀؀؀߀րڀـڀۀրـ׀ۀ؀׀Հۀ܀ۀڀ׀Ԁ؀؀܀ۀԀۀހ؀܀ۀ؀ـ܀рڀۀۀـ׀ހր܀׀؀ڀ߀׀׀ԀڀӀ׀ր܀Ԁۀڀ؀Ԁ؀ۀ݀ҀӀՀԀ؀րۀր܀؀׀݀րڀـ׀؀؀׀ڀӀڀހ׀ـ܀؀ހπـހ؀܀Ԁ׀ڀڀــ؀
ր܀؀؀րԀ݀܀ڀ׀ڀڀــ׀Ԁր܀Հրۀր׀Հ؀ۀ׀ڀڀ݀ހۀ؀ހրـրՀ׀ڀր׀؀ـԀÀӀ܀׀րҀـրՀـӀԀ܀Ԁր܀ڀрր׀ڀрۀ݀ـՀހـ܀Հր݀݀؀؀ԀۀۀـրۀրրހՀ؀ր܀ۀ݀ـހـ݀܀݀րڀրۀԀۀۀހ؀ր܀ր׀Հـ݀ـՀր׀߀݀׀ۀՀ؀ۀـހր߀ـ؀׀ۀ؀ڀހ؀׀ր؀ۀրۀހրրۀ܀ӀހԀրـ׀׀ӀԀ׀ڀր߀׀܀݀ڀրڀ݀ـ׀׀ڀ؀ԀԀۀ׀݀܀рـ؀Ԁڀ؀ۀހۀ׀܀׀ۀۀրڀ؀؀ԀۀۀԀր؀׀ۀڀـ؀ۀ܀Ҁ܀ۀ߀Հހ߀րــԀрۀր܀ՀՀԀـڀހ߀րҀڀــــހՀ߀ԀҀ݀ـրۀՀ܀Ӏۀրڀ܀׀ՀۀـӀހ܀ۀՀ܀ۀ؀ހـހ׀ՀހՀ݀؀ހ݀܀ـڀ׀؀րـՀ݀׀ڀ׀ـހ܀܀؀րր܀܀܀؀ڀ؀܀؀׀Հـ݀Ԁـڀۀڀڀ؀݀ր׀րڀՀڀրڀ׀׀ՀՀۀ׀܀ڀ؀؀׀ـ׀݀Ԁ׀ր݀׀րۀـڀۀ܀܀؀؀׀ڀ܀ـڀ߀ԀրހӀ݀ـӀـڀր݀ـ؀րــۀۀӀ؀ڀ؀ڀۀ׀ـ݀؀ۀ݀ـ؀ڀـڀۀ׀ހӀـ߀ۀ؀ۀՀ܀ހۀӀ܀܀؀܀ۀ׀Հۀـۀ؀ۀـހր߀׀܀Ҁۀ܀ހڀڀրրۀՀ؀݀Ҁڀ܀ۀۀրـ߀ۀ܀ـր܀׀߀ـ܀ۀր؀ڀҀ؀ڀۀـՀۀՀڀ߀ۀ׀ՀԀۀ׀ۀۀ؀Հրڀڀ݀Հ׀؀ڀ׀܀ۀրـڀՀ׀рԀۀ؀܀ـ݀ڀـڀ؀ՀՀ׀ڀ׀݀рހ؀׀ـ׀܀ՀڀۀՀՀۀրԀԀڀ݀ڀ׀ۀހ؀ڀڀۀۀӀ׀ԀڀՀր؀Ӏހڀ׀؀րۀՀՀ؀рӀڀҀ؀؀πӀӀ׀րۀրҀр܀؀؀ӀԀӁ9,,#)40*7),'2.01&7+14:**+-,'*.10)#&7'3)025-)$)!633/%93%*:313(./*(,653%3011*-3(*'.//5/%3((/-/'&./!'+)2702:,'C-2:3 91:22;+20-./53*3!6..)'93?+ 50.2'23)(+0*,/(0!% <,4):031+"-<6('1'5-6,"42?67.1--) 5*/3*.61%$-+10@7((0151-"3/,)"&0()%"-,4:9/-377(9,/>2)676)+&&0#/*$)/28(2)4..*1,)0-1-1/3*1'/)<3E,%23%1-&-&55!'(O&=870(518((.,+-,04)''׀ԀՀ׀ـڀ׀րπÀـՀ׀ـ׀׀ـڀրҀ׀Հ؀Ӂ6)/+7/)70)4.+)'2+!+0)&+'52!,*1)4'5+3)/'51/1+/*3/!(/160@"13;./-*0!/1(*#1=:/01E://'./-*ccdproc-1.3.0.post1/ccdproc/tests/test_bitfield.py0000664000175000017500000000452513207605210023631 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import, division, unicode_literals import numpy as np import pytest from astropy.tests.helper import catch_warnings from ..core import bitfield_to_boolean_mask def test_bitfield_not_integer(): with pytest.raises(TypeError): bitfield_to_boolean_mask(np.random.random((10, 10))) def test_bitfield_negative_flags(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(ValueError): bitfield_to_boolean_mask(bm, [-1]) def test_bitfield_non_poweroftwo_flags(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(ValueError): bitfield_to_boolean_mask(bm, [3]) def test_bitfield_flipbits_when_no_bits(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(TypeError): bitfield_to_boolean_mask(bm, None, flip_bits=1) def 
test_bitfield_flipbits_when_stringbits(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(TypeError): bitfield_to_boolean_mask(bm, '3', flip_bits=1) def test_bitfield_string_flag_flip_not_start_of_string(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(ValueError): bitfield_to_boolean_mask(bm, '1, ~4') def test_bitfield_string_flag_unbalanced_parens(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(ValueError): bitfield_to_boolean_mask(bm, '(1, 4))') def test_bitfield_string_flag_wrong_positioned_parens(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(ValueError): bitfield_to_boolean_mask(bm, '((1, )4)') def test_bitfield_string_flag_empty(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(ValueError): bitfield_to_boolean_mask(bm, '~') def test_bitfield_flag_non_integer(): bm = np.random.randint(0, 10, (10, 10)) with pytest.raises(TypeError): bitfield_to_boolean_mask(bm, [1.3]) def test_bitfield_duplicate_flag_throws_warning(): bm = np.random.randint(0, 10, (10, 10)) with catch_warnings(UserWarning) as w: bitfield_to_boolean_mask(bm, [1, 1]) assert len(w) def test_bitfield_none_identical_to_strNone(): bm = np.random.randint(0, 10, (10, 10)) m1 = bitfield_to_boolean_mask(bm, None) m2 = bitfield_to_boolean_mask(bm, 'None') np.testing.assert_array_equal(m1, m2) ccdproc-1.3.0.post1/ccdproc/tests/test_rebin.py0000664000175000017500000000511013207605210023135 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np import pytest from astropy.nddata import StdDevUncertainty from astropy.tests.helper import catch_warnings from astropy.utils.exceptions import AstropyDeprecationWarning from ..core import rebin # test rebinning ndarray def test_rebin_ndarray(): with pytest.raises(TypeError), catch_warnings(AstropyDeprecationWarning): rebin(1, (5, 5)) # test rebinning 
dimensions @pytest.mark.data_size(10) def test_rebin_dimensions(ccd_data): with pytest.raises(ValueError), catch_warnings(AstropyDeprecationWarning): rebin(ccd_data.data, (5,)) # test rebinning dimensions @pytest.mark.data_size(10) def test_rebin_ccddata_dimensions(ccd_data): with pytest.raises(ValueError), catch_warnings(AstropyDeprecationWarning): rebin(ccd_data, (5,)) # test rebinning works @pytest.mark.data_size(10) def test_rebin_larger(ccd_data): a = ccd_data.data with catch_warnings(AstropyDeprecationWarning) as w: b = rebin(a, (20, 20)) assert len(w) >= 1 assert b.shape == (20, 20) np.testing.assert_almost_equal(b.sum(), 4 * a.sum()) # test rebinning is invariant @pytest.mark.data_size(10) def test_rebin_smaller(ccd_data): a = ccd_data.data with catch_warnings(AstropyDeprecationWarning) as w: b = rebin(a, (20, 20)) c = rebin(b, (10, 10)) assert len(w) >= 1 assert c.shape == (10, 10) assert (c-a).sum() == 0 # test rebinning with ccddata object @pytest.mark.parametrize('mask_data, uncertainty', [ (False, False), (True, True)]) @pytest.mark.data_size(10) def test_rebin_ccddata(ccd_data, mask_data, uncertainty): if mask_data: ccd_data.mask = np.zeros_like(ccd_data) if uncertainty: err = np.random.normal(size=ccd_data.shape) ccd_data.uncertainty = StdDevUncertainty(err) with catch_warnings(AstropyDeprecationWarning) as w: b = rebin(ccd_data, (20, 20)) assert len(w) >= 1 assert b.shape == (20, 20) if mask_data: assert b.mask.shape == (20, 20) if uncertainty: assert b.uncertainty.array.shape == (20, 20) def test_rebin_does_not_change_input(ccd_data): original = ccd_data.copy() with catch_warnings(AstropyDeprecationWarning) as w: ccd = rebin(ccd_data, (20,20)) assert len(w) >= 1 np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit ccdproc-1.3.0.post1/ccdproc/tests/test_keyword.py0000664000175000017500000000441613207605210023532 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see 
LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) from astropy.extern import six import pytest from astropy import units as u from astropy.io import fits from ..core import Keyword def test_keyword_init(): key_name = 'some_key' key = Keyword(key_name, unit=u.second) assert key.name == key_name assert key.unit == u.second def test_keyword_properties_read_only(): key = Keyword('observer') with pytest.raises(AttributeError): key.name = 'error' with pytest.raises(AttributeError): key.unit = u.hour unit = u.second numerical_value = 30 # The variable "expected" below is # True if the expected result is key.value == numerical_value * key.unit # Name of an error if an error is expected # A string if the expected value is a string @pytest.mark.parametrize('value,unit,expected', [ (numerical_value, unit, True), (numerical_value, None, ValueError), (numerical_value * unit, None, True), (numerical_value * unit, unit, True), (numerical_value * unit, u.km, True), ('some string', None, 'some string'), ('no strings with unit', unit, ValueError) ]) def test_value_setting(value, unit, expected): name = 'exposure' # Setting at initialization time with try: expected_is_error = issubclass(expected, Exception) except TypeError: expected_is_error = False if expected_is_error: with pytest.raises(expected): key = Keyword(name, unit=unit, value=value) else: key = Keyword(name, unit=unit, value=value) if isinstance(expected, six.string_types): assert key.value == expected else: assert key.value == numerical_value * key.unit def test_keyword_value_from_header(): name = 'exposure' numerical_value = 30 unit = u.second h = fits.Header() h[name] = numerical_value key = Keyword(name, unit=unit) assert key.value_from(h) == numerical_value * unit assert key.value == numerical_value * unit ccdproc-1.3.0.post1/ccdproc/tests/setup_package.py0000664000175000017500000000044213207605210023615 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause 
BSD style license - see LICENSE.rst def get_package_data(): return { _ASTROPY_PACKAGE_NAME_ + '.tests': ['coveragerc', 'data/a8280271.fits', 'data/sip-wcs.fit']} ccdproc-1.3.0.post1/ccdproc/tests/test_gain.py0000664000175000017500000000421213207605210022756 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np import pytest import astropy.units as u from ..core import create_deviation, gain_correct, Keyword # tests for gain @pytest.mark.parametrize('gain', [ 3.0, 3.0 * u.photon / u.adu, 3.0 * u.electron / u.adu, Keyword('gainval', unit=u.electron / u.adu)]) @pytest.mark.data_unit(u.adu) def test_linear_gain_correct(ccd_data, gain): # The data values should be positive, so the poisson noise calculation # works without throwing warnings ccd_data.data = np.absolute(ccd_data.data) ccd_data = create_deviation(ccd_data, readnoise=1.0 * u.adu) ccd_data.meta['gainval'] = 3.0 orig_data = ccd_data.data ccd = gain_correct(ccd_data, gain) if isinstance(gain, Keyword): gain = gain.value # convert to Quantity... 
try: gain_value = gain.value except AttributeError: gain_value = gain np.testing.assert_array_equal(ccd.data, gain_value * orig_data) np.testing.assert_array_equal(ccd.uncertainty.array, gain_value * ccd_data.uncertainty.array) if isinstance(gain, u.Quantity): assert ccd.unit == ccd_data.unit * gain.unit else: assert ccd.unit == ccd_data.unit # test gain with gain_unit @pytest.mark.data_unit(u.adu) def test_linear_gain_unit_keyword(ccd_data): # The data values should be positive, so the poisson noise calculation # works without throwing warnings ccd_data.data = np.absolute(ccd_data.data) ccd_data = create_deviation(ccd_data, readnoise=1.0 * u.adu) orig_data = ccd_data.data gain = 3.0 gain_unit = u.electron / u.adu ccd = gain_correct(ccd_data, gain, gain_unit=gain_unit) np.testing.assert_array_equal(ccd.data, gain * orig_data) np.testing.assert_array_equal(ccd.uncertainty.array, gain * ccd_data.uncertainty.array) assert ccd.unit == ccd_data.unit * gain_unit ccdproc-1.3.0.post1/ccdproc/tests/coveragerc0000664000175000017500000000131713207605210022475 0ustar mseifertmseifert00000000000000[run] source = ccdproc branch = False omit = ccdproc/_astropy_init* ccdproc/conftest* ccdproc/cython_version* ccdproc/setup_package* ccdproc/*/setup_package* ccdproc/*/*/setup_package* ccdproc/tests/* ccdproc/*/tests/* ccdproc/*/*/tests/* ccdproc/*version* [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about packages we have installed except ImportError # Don't complain if tests don't hit assertions raise AssertionError raise NotImplementedError # Don't complain about script hooks def main\(.*\): # Ignore branches that don't pertain to this version of Python pragma: py{ignore_python_version} ccdproc-1.3.0.post1/ccdproc/tests/test_ccdproc.py0000664000175000017500000011565113207605210023467 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, 
division, print_function, unicode_literals) import numpy as np from astropy.io import fits from astropy.modeling import models from astropy.units.quantity import Quantity import astropy.units as u from astropy.wcs import WCS from astropy.tests.helper import catch_warnings from astropy.utils.exceptions import AstropyUserWarning from astropy.nddata import StdDevUncertainty import astropy from numpy.testing import assert_array_equal import pytest from ..ccddata import CCDData from ..core import ( ccd_process, cosmicray_median, cosmicray_lacosmic, create_deviation, flat_correct, gain_correct, subtract_bias, subtract_dark, subtract_overscan, transform_image, trim_image, wcs_project, Keyword) from ..core import _blkavg try: from ..core import block_reduce, block_average, block_replicate HAS_BLOCK_X_FUNCS = True except ImportError: HAS_BLOCK_X_FUNCS = False # test creating deviation # success expected if u_image * u_gain = u_readnoise @pytest.mark.parametrize('u_image,u_gain,u_readnoise,expect_success', [ (u.electron, None, u.electron, True), (u.electron, u.electron, u.electron, False), (u.adu, u.electron / u.adu, u.electron, True), (u.electron, None, u.dimensionless_unscaled, False), (u.electron, u.dimensionless_unscaled, u.electron, True), (u.adu, u.dimensionless_unscaled, u.electron, False), (u.adu, u.photon / u.adu, u.electron, False), ]) @pytest.mark.data_size(10) def test_create_deviation(ccd_data, u_image, u_gain, u_readnoise, expect_success): ccd_data.unit = u_image if u_gain is not None: gain = 2.0 * u_gain else: gain = None readnoise = 5 * u_readnoise if expect_success: ccd_var = create_deviation(ccd_data, gain=gain, readnoise=readnoise) assert ccd_var.uncertainty.array.shape == (10, 10) assert ccd_var.uncertainty.array.size == 100 assert ccd_var.uncertainty.array.dtype == np.dtype(float) if gain is not None: expected_var = np.sqrt(2 * ccd_data.data + 5 ** 2) / 2 else: expected_var = np.sqrt(ccd_data.data + 5 ** 2) 
np.testing.assert_array_equal(ccd_var.uncertainty.array, expected_var) assert ccd_var.unit == ccd_data.unit # uncertainty should *not* have any units -- does it? with pytest.raises(AttributeError): ccd_var.uncertainty.array.unit else: with pytest.raises(u.UnitsError): ccd_var = create_deviation(ccd_data, gain=gain, readnoise=readnoise) def test_create_deviation_keywords_must_have_unit(ccd_data): # gain must have units if provided with pytest.raises(TypeError): create_deviation(ccd_data, gain=3) # readnoise must have units with pytest.raises(TypeError): create_deviation(ccd_data, readnoise=5) # readnoise must be provided with pytest.raises(ValueError): create_deviation(ccd_data) # tests for overscan @pytest.mark.parametrize('data_rectangle', [False, True]) @pytest.mark.parametrize('median,transpose', [ (False, False), (False, True), (True, False), ]) def test_subtract_overscan(ccd_data, median, transpose, data_rectangle): # Make data non-square if desired if data_rectangle: ccd_data.data = ccd_data.data[:, :-30] # create the overscan region oscan = 300. oscan_region = (slice(None), slice(0, 10)) # indices 0 through 9 fits_section = '[1:10, :]' science_region = (slice(None), slice(10, None)) overscan_axis = 1 if transpose: # Put overscan in first axis, not second, a test for #70 oscan_region = oscan_region[::-1] fits_section = '[:, 1:10]' science_region = science_region[::-1] overscan_axis = 0 ccd_data.data[oscan_region] = oscan # Add a fake sky background so the "science" part of the image has a # different average than the "overscan" part. sky = 10. 
original_mean = ccd_data.data[science_region].mean() ccd_data.data[science_region] += oscan + sky # Test once using the overscan argument to specify the overscan region ccd_data_overscan = subtract_overscan(ccd_data, overscan=ccd_data[oscan_region], overscan_axis=overscan_axis, median=median, model=None) # Is the mean of the "science" region the sum of sky and the mean the # "science" section had before backgrounds were added? np.testing.assert_almost_equal( ccd_data_overscan.data[science_region].mean(), sky + original_mean) # Is the overscan region zero? assert (ccd_data_overscan.data[oscan_region] == 0).all() # Now do what should be the same subtraction, with the overscan specified # with the fits_section ccd_data_fits_section = subtract_overscan(ccd_data, overscan_axis=overscan_axis, fits_section=fits_section, median=median, model=None) # Is the mean of the "science" region the sum of sky and the mean the # "science" section had before backgrounds were added? np.testing.assert_almost_equal( ccd_data_fits_section.data[science_region].mean(), sky + original_mean) # Is the overscan region zero? assert (ccd_data_fits_section.data[oscan_region] == 0).all() # Do both ways of subtracting overscan give exactly the same result? np.testing.assert_array_equal(ccd_data_overscan[science_region], ccd_data_fits_section[science_region]) # Set overscan_axis to None, and let the routine figure out the axis. # This should lead to the same results as before. 
ccd_data_overscan_auto = subtract_overscan( ccd_data, overscan_axis=None, overscan=ccd_data[oscan_region], median=median, model=None) np.testing.assert_almost_equal( ccd_data_overscan_auto.data[science_region].mean(), sky + original_mean) # Use overscan_axis=None with a FITS section ccd_data_fits_section_overscan_auto = subtract_overscan( ccd_data, overscan_axis=None, fits_section=fits_section, median=median, model=None) np.testing.assert_almost_equal( ccd_data_fits_section_overscan_auto.data[science_region].mean(), sky + original_mean) # overscan_axis should be 1 for a square overscan region # This test only works for a non-square data region, but the # default has the wrong axis. if data_rectangle: ccd_data.data = ccd_data.data.T oscan_region = (slice(None), slice(0, -30)) science_region = (slice(None), slice(-30, None)) ccd_data_square_overscan_auto = subtract_overscan( ccd_data, overscan_axis=None, overscan=ccd_data[oscan_region], median=median, model=None) ccd_data_square = subtract_overscan( ccd_data, overscan_axis=1, overscan=ccd_data[oscan_region], median=median, model=None) np.testing.assert_allclose(ccd_data_square_overscan_auto, ccd_data_square) # A more substantial test of overscan modeling @pytest.mark.parametrize('transpose', [ True, False]) def test_subtract_overscan_model(ccd_data, transpose): # create the overscan region size = ccd_data.shape[0] oscan_region = (slice(None), slice(0, 10)) science_region = (slice(None), slice(10, None)) yscan, xscan = np.mgrid[0:size, 0:size] / 10.0 + 300.0 if transpose: oscan_region = oscan_region[::-1] science_region = science_region[::-1] scan = xscan overscan_axis = 0 else: overscan_axis = 1 scan = yscan original_mean = ccd_data.data[science_region].mean() ccd_data.data[oscan_region] = 0. 
# only want overscan in that region ccd_data.data = ccd_data.data + scan ccd_data = subtract_overscan(ccd_data, overscan=ccd_data[oscan_region], overscan_axis=overscan_axis, median=False, model=models.Polynomial1D(2)) np.testing.assert_almost_equal(ccd_data.data[science_region].mean(), original_mean) # Set the overscan_axis explicitly to None, and let the routine # figure it out. ccd_data = subtract_overscan(ccd_data, overscan=ccd_data[oscan_region], overscan_axis=None, median=False, model=models.Polynomial1D(2)) np.testing.assert_almost_equal(ccd_data.data[science_region].mean(), original_mean) def test_subtract_overscan_fails(ccd_data): # do we get an error if the *image* is neither CCDData nor an array? with pytest.raises(TypeError): subtract_overscan(3, np.zeros((5, 5))) # do we get an error if the *overscan* is not an image or an array? with pytest.raises(TypeError): subtract_overscan(np.zeros((10, 10)), 3, median=False, model=None) # Do we get an error if we specify both overscan and fits_section? with pytest.raises(TypeError): subtract_overscan(ccd_data, overscan=ccd_data[0:10], fits_section='[1:10]') # do we raise an error if we specify neither overscan nor fits_section? with pytest.raises(TypeError): subtract_overscan(ccd_data) # Does a fits_section which is not a string raise an error? 
with pytest.raises(TypeError): subtract_overscan(ccd_data, fits_section=5) def test_trim_image_fits_section_requires_string(ccd_data): with pytest.raises(TypeError): trim_image(ccd_data, fits_section=5) @pytest.mark.parametrize('mask_data, uncertainty', [ (False, False), (True, True)]) @pytest.mark.data_size(50) def test_trim_image_fits_section(ccd_data, mask_data, uncertainty): if mask_data: ccd_data.mask = np.zeros_like(ccd_data) if uncertainty: err = np.random.normal(size=ccd_data.shape) ccd_data.uncertainty = StdDevUncertainty(err) trimmed = trim_image(ccd_data, fits_section='[20:40,:]') # FITS reverse order, bounds are inclusive and starting index is 1-based assert trimmed.shape == (50, 21) np.testing.assert_array_equal(trimmed.data, ccd_data[:, 19:40]) if mask_data: assert trimmed.shape == trimmed.mask.shape if uncertainty: assert trimmed.shape == trimmed.uncertainty.array.shape @pytest.mark.data_size(50) def test_trim_image_no_section(ccd_data): trimmed = trim_image(ccd_data[:, 19:40]) assert trimmed.shape == (50, 21) np.testing.assert_array_equal(trimmed.data, ccd_data[:, 19:40]) def test_trim_with_wcs_alters_wcs(ccd_data): # WCS construction example pulled form astropy.wcs docs wcs = WCS(naxis=2) wcs.wcs.crpix = np.array(ccd_data.shape)/2 wcs.wcs.cdelt = np.array([-0.066667, 0.066667]) wcs.wcs.crval = [0, -90] wcs.wcs.ctype = ["RA---AIR", "DEC--AIR"] wcs.wcs.set_pv([(2, 1, 45.0)]) ccd_wcs = CCDData(ccd_data, wcs=wcs) # The trim below should subtract 10 from the 2nd element of crpix. 
# (Second element because the FITS convention for index ordering is # opposite that of python) trimmed = trim_image(ccd_wcs[10:, :]) assert trimmed.wcs.wcs.crpix[1] == wcs.wcs.crpix[1] - 10 def test_subtract_bias(ccd_data): data_avg = ccd_data.data.mean() bias_level = 5.0 ccd_data.data = ccd_data.data + bias_level ccd_data.header['key'] = 'value' master_bias_array = np.zeros_like(ccd_data.data) + bias_level master_bias = CCDData(master_bias_array, unit=ccd_data.unit) no_bias = subtract_bias(ccd_data, master_bias, add_keyword=None) # Does the data we are left with have the correct average? np.testing.assert_almost_equal(no_bias.data.mean(), data_avg) # With logging turned off, metadata should not change assert no_bias.header == ccd_data.header del no_bias.header['key'] assert 'key' in ccd_data.header assert no_bias.header is not ccd_data.header @pytest.mark.data_size(50) def test_subtract_bias_fails(ccd_data): # Should fail if shapes don't match bias = CCDData(np.array([200, 200]), unit=u.adu) with pytest.raises(ValueError): subtract_bias(ccd_data, bias) # should fail because units don't match bias = CCDData(np.zeros_like(ccd_data), unit=u.meter) with pytest.raises(u.UnitsError): subtract_bias(ccd_data, bias) @pytest.mark.parametrize('exposure_keyword', [True, False]) @pytest.mark.parametrize('explicit_times', [True, False]) @pytest.mark.parametrize('scale', [True, False]) def test_subtract_dark(ccd_data, explicit_times, scale, exposure_keyword): exptime = 30.0 exptime_key = 'exposure' exposure_unit = u.second dark_level = 1.7 master_dark_data = np.zeros_like(ccd_data.data) + dark_level master_dark = CCDData(master_dark_data, unit=u.adu) master_dark.header[exptime_key] = 2 * exptime dark_exptime = master_dark.header[exptime_key] ccd_data.header[exptime_key] = exptime dark_exposure_unit = exposure_unit if explicit_times: # test case when units of dark and data exposures are different dark_exposure_unit = u.minute dark_sub = subtract_dark(ccd_data, master_dark, 
dark_exposure=dark_exptime * dark_exposure_unit, data_exposure=exptime * exposure_unit, scale=scale, add_keyword=None) elif exposure_keyword: key = Keyword(exptime_key, unit=u.second) dark_sub = subtract_dark(ccd_data, master_dark, exposure_time=key, scale=scale, add_keyword=None) else: dark_sub = subtract_dark(ccd_data, master_dark, exposure_time=exptime_key, exposure_unit=u.second, scale=scale, add_keyword=None) dark_scale = 1.0 if scale: dark_scale = float((exptime / dark_exptime) * (exposure_unit / dark_exposure_unit)) np.testing.assert_array_equal(ccd_data.data - dark_scale * dark_level, dark_sub.data) # Headers should have the same content...do they? assert dark_sub.header == ccd_data.header # But the headers should not be the same object -- a copy was made assert dark_sub.header is not ccd_data.header def test_subtract_dark_fails(ccd_data): # None of these tests check a result so the content of the master # can be anything. ccd_data.header['exptime'] = 30.0 master = ccd_data.copy() # Do we fail if we give one of dark_exposure, data_exposure but not both? with pytest.raises(TypeError): subtract_dark(ccd_data, master, dark_exposure=30 * u.second) with pytest.raises(TypeError): subtract_dark(ccd_data, master, data_exposure=30 * u.second) # Do we fail if we supply dark_exposure and data_exposure and exposure_time with pytest.raises(TypeError): subtract_dark(ccd_data, master, dark_exposure=10 * u.second, data_exposure=10 * u.second, exposure_time='exptime') # Fail if we supply none of the exposure-related arguments? with pytest.raises(TypeError): subtract_dark(ccd_data, master) # Fail if we supply exposure time but not a unit? with pytest.raises(TypeError): subtract_dark(ccd_data, master, exposure_time='exptime') # Fail if ccd_data or master are not CCDData objects? 
with pytest.raises(TypeError): subtract_dark(ccd_data.data, master, exposure_time='exptime') with pytest.raises(TypeError): subtract_dark(ccd_data, master.data, exposure_time='exptime') # Fail if units do not match... # ...when there is no scaling? master = CCDData(ccd_data) master.unit = u.meter with pytest.raises(u.UnitsError) as e: subtract_dark(ccd_data, master, exposure_time='exptime', exposure_unit=u.second) assert "uncalibrated image" in str(e.value) def test_unit_mismatch_behaves_as_expected(ccd_data): """ Test to alert us to any changes in how errors are raised in astropy when units do not match. """ bad_unit = ccd_data.copy() bad_unit.unit = u.meter if astropy.__version__.startswith('1.0'): expected_error = ValueError expected_message = 'operand units' else: expected_error = u.UnitConversionError # Make this an empty string, which always matches. In this case # we are really only checking by the type of error raised. expected_message = '' # Did we raise the right error? with pytest.raises(expected_error) as e: ccd_data.subtract(bad_unit) # Was the error message as expected? assert expected_message in str(e) # test for flat correction @pytest.mark.data_scale(10) def test_flat_correct(ccd_data): # add metadata to header for a test below... ccd_data.header['my_key'] = 42 size = ccd_data.shape[0] # create the flat, with some scatter data = 2 * np.random.normal(loc=1.0, scale=0.05, size=(size, size)) flat = CCDData(data, meta=fits.header.Header(), unit=ccd_data.unit) flat_data = flat_correct(ccd_data, flat, add_keyword=None) #check that the flat was normalized # Should be the case that flat * flat_data = ccd_data * flat.data.mean # if the normalization was done correctly. 
np.testing.assert_almost_equal((flat_data.data * flat.data).mean(), ccd_data.data.mean() * flat.data.mean()) np.testing.assert_allclose(ccd_data.data / flat_data.data, flat.data / flat.data.mean()) # check that metadata is unchanged (since logging is turned off) assert flat_data.header == ccd_data.header # test for flat correction with min_value @pytest.mark.data_scale(1) @pytest.mark.data_mean(5) def test_flat_correct_min_value(ccd_data): size = ccd_data.shape[0] # create the flat data = 2 * np.random.normal(loc=1.0, scale=0.05, size=(size, size)) flat = CCDData(data, meta=fits.header.Header(), unit=ccd_data.unit) flat_orig_data = flat.data.copy() min_value = 2.1 # should replace some, but not all, values flat_corrected_data = flat_correct(ccd_data, flat, min_value=min_value) flat_with_min = flat.copy() flat_with_min.data[flat_with_min.data < min_value] = min_value # Check that the flat was normalized. The asserts below, which look a # little odd, are correctly testing that # flat_corrected_data = ccd_data / (flat_with_min / mean(flat_with_min)) np.testing.assert_almost_equal( (flat_corrected_data.data * flat_with_min.data).mean(), (ccd_data.data * flat_with_min.data.mean()).mean() ) np.testing.assert_allclose(ccd_data.data / flat_corrected_data.data, flat_with_min.data / flat_with_min.data.mean()) # Test that flat is not modified. assert (flat_orig_data == flat.data).all() assert flat_orig_data is not flat.data @pytest.mark.data_scale(10) def test_flat_correct_norm_value(ccd_data): # Test flat correction with mean value that is different than # the mean of the flat frame. # create the flat, with some scatter # Note that mean value of flat is set below and is different than # the mean of the flat data. 
flat_mean = 5.0 data = np.random.normal(loc=1.0, scale=0.05, size=ccd_data.shape) flat = CCDData(data, meta=fits.Header(), unit=ccd_data.unit) flat_data = flat_correct(ccd_data, flat, add_keyword=None, norm_value=flat_mean) # check that the flat was normalized # Should be the case that flat * flat_data = ccd_data * flat_mean # if the normalization was done correctly. np.testing.assert_almost_equal((flat_data.data * flat.data).mean(), ccd_data.data.mean() * flat_mean) np.testing.assert_allclose(ccd_data.data / flat_data.data, flat.data / flat_mean) def test_flat_correct_norm_value_bad_value(ccd_data): # Test that flat_correct raises the appropriate error if # it is given a bad norm_value. Bad means <=0. # create the flat, with some scatter data = np.random.normal(loc=1.0, scale=0.05, size=ccd_data.shape) flat = CCDData(data, meta=fits.Header(), unit=ccd_data.unit) with pytest.raises(ValueError) as e: flat_correct(ccd_data, flat, add_keyword=None, norm_value=-7) assert "norm_value must be" in str(e) # test for deviation and for flat correction @pytest.mark.data_scale(10) @pytest.mark.data_mean(300) def test_flat_correct_deviation(ccd_data): size = ccd_data.shape[0] ccd_data.unit = u.electron ccd_data = create_deviation(ccd_data, readnoise=5 * u.electron) # create the flat data = 2 * np.ones((size, size)) flat = CCDData(data, meta=fits.header.Header(), unit=ccd_data.unit) flat = create_deviation(flat, readnoise=0.5 * u.electron) ccd_data = flat_correct(ccd_data, flat) # test the uncertainty on the data after flat correction def test_flat_correct_data_uncertainty(): # Regression test for #345 dat = CCDData(np.ones([100, 100]), unit='adu', uncertainty=np.ones([100, 100])) # Note flat is set to 10, error, if present, is set to one. 
flat = CCDData(10 * np.ones([100, 100]), unit='adu') res = flat_correct(dat, flat) assert (res.data == dat.data).all() assert (res.uncertainty.array == dat.uncertainty.array).all() # tests for gain correction def test_gain_correct(ccd_data): init_data = ccd_data.data gain_data = gain_correct(ccd_data, gain=3, add_keyword=None) assert_array_equal(gain_data.data, 3 * init_data) assert ccd_data.meta == gain_data.meta def test_gain_correct_quantity(ccd_data): init_data = ccd_data.data g = Quantity(3, u.electron / u.adu) ccd_data = gain_correct(ccd_data, gain=g) assert_array_equal(ccd_data.data, 3 * init_data) assert ccd_data.unit == u.electron #test transform is ccd def test_transform_isccd(): with pytest.raises(TypeError): transform_image(1, 1) #test function is callable def test_transform_isfunc(ccd_data): with pytest.raises(TypeError): transform_image(ccd_data, 1) @pytest.mark.parametrize('mask_data, uncertainty', [ (False, False), (True, True)]) @pytest.mark.data_size(50) def test_transform_image(ccd_data, mask_data, uncertainty): if mask_data: ccd_data.mask = np.zeros_like(ccd_data) ccd_data.mask[10, 10] = 1 if uncertainty: err = np.random.normal(size=ccd_data.shape) ccd_data.uncertainty = StdDevUncertainty(err) def tran(arr): return 10 * arr tran = transform_image(ccd_data, tran) assert_array_equal(10 * ccd_data.data, tran.data) if mask_data: assert tran.shape == tran.mask.shape assert_array_equal(ccd_data.mask, tran.mask) if uncertainty: assert tran.shape == tran.uncertainty.array.shape assert_array_equal(10 * ccd_data.uncertainty.array, tran.uncertainty.array) # test block_reduce and block_replicate wrapper @pytest.mark.skipif(not HAS_BLOCK_X_FUNCS, reason="needs astropy >= 1.1.x") def test_block_reduce(): ccd = CCDData(np.ones((4, 4)), unit='adu', meta={'testkw': 1}, mask=np.zeros((4, 4), dtype=bool), uncertainty=StdDevUncertainty(np.ones((4, 4))), wcs=np.zeros((4, 4))) with catch_warnings(AstropyUserWarning) as w: ccd_summed = block_reduce(ccd, (2, 2)) assert 
len(w) == 1 assert 'following attributes were set' in str(w[0].message) assert isinstance(ccd_summed, CCDData) assert np.all(ccd_summed.data == 4) assert ccd_summed.data.shape == (2, 2) assert ccd_summed.unit == u.adu # Other attributes are set to None. In case the function is modified to # work on these attributes correctly those tests need to be updated! assert ccd_summed.meta == {'testkw': 1} assert ccd_summed.mask is None assert ccd_summed.wcs is None assert ccd_summed.uncertainty is None # Make sure meta is copied ccd_summed.meta['testkw2'] = 10 assert 'testkw2' not in ccd.meta @pytest.mark.skipif(not HAS_BLOCK_X_FUNCS, reason="needs astropy >= 1.1.x") def test_block_average(): ccd = CCDData(np.ones((4, 4)), unit='adu', meta={'testkw': 1}, mask=np.zeros((4, 4), dtype=bool), uncertainty=StdDevUncertainty(np.ones((4, 4))), wcs=np.zeros((4, 4))) ccd.data[::2, ::2] = 2 with catch_warnings(AstropyUserWarning) as w: ccd_avgd = block_average(ccd, (2, 2)) assert len(w) == 1 assert 'following attributes were set' in str(w[0].message) assert isinstance(ccd_avgd, CCDData) assert np.all(ccd_avgd.data == 1.25) assert ccd_avgd.data.shape == (2, 2) assert ccd_avgd.unit == u.adu # Other attributes are set to None. In case the function is modified to # work on these attributes correctly those tests need to be updated! 
assert ccd_avgd.meta == {'testkw': 1} assert ccd_avgd.mask is None assert ccd_avgd.wcs is None assert ccd_avgd.uncertainty is None # Make sure meta is copied ccd_avgd.meta['testkw2'] = 10 assert 'testkw2' not in ccd.meta @pytest.mark.skipif(not HAS_BLOCK_X_FUNCS, reason="needs astropy >= 1.1.x") def test_block_replicate(): ccd = CCDData(np.ones((4, 4)), unit='adu', meta={'testkw': 1}, mask=np.zeros((4, 4), dtype=bool), uncertainty=StdDevUncertainty(np.ones((4, 4))), wcs=np.zeros((4, 4))) with catch_warnings(AstropyUserWarning) as w: ccd_repl = block_replicate(ccd, (2, 2)) assert len(w) == 1 assert 'following attributes were set' in str(w[0].message) assert isinstance(ccd_repl, CCDData) assert np.all(ccd_repl.data == 0.25) assert ccd_repl.data.shape == (8, 8) assert ccd_repl.unit == u.adu # Other attributes are set to None. In case the function is modified to # work on these attributes correctly those tests need to be updated! assert ccd_repl.meta == {'testkw': 1} assert ccd_repl.mask is None assert ccd_repl.wcs is None assert ccd_repl.uncertainty is None # Make sure meta is copied ccd_repl.meta['testkw2'] = 10 assert 'testkw2' not in ccd.meta #test blockaveraging ndarray def test__blkavg_ndarray(): with pytest.raises(TypeError): _blkavg(1, (5, 5)) #test rebinning dimensions @pytest.mark.data_size(10) def test__blkavg_dimensions(ccd_data): with pytest.raises(ValueError): _blkavg(ccd_data.data, (5,)) #test blkavg works @pytest.mark.data_size(20) def test__blkavg_larger(ccd_data): a = ccd_data.data b = _blkavg(a, (10, 10)) assert b.shape == (10, 10) np.testing.assert_almost_equal(b.sum(), 0.25 * a.sum()) #test overscan changes def test__overscan_schange(ccd_data): old_data = ccd_data.copy() new_data = subtract_overscan(ccd_data, overscan=ccd_data[:,1], overscan_axis=0) assert not np.allclose(old_data.data, new_data.data) np.testing.assert_array_equal(old_data.data, ccd_data.data) def test_create_deviation_does_not_change_input(ccd_data): original = ccd_data.copy() ccd 
= create_deviation(ccd_data, gain=5 * u.electron / u.adu, readnoise=10 * u.electron) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_cosmicray_median_does_not_change_input(ccd_data): original = ccd_data.copy() error = np.zeros_like(ccd_data) ccd = cosmicray_median(ccd_data,error_image=error, thresh=5, mbox=11, gbox=0, rbox=0) np.testing.assert_array_equal(original.data,ccd_data.data) assert original.unit == ccd_data.unit def test_cosmicray_lacosmic_does_not_change_input(ccd_data): original = ccd_data.copy() error = np.zeros_like(ccd_data) ccd = cosmicray_lacosmic(ccd_data) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_flat_correct_does_not_change_input(ccd_data): original = ccd_data.copy() flat = CCDData(np.zeros_like(ccd_data), unit=ccd_data.unit) ccd = flat_correct(ccd_data,flat=flat) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_gain_correct_does_not_change_input(ccd_data): original = ccd_data.copy() ccd = gain_correct(ccd_data, gain=1, gain_unit=ccd_data.unit) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_subtract_bias_does_not_change_input(ccd_data): original = ccd_data.copy() master_frame = CCDData(np.zeros_like(ccd_data), unit=ccd_data.unit) ccd = subtract_bias(ccd_data, master=master_frame) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_trim_image_does_not_change_input(ccd_data): original = ccd_data.copy() ccd = trim_image(ccd_data, fits_section=None) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_transform_image_does_not_change_input(ccd_data): original = ccd_data.copy() ccd = transform_image(ccd_data, np.sqrt) np.testing.assert_array_equal(original.data, ccd_data) assert original.unit == 
ccd_data.unit def wcs_for_testing(shape): # Set up a simply WCS, details are cut/pasted from astropy WCS docs, # mostly. CRPIX is set to the center of shape, rounded down. # Create a new WCS object. The number of axes must be set # from the start w = WCS(naxis=2) # Set up an "Airy's zenithal" projection # Vector properties may be set with Python lists, or Numpy arrays w.wcs.crpix = [shape[0] // 2, shape[1] // 2] w.wcs.cdelt = np.array([-0.066667, 0.066667]) w.wcs.crval = [0, -90] w.wcs.ctype = ["RA---AIR", "DEC--AIR"] w.wcs.set_pv([(2, 1, 45.0)]) return w def test_wcs_project_onto_same_wcs(ccd_data): # The trivial case, same WCS, no mask. target_wcs = wcs_for_testing(ccd_data.shape) ccd_data.wcs = wcs_for_testing(ccd_data.shape) new_ccd = wcs_project(ccd_data, target_wcs) # Make sure new image has correct WCS. assert new_ccd.wcs.wcs.compare(target_wcs.wcs) # Make sure data matches within some reasonable tolerance. np.testing.assert_allclose(ccd_data.data, new_ccd.data, rtol=1e-5) def test_wcs_project_onto_shifted_wcs(ccd_data): # Just make the target WCS the same as the initial with the center # pixel shifted by 1 in x and y. ccd_data.wcs = wcs_for_testing(ccd_data.shape) target_wcs = wcs_for_testing(ccd_data.shape) target_wcs.wcs.crpix += [1, 1] ccd_data.mask = np.random.choice([0, 1], size=ccd_data.shape) new_ccd = wcs_project(ccd_data, target_wcs) # Make sure new image has correct WCS. assert new_ccd.wcs.wcs.compare(target_wcs.wcs) # Make sure data matches within some reasonable tolerance, keeping in mind # that the pixels should all be shifted. masked_input = np.ma.array(ccd_data.data, mask=ccd_data.mask) masked_output = np.ma.array(new_ccd.data, mask=new_ccd.mask) np.testing.assert_allclose(masked_input[:-1, :-1], masked_output[1:, 1:], rtol=1e-5) # The masks should all be shifted too. 
np.testing.assert_array_equal(ccd_data.mask[:-1, :-1], new_ccd.mask[1:, 1:]) # We should have more values that are masked in the output array # than on input because some on output were not in the footprint # of the original array. # In the case of a shift, one row and one column should be nan, and they # will share one common nan where they intersect, so we know how many nan # there should be. assert np.isnan(new_ccd.data).sum() == np.sum(new_ccd.shape) - 1 # Use an odd number of pixels to make a well-defined center pixel @pytest.mark.data_size(31) def test_wcs_project_onto_scale_wcs(ccd_data): # Make the target WCS with half the pixel scale and number of pixels # and the values should drop by a factor of 4. ccd_data.wcs = wcs_for_testing(ccd_data.shape) # Make sure wcs is centered at the center of the center pixel. ccd_data.wcs.wcs.crpix += 0.5 # Use uniform input data value for simplicity. ccd_data.data = np.ones_like(ccd_data.data) # Make mask zero... ccd_data.mask = np.zeros_like(ccd_data.data) # ...except the center pixel, which is one. ccd_data.mask[int(ccd_data.wcs.wcs.crpix[0]), int(ccd_data.wcs.wcs.crpix[1])] = 1 target_wcs = wcs_for_testing(ccd_data.shape) target_wcs.wcs.cdelt /= 2 # Choice below ensures we are really at the center pixel of an odd range. target_shape = 2 * np.array(ccd_data.shape) + 1 target_wcs.wcs.crpix = 2 * target_wcs.wcs.crpix + 1 + 0.5 # Explicitly set the interpolation method so we know what to # expect for the mass. new_ccd = wcs_project(ccd_data, target_wcs, target_shape=target_shape, order='nearest-neighbor') # Make sure new image has correct WCS. assert new_ccd.wcs.wcs.compare(target_wcs.wcs) # Define a cutout from the new array that should match the old. 
new_lower_bound = (np.array(new_ccd.shape) - np.array(ccd_data.shape)) // 2 new_upper_bound = (np.array(new_ccd.shape) + np.array(ccd_data.shape)) // 2 data_cutout = new_ccd.data[new_lower_bound[0]:new_upper_bound[0], new_lower_bound[1]:new_upper_bound[1]] # Make sure data matches within some reasonable tolerance, keeping in mind # that the pixels have been scaled. np.testing.assert_allclose(ccd_data.data / 4, data_cutout, rtol=1e-5) # Mask should be true for four pixels (all nearest neighbors) # of the single pixel we masked initially. new_center = np.array(new_ccd.wcs.wcs.crpix, dtype=int, copy=False) assert np.all(new_ccd.mask[new_center[0]:new_center[0]+2, new_center[1]:new_center[1]+2]) # Those four, and any that reproject made nan because they draw on # pixels outside the footprint of the original image, are the only # pixels that should be masked. assert new_ccd.mask.sum() == 4 + np.isnan(new_ccd.data).sum() def test_ccd_process_does_not_change_input(ccd_data): original = ccd_data.copy() ccd = ccd_process(ccd_data, gain=5 * u.electron / u.adu, readnoise=10 * u.electron) np.testing.assert_array_equal(original.data, ccd_data.data) assert original.unit == ccd_data.unit def test_ccd_process_parameters_are_appropriate(ccd_data): # oscan check with pytest.raises(TypeError): ccd_process(ccd_data, oscan=True) # trim section check with pytest.raises(TypeError): ccd_process(ccd_data, trim=True) # error frame check # gain and readnoise must be specified with pytest.raises(ValueError): ccd_process(ccd_data, error=True) # gain must be specified with pytest.raises(ValueError): ccd_process(ccd_data, error=True, gain=None, readnoise=5) # mask check with pytest.raises(TypeError): ccd_process(ccd_data, bad_pixel_mask=3) # master bias check with pytest.raises(TypeError): ccd_process(ccd_data, master_bias=3) # master flat check with pytest.raises(TypeError): ccd_process(ccd_data, master_flat=3) def test_ccd_process(): # test the through ccd_process ccd_data = CCDData(10.0 * 
np.ones((100, 100)), unit=u.adu) ccd_data.data[:, -10:] = 2 ccd_data.meta['testkw'] = 100 mask = np.zeros((100, 90)) masterbias = CCDData(2.0 * np.ones((100, 90)), unit=u.electron) masterbias.uncertainty = StdDevUncertainty(np.zeros((100, 90))) dark_frame = CCDData(0.0 * np.ones((100, 90)), unit=u.electron) dark_frame.uncertainty = StdDevUncertainty(np.zeros((100, 90))) masterflat = CCDData(10.0 * np.ones((100, 90)), unit=u.electron) masterflat.uncertainty = StdDevUncertainty(np.zeros((100, 90))) occd = ccd_process(ccd_data, oscan=ccd_data[:, -10:], trim='[1:90,1:100]', error=True, master_bias=masterbias, master_flat=masterflat, dark_frame=dark_frame, bad_pixel_mask=mask, gain=0.5 * u.electron/u.adu, readnoise=5**0.5 * u.electron, oscan_median=True, dark_scale=False, dark_exposure=1.*u.s, data_exposure=1.*u.s) # final results should be (10 - 2) / 2.0 - 2 = 2 # error should be (4 + 5)**0.5 / 0.5 = 3.0 np.testing.assert_array_equal(2.0 * np.ones((100, 90)), occd.data) np.testing.assert_almost_equal(3.0 * np.ones((100, 90)), occd.uncertainty.array) np.testing.assert_array_equal(mask, occd.mask) assert(occd.unit == u.electron) # Make sure the original keyword is still present. 
Regression test for #401 assert occd.meta['testkw'] == 100 def test_ccd_process_gain_corrected(): # test the through ccd_process with gain_corrected as False ccd_data = CCDData(10.0 * np.ones((100, 100)), unit=u.adu) ccd_data.data[:, -10:] = 2 ccd_data.meta['testkw'] = 100 mask = np.zeros((100, 90)) masterbias = CCDData(4.0 * np.ones((100, 90)), unit=u.adu) masterbias.uncertainty = StdDevUncertainty(np.zeros((100, 90))) dark_frame = CCDData(0.0 * np.ones((100, 90)), unit=u.adu) dark_frame.uncertainty = StdDevUncertainty(np.zeros((100, 90))) masterflat = CCDData(5.0 * np.ones((100, 90)), unit=u.adu) masterflat.uncertainty = StdDevUncertainty(np.zeros((100, 90))) occd = ccd_process(ccd_data, oscan=ccd_data[:, -10:], trim='[1:90,1:100]', error=True, master_bias=masterbias, master_flat=masterflat, dark_frame=dark_frame, bad_pixel_mask=mask, gain=0.5 * u.electron/u.adu, readnoise=5**0.5 * u.electron, oscan_median=True, dark_scale=False, dark_exposure=1.*u.s, data_exposure=1.*u.s, gain_corrected=False) # final results should be (10 - 2) / 2.0 - 2 = 2 # error should be (4 + 5)**0.5 / 0.5 = 3.0 np.testing.assert_array_equal(2.0 * np.ones((100, 90)), occd.data) np.testing.assert_almost_equal(3.0 * np.ones((100, 90)), occd.uncertainty.array) np.testing.assert_array_equal(mask, occd.mask) assert(occd.unit == u.electron) # Make sure the original keyword is still present. Regression test for #401 assert occd.meta['testkw'] == 100 ccdproc-1.3.0.post1/ccdproc/tests/test_wrapped_external_funcs.py0000664000175000017500000000527213207605210026611 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np from astropy.nddata import StdDevUncertainty from scipy import ndimage from ..ccddata import CCDData from .. 
# --- ``import core`` completes a relative import split across source
# lines (reads ``from .. import core`` in the original module). ---
import core


def test_medianfilter_correct():
    """core.median_filter on a CCDData returns a CCDData holding the
    median-filtered values with the unit preserved and all optional
    attributes (mask, uncertainty, wcs, flags) dropped."""
    ccd = CCDData([[2, 6, 6, 1, 7, 2, 4, 5, 9, 1],
                   [10, 10, 9, 0, 2, 10, 8, 3, 9, 7],
                   [2, 4, 0, 4, 4, 10, 0, 5, 6, 5],
                   [7, 10, 8, 7, 7, 0, 5, 3, 5, 9],
                   [9, 6, 3, 8, 6, 9, 2, 8, 10, 10],
                   [6, 5, 1, 7, 8, 0, 8, 2, 9, 3],
                   [0, 6, 0, 6, 3, 10, 8, 9, 7, 8],
                   [5, 8, 3, 2, 3, 0, 2, 0, 3, 5],
                   [9, 6, 3, 7, 1, 0, 5, 4, 8, 3],
                   [5, 6, 9, 9, 0, 4, 9, 1, 7, 8]], unit='adu')
    result = core.median_filter(ccd, 3)
    assert isinstance(result, CCDData)
    # Expected values: scipy.ndimage.median_filter with size 3.
    assert np.all(result.data == [[6, 6, 6, 6, 2, 4, 4, 5, 5, 7],
                                  [4, 6, 4, 4, 4, 4, 5, 5, 5, 6],
                                  [7, 8, 7, 4, 4, 5, 5, 5, 5, 7],
                                  [7, 6, 6, 6, 7, 5, 5, 5, 6, 9],
                                  [7, 6, 7, 7, 7, 6, 3, 5, 8, 9],
                                  [6, 5, 6, 6, 7, 8, 8, 8, 8, 8],
                                  [5, 5, 5, 3, 3, 3, 2, 7, 5, 5],
                                  [6, 5, 6, 3, 3, 3, 4, 5, 5, 5],
                                  [6, 6, 6, 3, 2, 2, 2, 4, 4, 5],
                                  [6, 6, 7, 7, 4, 4, 4, 7, 7, 8]])
    assert result.unit == 'adu'
    # None of the optional attributes survive the wrapped call.
    assert all(getattr(result, attr) is None
               for attr in ['mask', 'uncertainty', 'wcs', 'flags'])
    # The following test could be deleted if log_to_metadata is also applied.
    assert not result.meta


def test_medianfilter_unusued():
    """All optional CCDData attributes (mask, uncertainty, wcs, flags)
    are unused by median_filter and come back as None.
    NOTE(review): "unusued" is a typo for "unused" in the test name."""
    ccd = CCDData(np.ones((3, 3)), unit='adu',
                  mask=np.ones((3, 3)),
                  uncertainty=StdDevUncertainty(np.ones((3, 3))),
                  wcs=np.ones((3, 3)),
                  flags=np.ones((3, 3)))
    result = core.median_filter(ccd, 3)
    assert isinstance(result, CCDData)
    assert result.unit == 'adu'
    assert all(getattr(result, attr) is None
               for attr in ['mask', 'uncertainty', 'wcs', 'flags'])
    # The following test could be deleted if log_to_metadata is also applied.
    assert not result.meta


def test_medianfilter_ndarray():
    """On a plain ndarray, median_filter must match scipy's directly."""
    arr = np.random.random((5, 5))
    result = core.median_filter(arr, 3)
    reference = ndimage.median_filter(arr, 3)
    # It's a wrapped function so we can use the equal comparison.
    # (the final assertion lives on the next source line)
np.testing.assert_array_equal(result, reference) ccdproc-1.3.0.post1/ccdproc/tests/test_ccddata.py0000664000175000017500000010626213207605210023433 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import textwrap import numpy as np import pytest from astropy.io import fits from astropy.nddata import StdDevUncertainty, MissingDataAssociationException from astropy import units as u from astropy.extern import six from astropy import log from astropy.wcs import WCS, FITSFixedWarning from astropy.tests.helper import catch_warnings from astropy.utils import minversion from astropy.utils.data import get_pkg_data_filename from ..ccddata import CCDData from .. import subtract_dark ASTROPY_GT_1_2 = minversion("astropy", "1.2") ASTROPY_GT_2_0 = minversion("astropy", "2.0") def test_ccddata_empty(): with pytest.raises(TypeError): CCDData() # empty initializer should fail def test_ccddata_must_have_unit(): with pytest.raises(ValueError): CCDData(np.zeros([100, 100])) def test_ccddata_unit_cannot_be_set_to_none(ccd_data): with pytest.raises(TypeError): ccd_data.unit = None def test_ccddata_meta_header_conflict(): with pytest.raises(ValueError) as exc: CCDData([1, 2, 3], unit='', meta={1: 1}, header={2: 2}) assert "can't have both header and meta." 
in str(exc) @pytest.mark.data_size(10) def test_ccddata_simple(ccd_data): assert ccd_data.shape == (10, 10) assert ccd_data.size == 100 assert ccd_data.dtype == np.dtype(float) def test_ccddata_init_with_string_electron_unit(): ccd = CCDData(np.zeros((10, 10)), unit="electron") assert ccd.unit is u.electron @pytest.mark.data_size(10) def test_initialize_from_FITS(ccd_data, tmpdir): hdu = fits.PrimaryHDU(ccd_data) hdulist = fits.HDUList([hdu]) filename = tmpdir.join('afile.fits').strpath hdulist.writeto(filename) cd = CCDData.read(filename, unit=u.electron) assert cd.shape == (10, 10) assert cd.size == 100 assert np.issubdtype(cd.data.dtype, np.floating) for k, v in hdu.header.items(): assert cd.meta[k] == v def test_reader_removes_wcs_related_keywords(tmpdir): arr = np.arange(100).reshape(10, 10) # Create a WCS programatically (straight from the astropy WCS documentation) w = WCS(naxis=2) w.wcs.crpix = [-234.75, 8.3393] w.wcs.cdelt = np.array([-0.066667, 0.066667]) w.wcs.crval = [0, -90] w.wcs.ctype = ["RA---AIR", "DEC--AIR"] w.wcs.set_pv([(2, 1, 45.0)]) # Write the image including WCS to file ccd = CCDData(arr, unit='adu', wcs=w) filename = tmpdir.join('afile.fits').strpath ccd.write(filename) # Check that the header and the meta of the CCDData are not equal ccd_fromfile = CCDData.read(filename) read_header = ccd_fromfile.meta ref_header = fits.getheader(filename) ref_wcs = ccd_fromfile.wcs.to_header() for key in read_header: # Make sure no new keys were accidentally added assert key in ref_header for key in ref_header: assert key in read_header or key in ref_wcs def test_initialize_from_fits_with_unit_in_header(tmpdir): fake_img = np.random.random(size=(100, 100)) hdu = fits.PrimaryHDU(fake_img) hdu.header['bunit'] = u.adu.to_string() filename = tmpdir.join('afile.fits').strpath hdu.writeto(filename) ccd = CCDData.read(filename) # ccd should pick up the unit adu from the fits header...did it? 
assert ccd.unit is u.adu # An explicit unit in the read overrides any unit in the FITS file ccd2 = CCDData.read(filename, unit="photon") assert ccd2.unit is u.photon def test_initialize_from_fits_with_ADU_in_header(tmpdir): fake_img = np.random.random(size=(100, 100)) hdu = fits.PrimaryHDU(fake_img) hdu.header['bunit'] = 'ADU' filename = tmpdir.join('afile.fits').strpath hdu.writeto(filename) ccd = CCDData.read(filename) # ccd should pick up the unit adu from the fits header...did it? assert ccd.unit is u.adu def test_initialize_from_fits_with_data_in_different_extension(tmpdir): fake_img = np.random.random(size=(100, 100)) hdu1 = fits.PrimaryHDU() hdu2 = fits.ImageHDU(fake_img) hdus = fits.HDUList([hdu1, hdu2]) filename = tmpdir.join('afile.fits').strpath hdus.writeto(filename) with catch_warnings(FITSFixedWarning) as w: ccd = CCDData.read(filename, unit='adu') if not ASTROPY_GT_2_0 or minversion("astropy", "2.0.3"): # Test can only succeed if astropy <= 2 (where it uses ccdprocs CCDData) # or with the patched astropy.nddata.CCDData # (scheduled for 2.0.3) assert len(w) == 0 # check that the header is the combined header assert hdu2.header + hdu1.header == ccd.header # ccd should pick up the unit adu from the fits header...did it? np.testing.assert_array_equal(ccd.data, fake_img) def test_initialize_from_fits_with_extension(tmpdir): fake_img1 = np.random.random(size=(100, 100)) fake_img2 = np.random.random(size=(100, 100)) new_hdul = fits.HDUList() hdu0 = fits.PrimaryHDU() hdu1 = fits.ImageHDU(fake_img1) hdu2 = fits.ImageHDU(fake_img2) hdus = fits.HDUList([hdu0, hdu1, hdu2]) filename = tmpdir.join('afile.fits').strpath hdus.writeto(filename) ccd = CCDData.read(filename, hdu=2, unit='adu') # ccd should pick up the unit adu from the fits header...did it? 
np.testing.assert_array_equal(ccd.data, fake_img2) def test_write_unit_to_hdu(ccd_data): ccd_unit = ccd_data.unit hdulist = ccd_data.to_hdu() assert 'bunit' in hdulist[0].header assert hdulist[0].header['bunit'] == ccd_unit.to_string() def test_initialize_from_FITS_bad_keyword_raises_error(ccd_data, tmpdir): # There are two fits.open keywords that are not permitted in ccdproc: # do_not_scale_image_data and scale_back filename = tmpdir.join('test.fits').strpath ccd_data.write(filename) with pytest.raises(TypeError): CCDData.read(filename, unit=ccd_data.unit, do_not_scale_image_data=True) with pytest.raises(TypeError): CCDData.read(filename, unit=ccd_data.unit, scale_back=True) def test_ccddata_writer(ccd_data, tmpdir): filename = tmpdir.join('test.fits').strpath ccd_data.write(filename) ccd_disk = CCDData.read(filename, unit=ccd_data.unit) np.testing.assert_array_equal(ccd_data.data, ccd_disk.data) def test_ccddata_meta_is_case_sensitive(ccd_data): key = 'SoMeKEY' ccd_data.meta[key] = 10 assert key.lower() not in ccd_data.meta assert key.upper() not in ccd_data.meta assert key in ccd_data.meta def test_ccddata_meta_is_not_fits_header(ccd_data): ccd_data.meta = {'OBSERVER': 'Edwin Hubble'} assert not isinstance(ccd_data.meta, fits.Header) def test_fromMEF(ccd_data, tmpdir): hdu = fits.PrimaryHDU(ccd_data) hdu2 = fits.PrimaryHDU(2 * ccd_data.data) hdulist = fits.HDUList(hdu) hdulist.append(hdu2) filename = tmpdir.join('afile.fits').strpath hdulist.writeto(filename) # by default, we reading from the first extension cd = CCDData.read(filename, unit=u.electron) np.testing.assert_array_equal(cd.data, ccd_data.data) # but reading from the second should work too cd = CCDData.read(filename, hdu=1, unit=u.electron) np.testing.assert_array_equal(cd.data, 2 * ccd_data.data) def test_metafromheader(): hdr = fits.header.Header() hdr['observer'] = 'Edwin Hubble' hdr['exptime'] = '3600' d1 = CCDData(np.ones((5, 5)), meta=hdr, unit=u.electron) assert d1.meta['OBSERVER'] == 'Edwin 
Hubble' assert d1.header['OBSERVER'] == 'Edwin Hubble' def test_metafromdict(): dic = {'OBSERVER': 'Edwin Hubble', 'EXPTIME': 3600} d1 = CCDData(np.ones((5, 5)), meta=dic, unit=u.electron) assert d1.meta['OBSERVER'] == 'Edwin Hubble' def test_header2meta(): hdr = fits.header.Header() hdr['observer'] = 'Edwin Hubble' hdr['exptime'] = '3600' d1 = CCDData(np.ones((5, 5)), unit=u.electron) d1.header = hdr assert d1.meta['OBSERVER'] == 'Edwin Hubble' assert d1.header['OBSERVER'] == 'Edwin Hubble' def test_metafromstring_fail(): hdr = 'this is not a valid header' with pytest.raises(TypeError): CCDData(np.ones((5, 5)), meta=hdr, unit=u.adu) def test_setting_bad_uncertainty_raises_error(ccd_data): with pytest.raises(TypeError): # Uncertainty is supposed to be an instance of NDUncertainty ccd_data.uncertainty = 10 def test_setting_uncertainty_with_array(ccd_data): ccd_data.uncertainty = None fake_uncertainty = np.sqrt(np.abs(ccd_data.data)) ccd_data.uncertainty = fake_uncertainty.copy() np.testing.assert_array_equal(ccd_data.uncertainty.array, fake_uncertainty) def test_setting_uncertainty_wrong_shape_raises_error(): ccd = CCDData(np.ones((10, 10)), unit='adu') with pytest.raises(ValueError): ccd.uncertainty = np.zeros((20, 20)) def test_to_hdu(ccd_data): ccd_data.meta = {'observer': 'Edwin Hubble'} fits_hdulist = ccd_data.to_hdu() assert isinstance(fits_hdulist, fits.HDUList) for k, v in ccd_data.meta.items(): assert fits_hdulist[0].header[k] == v np.testing.assert_array_equal(fits_hdulist[0].data, ccd_data.data) def test_to_hdu_long_metadata_item(ccd_data): # There is no attempt to try to handle the general problem of # a long keyword (that requires HIERARCH) with a long string value # (that requires CONTINUE). # However, a long-ish keyword with a long value can happen because of # auto-logging, and we are supposed to handle that. 
    # So, a nice long command:
    from ..core import subtract_dark, _short_names
    dark = CCDData(np.zeros_like(ccd_data.data), unit="adu")
    result = subtract_dark(ccd_data, dark, dark_exposure=30 * u.second,
                           data_exposure=15 * u.second, scale=True)
    assert 'subtract_dark' in result.header
    with catch_warnings() as w:
        hdulist = result.to_hdu()
    if ASTROPY_GT_2_0:
        # Astropys CCDData only shortens the keyword name if the value is
        # also longer than 72 chars.
        assert len(w) == 1
        assert str(w[0].message) == (
            "Keyword name 'subtract_dark' is greater than 8 characters or "
            "contains characters not allowed by the FITS standard; a HIERARCH "
            "card will be created.")
    header = hdulist[0].header
    assert header['subtract_dark'] == _short_names['subtract_dark']
    args_value = header[_short_names['subtract_dark']]
    # Yuck -- have to hand code the ".0" to the numbers to get this to pass...
    assert "dark_exposure={0} {1}".format(30.0, u.second) in args_value
    assert "data_exposure={0} {1}".format(15.0, u.second) in args_value
    assert "scale=True" in args_value


def test_copy(ccd_data):
    # A copy must duplicate data, unit and metadata of the original.
    ccd_copy = ccd_data.copy()
    np.testing.assert_array_equal(ccd_copy.data, ccd_data.data)
    assert ccd_copy.unit == ccd_data.unit
    assert ccd_copy.meta == ccd_data.meta


@pytest.mark.parametrize('operation,affects_uncertainty', [
    ("multiply", True),
    ("divide", True),
])
@pytest.mark.parametrize('operand', [
    2.0,
    2 * u.dimensionless_unscaled,
    2 * u.photon / u.adu,
])
@pytest.mark.parametrize('with_uncertainty', [
    True,
    False])
@pytest.mark.data_unit(u.adu)
def test_mult_div_overload(ccd_data, operand, with_uncertainty, operation,
                           affects_uncertainty):
    # Multiplication/division by a scalar or Quantity must propagate data,
    # uncertainty (scaled) and units without mutating the original CCDData.
    if with_uncertainty:
        ccd_data.uncertainty = StdDevUncertainty(np.ones_like(ccd_data))
    method = ccd_data.__getattribute__(operation)
    np_method = np.__getattribute__(operation)
    result = method(operand)
    assert result is not ccd_data
    assert isinstance(result, CCDData)
    assert (result.uncertainty is None or
            isinstance(result.uncertainty, StdDevUncertainty))
    try:
        op_value = operand.value
    except AttributeError:
        op_value = operand

    np.testing.assert_array_equal(result.data,
                                  np_method(ccd_data.data, op_value))
    if with_uncertainty:
        if affects_uncertainty:
            np.testing.assert_array_equal(result.uncertainty.array,
                                          np_method(ccd_data.uncertainty.array,
                                                    op_value))
        else:
            np.testing.assert_array_equal(result.uncertainty.array,
                                          ccd_data.uncertainty.array)
    else:
        assert result.uncertainty is None

    if isinstance(operand, u.Quantity):
        # Need the "1 *" below to force arguments to be Quantity to work around
        # astropy/astropy#2377
        expected_unit = np_method(1 * ccd_data.unit, 1 * operand.unit).unit
        assert result.unit == expected_unit
    else:
        assert result.unit == ccd_data.unit


@pytest.mark.parametrize('operation,affects_uncertainty', [
    ("add", False),
    ("subtract", False),
])
@pytest.mark.parametrize('operand,expect_failure', [
    (2.0, u.UnitsError),  # fail--units don't match image
    (2 * u.dimensionless_unscaled, u.UnitsError),  # same
    (2 * u.adu, False),
])
@pytest.mark.parametrize('with_uncertainty', [
    True,
    False])
@pytest.mark.data_unit(u.adu)
def test_add_sub_overload(ccd_data, operand, expect_failure, with_uncertainty,
                          operation, affects_uncertainty):
    # Addition/subtraction requires matching units; with matching units the
    # data changes but the standard-deviation uncertainty is unaffected.
    if with_uncertainty:
        ccd_data.uncertainty = StdDevUncertainty(np.ones_like(ccd_data))
    method = ccd_data.__getattribute__(operation)
    np_method = np.__getattribute__(operation)
    if expect_failure:
        with pytest.raises(expect_failure):
            result = method(operand)
        return
    else:
        result = method(operand)
    assert result is not ccd_data
    assert isinstance(result, CCDData)
    assert (result.uncertainty is None or
            isinstance(result.uncertainty, StdDevUncertainty))
    try:
        op_value = operand.value
    except AttributeError:
        op_value = operand

    np.testing.assert_array_equal(result.data,
                                  np_method(ccd_data.data, op_value))
    if with_uncertainty:
        if affects_uncertainty:
            np.testing.assert_array_equal(result.uncertainty.array,
                                          np_method(ccd_data.uncertainty.array,
                                                    op_value))
        else:
            np.testing.assert_array_equal(result.uncertainty.array,
                                          ccd_data.uncertainty.array)
    else:
        assert result.uncertainty is None

    if isinstance(operand, u.Quantity):
        assert (result.unit == ccd_data.unit and result.unit == operand.unit)
    else:
        assert result.unit == ccd_data.unit


def test_arithmetic_overload_fails(ccd_data):
    # Arithmetic with a non-numeric operand must raise TypeError.
    with pytest.raises(TypeError):
        ccd_data.multiply("five")

    with pytest.raises(TypeError):
        ccd_data.divide("five")

    with pytest.raises(TypeError):
        ccd_data.add("five")

    with pytest.raises(TypeError):
        ccd_data.subtract("five")


def test_arithmetic_no_wcs_compare():
    # compare_wcs=None disables WCS comparison and drops WCS from the result.
    ccd = CCDData(np.ones((10, 10)), unit='')
    assert ccd.add(ccd, compare_wcs=None).wcs is None
    assert ccd.subtract(ccd, compare_wcs=None).wcs is None
    assert ccd.multiply(ccd, compare_wcs=None).wcs is None
    assert ccd.divide(ccd, compare_wcs=None).wcs is None


@pytest.mark.skipif('not ASTROPY_GT_1_2')
def test_arithmetic_with_wcs_compare():
    # A compare_wcs callable that accepts the pair keeps the first WCS.
    def return_diff_smaller_3(first, second):
        return abs(first - second) <= 3

    ccd1 = CCDData(np.ones((10, 10)), unit='', wcs=2)
    ccd2 = CCDData(np.ones((10, 10)), unit='', wcs=5)
    assert ccd1.add(ccd2, compare_wcs=return_diff_smaller_3).wcs == 2
    assert ccd1.subtract(ccd2, compare_wcs=return_diff_smaller_3).wcs == 2
    assert ccd1.multiply(ccd2, compare_wcs=return_diff_smaller_3).wcs == 2
    assert ccd1.divide(ccd2, compare_wcs=return_diff_smaller_3).wcs == 2


@pytest.mark.skipif('not ASTROPY_GT_1_2')
def test_arithmetic_with_wcs_compare_fail():
    # A compare_wcs callable that rejects the pair must raise ValueError.
    def return_diff_smaller_1(first, second):
        return abs(first - second) <= 1

    ccd1 = CCDData(np.ones((10, 10)), unit='', wcs=2)
    ccd2 = CCDData(np.ones((10, 10)), unit='', wcs=5)
    with pytest.raises(ValueError):
        ccd1.add(ccd2, compare_wcs=return_diff_smaller_1).wcs
    with pytest.raises(ValueError):
        ccd1.subtract(ccd2, compare_wcs=return_diff_smaller_1).wcs
    with pytest.raises(ValueError):
        ccd1.multiply(ccd2, compare_wcs=return_diff_smaller_1).wcs
    with pytest.raises(ValueError):
        ccd1.divide(ccd2, compare_wcs=return_diff_smaller_1).wcs


@pytest.mark.skipif('ASTROPY_GT_1_2')
def test_arithmetic_with_wcs_compare_fail_astropy_version():
    # On astropy < 1.2 the compare_wcs keyword is unsupported -> ImportError.
    def return_diff_smaller_1(first, second):
        return abs(first - second) <= 1

    ccd1 = CCDData(np.ones((10, 10)), unit='', wcs=2)
    ccd2 = CCDData(np.ones((10, 10)), unit='', wcs=5)
    with pytest.raises(ImportError):
        ccd1.add(ccd2, compare_wcs=return_diff_smaller_1).wcs
    with pytest.raises(ImportError):
        ccd1.subtract(ccd2, compare_wcs=return_diff_smaller_1).wcs
    with pytest.raises(ImportError):
        ccd1.multiply(ccd2, compare_wcs=return_diff_smaller_1).wcs
    with pytest.raises(ImportError):
        ccd1.divide(ccd2, compare_wcs=return_diff_smaller_1).wcs


def test_arithmetic_overload_ccddata_operand(ccd_data):
    # CCDData op CCDData: data combines elementwise and the standard-deviation
    # uncertainties combine in quadrature.
    ccd_data.uncertainty = StdDevUncertainty(np.ones_like(ccd_data))
    operand = ccd_data.copy()
    result = ccd_data.add(operand)
    assert len(result.meta) == 0
    np.testing.assert_array_equal(result.data,
                                  2 * ccd_data.data)
    np.testing.assert_array_equal(result.uncertainty.array,
                                  np.sqrt(2) * ccd_data.uncertainty.array)

    result = ccd_data.subtract(operand)
    assert len(result.meta) == 0
    np.testing.assert_array_equal(result.data,
                                  0 * ccd_data.data)
    np.testing.assert_array_equal(result.uncertainty.array,
                                  np.sqrt(2) * ccd_data.uncertainty.array)

    result = ccd_data.multiply(operand)
    assert len(result.meta) == 0
    np.testing.assert_array_equal(result.data,
                                  ccd_data.data ** 2)
    expected_uncertainty = (np.sqrt(2) * np.abs(ccd_data.data) *
                            ccd_data.uncertainty.array)
    np.testing.assert_allclose(result.uncertainty.array,
                               expected_uncertainty)

    result = ccd_data.divide(operand)
    assert len(result.meta) == 0
    np.testing.assert_array_equal(result.data,
                                  np.ones_like(ccd_data.data))
    expected_uncertainty = (np.sqrt(2) / np.abs(ccd_data.data) *
                            ccd_data.uncertainty.array)
    np.testing.assert_allclose(result.uncertainty.array,
                               expected_uncertainty)


def test_arithmetic_overload_differing_units():
    # Operands with compatible but different units (m vs cm) must be
    # converted consistently with the equivalent numpy/Quantity operation.
    a = np.array([1, 2, 3]) * u.m
    b = np.array([1, 2, 3]) * u.cm
    ccddata = CCDData(a)

    # TODO: Could also be parametrized.
    res = ccddata.add(b)
    np.testing.assert_array_almost_equal(res.data, np.add(a, b).value)
    assert res.unit == np.add(a, b).unit

    res = ccddata.subtract(b)
    np.testing.assert_array_almost_equal(res.data, np.subtract(a, b).value)
    assert res.unit == np.subtract(a, b).unit

    res = ccddata.multiply(b)
    np.testing.assert_array_almost_equal(res.data, np.multiply(a, b).value)
    assert res.unit == np.multiply(a, b).unit

    res = ccddata.divide(b)
    np.testing.assert_array_almost_equal(res.data, np.divide(a, b).value)
    assert res.unit == np.divide(a, b).unit


@pytest.mark.skipif('not ASTROPY_GT_1_2')
def test_arithmetic_add_with_array():
    # Adding a bare ndarray works for dimensionless data but must fail
    # (ValueError) when the CCDData carries a physical unit.
    ccd = CCDData(np.ones((3, 3)), unit='')
    res = ccd.add(np.arange(3))
    np.testing.assert_array_equal(res.data, [[1, 2, 3]] * 3)

    ccd = CCDData(np.ones((3, 3)), unit='adu')
    with pytest.raises(ValueError):
        ccd.add(np.arange(3))


@pytest.mark.skipif('not ASTROPY_GT_1_2')
def test_arithmetic_subtract_with_array():
    # Same contract as test_arithmetic_add_with_array, for subtraction.
    ccd = CCDData(np.ones((3, 3)), unit='')
    res = ccd.subtract(np.arange(3))
    np.testing.assert_array_equal(res.data, [[1, 0, -1]] * 3)

    ccd = CCDData(np.ones((3, 3)), unit='adu')
    with pytest.raises(ValueError):
        ccd.subtract(np.arange(3))


@pytest.mark.skipif('not ASTROPY_GT_1_2')
def test_arithmetic_multiply_with_array():
    # Multiplying by a unitless ndarray scales data and keeps the unit.
    ccd = CCDData(np.ones((3, 3)) * 3, unit=u.m)
    res = ccd.multiply(np.ones((3, 3)) * 2)
    np.testing.assert_array_equal(res.data, [[6, 6, 6]] * 3)
    assert res.unit == ccd.unit


@pytest.mark.skipif('not ASTROPY_GT_1_2')
def test_arithmetic_divide_with_array():
    # Dividing by a unitless ndarray scales data and keeps the unit.
    ccd = CCDData(np.ones((3, 3)), unit=u.m)
    res = ccd.divide(np.ones((3, 3)) * 2)
    np.testing.assert_array_equal(res.data, [[0.5, 0.5, 0.5]] * 3)
    assert res.unit == ccd.unit


def test_ccddata_header_does_not_corrupt_fits(ccd_data, tmpdir):
    # This test is for the problem described in astropy/ccdproc#165
    # The issue comes up when a long FITS keyword value is in a header
    # that is read in and then converted to a non-fits.Header object
    # that is dict-like, and then you try to write that out again as
    # FITS. Certainly FITS files ought to be able to round-trip, and
    # this test checks for that.
    fake_dark = ccd_data.copy()
    # This generates a nice long log entry in the header.
    ccd = subtract_dark(ccd_data, fake_dark,
                        dark_exposure=30*u.second,
                        data_exposure=30*u.second)
    # The write below succeeds...
    long_key = tmpdir.join('long_key.fit').strpath
    with catch_warnings() as w:
        ccd.write(long_key)
    if ASTROPY_GT_2_0:
        # Astropys CCDData only shortens the keyword name if the value is
        # also longer than 72 chars.
        assert len(w) == 1
        assert str(w[0].message) == (
            "Keyword name 'subtract_dark' is greater than 8 characters or "
            "contains characters not allowed by the FITS standard; a HIERARCH "
            "card will be created.")
    # And this read succeeds...
    ccd_read = CCDData.read(long_key, unit="adu")

    # This write failed in astropy/ccdproc#165 but should not:
    rewritten = tmpdir.join('should_work.fit').strpath
    ccd_read.write(rewritten)

    # If all is well then reading the file we just wrote should result in an
    # identical header.
    ccd_reread = CCDData.read(rewritten, unit="adu")
    assert ccd_reread.header == ccd_read.header


def test_ccddata_with_fits_header_as_meta_works_with_autologging(ccd_data,
                                                                 tmpdir):
    tmp_file = tmpdir.join('tmp.fits')
    hdr = fits.Header(ccd_data.header)
    ccd_data.header = hdr
    fake_dark = ccd_data.copy()
    # The combination below will generate a long keyword ('subtract_dark')
    # and a long value (the function signature) in autologging.
    ccd2 = subtract_dark(ccd_data, fake_dark,
                         dark_exposure=30*u.second,
                         data_exposure=15*u.second,
                         scale=True)
    # This should not fail....
    ccd2.write(tmp_file.strpath)
    # And the header on ccd2 should be a subset of the written header; they
    # do not match exactly because the written header contains information
    # about the array size that is the hdr we created manually.
    ccd2_read = CCDData.read(tmp_file.strpath, unit=u.adu)
    for k, v in six.iteritems(ccd2.header):
        assert ccd2_read.header[k] == v


def test_history_preserved_if_metadata_is_fits_header(tmpdir):
    # HISTORY cards must survive a write/read round trip intact.
    fake_img = np.random.random(size=(100, 100))
    hdu = fits.PrimaryHDU(fake_img)
    hdu.header['history'] = 'one'
    hdu.header['history'] = 'two'
    hdu.header['history'] = 'three'
    assert len(hdu.header['history']) == 3
    tmp_file = tmpdir.join('temp.fits').strpath
    hdu.writeto(tmp_file)

    ccd_read = CCDData.read(tmp_file, unit="adu")
    assert ccd_read.header['history'] == hdu.header['history']


def test_infol_logged_if_unit_in_fits_header(ccd_data, tmpdir):
    # Passing an explicit unit that overrides the one stored in the FITS
    # header should emit an INFO log message mentioning the explicit unit.
    tmpfile = tmpdir.join('temp.fits')
    ccd_data.write(tmpfile.strpath)
    log.setLevel('INFO')
    explicit_unit_name = "photon"
    with log.log_to_list() as log_list:
        ccd_from_disk = CCDData.read(tmpfile.strpath,
                                     unit=explicit_unit_name)
        assert explicit_unit_name in log_list[0].message


def test_wcs_attribute(ccd_data, tmpdir):
    """
    Check that WCS attribute gets added to header, and that if a CCDData
    object is created from a FITS file with a header, and the WCS attribute
    is modified, then the CCDData object is turned back into an hdu, the
    WCS object overwrites the old WCS information in the header.
    """
    tmpfile = tmpdir.join('temp.fits')
    # This wcs example is taken from the astropy.wcs docs.
    wcs = WCS(naxis=2)
    wcs.wcs.crpix = np.array(ccd_data.shape)/2
    wcs.wcs.cdelt = np.array([-0.066667, 0.066667])
    wcs.wcs.crval = [0, -90]
    wcs.wcs.ctype = ["RA---AIR", "DEC--AIR"]
    wcs.wcs.set_pv([(2, 1, 45.0)])
    ccd_data.header = ccd_data.to_hdu()[0].header
    ccd_data.header.extend(wcs.to_header(), useblanks=False)
    ccd_data.write(tmpfile.strpath)
    ccd_new = CCDData.read(tmpfile.strpath)
    original_header_length = len(ccd_new.header)
    # WCS attribute should be set for ccd_new
    assert ccd_new.wcs is not None
    # WCS attribute should be equal to wcs above.
    assert ccd_new.wcs.wcs == wcs.wcs

    # Making a CCDData with WCS (but not WCS in the header) should lead to
    # WCS information in the header when it is converted to an HDU.
    ccd_wcs_not_in_header = CCDData(ccd_data.data, wcs=wcs, unit="adu")
    hdu = ccd_wcs_not_in_header.to_hdu()[0]
    wcs_header = wcs.to_header()
    for k in wcs_header.keys():
        # Skip these keywords if they are in the WCS header because they are
        # not WCS-specific.
        if k in ['', 'COMMENT', 'HISTORY']:
            continue
        # No keyword from the WCS should be in the header.
        assert k not in ccd_wcs_not_in_header.header
        # Every keyword in the WCS should be in the header of the HDU
        assert hdu.header[k] == wcs_header[k]

    # Now check that if WCS of a CCDData is modified, then the CCDData is
    # converted to an HDU, the WCS keywords in the header are overwritten
    # with the appropriate keywords from the header.
    #
    # ccd_new has a WCS and WCS keywords in the header, so try modifying
    # the WCS.
    ccd_new.wcs.wcs.cdelt *= 2
    ccd_new_hdu_mod_wcs = ccd_new.to_hdu()[0]
    assert ccd_new_hdu_mod_wcs.header['CDELT1'] == ccd_new.wcs.wcs.cdelt[0]
    assert ccd_new_hdu_mod_wcs.header['CDELT2'] == ccd_new.wcs.wcs.cdelt[1]


def test_header(ccd_data):
    # A plain dict passed as header becomes the CCDData meta mapping.
    a = {'Observer': 'Hubble'}
    ccd = CCDData(ccd_data, header=a)
    assert ccd.meta == a


def test_wcs_arithmetic(ccd_data):
    # Arithmetic with a scalar operand must carry the WCS through unchanged.
    ccd_data.wcs = 5
    result = ccd_data.multiply(1.0)
    assert result.wcs == 5


@pytest.mark.parametrize('operation',
                         ['multiply', 'divide', 'add', 'subtract'])
def test_wcs_arithmetic_ccd(ccd_data, operation):
    # The result takes the first operand's WCS; the second operand keeps none.
    ccd_data2 = ccd_data.copy()
    ccd_data.wcs = 5
    method = ccd_data.__getattribute__(operation)
    result = method(ccd_data2)
    assert result.wcs == ccd_data.wcs
    assert ccd_data2.wcs is None


def test_wcs_sip_handling():
    """
    Check whether the ctypes RA---TAN-SIP and DEC--TAN-SIP survive
    a roundtrip unchanged.
    """
    data_file = get_pkg_data_filename('data/sip-wcs.fit')

    def check_wcs_ctypes_header(header):
        # One bool per expected CTYPE keyword in the header.
        expected_wcs_ctypes = {
            'CTYPE1': 'RA---TAN-SIP',
            'CTYPE2': 'DEC--TAN-SIP'
        }

        return [header[k] == v for k, v in expected_wcs_ctypes.items()]

    def check_wcs_ctypes_wcs(wcs):
        # One bool per expected ctype on the WCS object.
        expected = ['RA---TAN-SIP', 'DEC--TAN-SIP']
        return [act == ref for act, ref in zip(wcs.wcs.ctype, expected)]

    ccd_original = CCDData.read(data_file)
    good_ctype = check_wcs_ctypes_wcs(ccd_original.wcs)
    assert all(good_ctype)

    ccd_new = ccd_original.to_hdu()
    good_ctype = check_wcs_ctypes_header(ccd_new[0].header)
    assert all(good_ctype)

    # Try converting to header with wcs_relax=False and
    # the header should contain the CTYPE keywords without
    # the -SIP
    ccd_no_relax = ccd_original.to_hdu(wcs_relax=False)
    good_ctype = check_wcs_ctypes_header(ccd_no_relax[0].header)
    if ASTROPY_GT_1_2:
        # This behavior was introduced in astropy 1.2.
        assert not any(good_ctype)
        assert ccd_no_relax[0].header['CTYPE1'] == 'RA---TAN'
        assert ccd_no_relax[0].header['CTYPE2'] == 'DEC--TAN'
    else:
        # The -SIP is left in place.
        assert all(good_ctype)


@pytest.mark.parametrize('operation',
                         ['multiply', 'divide', 'add', 'subtract'])
def test_mask_arithmetic_ccd(ccd_data, operation):
    # The mask of the first operand propagates to the result.
    ccd_data2 = ccd_data.copy()
    ccd_data.mask = (ccd_data.data > 0)
    method = ccd_data.__getattribute__(operation)
    result = method(ccd_data2)
    np.testing.assert_equal(result.mask, ccd_data.mask)


def test_write_read_multiextensionfits_mask_default(ccd_data, tmpdir):
    # Test that if a mask is present the mask is saved and loaded by default.
    ccd_data.mask = ccd_data.data > 10
    filename = tmpdir.join('afile.fits').strpath
    ccd_data.write(filename)
    ccd_after = CCDData.read(filename)
    assert ccd_after.mask is not None
    np.testing.assert_array_equal(ccd_data.mask, ccd_after.mask)


def test_write_read_multiextensionfits_uncertainty_default(ccd_data, tmpdir):
    # Test that if a uncertainty is present it is saved and loaded by default.
    ccd_data.uncertainty = StdDevUncertainty(ccd_data.data * 10)
    filename = tmpdir.join('afile.fits').strpath
    ccd_data.write(filename)
    ccd_after = CCDData.read(filename)
    assert ccd_after.uncertainty is not None
    np.testing.assert_array_equal(ccd_data.uncertainty.array,
                                  ccd_after.uncertainty.array)


def test_write_read_multiextensionfits_not(ccd_data, tmpdir):
    # Test that writing mask and uncertainty can be disabled
    ccd_data.mask = ccd_data.data > 10
    ccd_data.uncertainty = StdDevUncertainty(ccd_data.data * 10)
    filename = tmpdir.join('afile.fits').strpath
    ccd_data.write(filename, hdu_mask=None, hdu_uncertainty=None)
    ccd_after = CCDData.read(filename)
    assert ccd_after.uncertainty is None
    assert ccd_after.mask is None


def test_write_read_multiextensionfits_custom_ext_names(ccd_data, tmpdir):
    # Test writing mask, uncertainty in another extension than default
    ccd_data.mask = ccd_data.data > 10
    ccd_data.uncertainty = StdDevUncertainty(ccd_data.data * 10)
    filename = tmpdir.join('afile.fits').strpath
    ccd_data.write(filename, hdu_mask='Fun', hdu_uncertainty='NoFun')

    # Try reading with defaults extension names
    ccd_after = CCDData.read(filename)
    assert ccd_after.uncertainty is None
    assert ccd_after.mask is None

    # Try reading with custom extension names
    ccd_after = CCDData.read(filename, hdu_mask='Fun', hdu_uncertainty='NoFun')
    assert ccd_after.uncertainty is not None
    assert ccd_after.mask is not None
    np.testing.assert_array_equal(ccd_data.mask, ccd_after.mask)
    np.testing.assert_array_equal(ccd_data.uncertainty.array,
                                  ccd_after.uncertainty.array)


@pytest.mark.skipif(ASTROPY_GT_2_0 and not minversion("astropy", "2.0.3"),
                    reason="CCDData with reader is used from astropy and this "
                           "Bug isn't fixed currently.")
def test_read_wcs_not_creatable(tmpdir):
    # The following Header can't be converted to a WCS object. See also
    # astropy issue #6499.
    # NOTE(review): the intra-card spacing of this header literal was lost in
    # the archive dump; fits.Header.fromstring is whitespace-tolerant here.
    hdr_txt_example_WCS = textwrap.dedent('''
        SIMPLE  = T / Fits standard
        BITPIX  = 16 / Bits per pixel
        NAXIS   = 2 / Number of axes
        NAXIS1  = 1104 / Axis length
        NAXIS2  = 4241 / Axis length
        CRVAL1  = 164.98110962 / Physical value of the reference pixel X
        CRVAL2  = 44.34089279 / Physical value of the reference pixel Y
        CRPIX1  = -34.0 / Reference pixel in X (pixel)
        CRPIX2  = 2041.0 / Reference pixel in Y (pixel)
        CDELT1  = 0.10380000 / X Scale projected on detector (#/pix)
        CDELT2  = 0.10380000 / Y Scale projected on detector (#/pix)
        CTYPE1  = 'RA---TAN' / Pixel coordinate system
        CTYPE2  = 'WAVELENGTH' / Pixel coordinate system
        CUNIT1  = 'degree ' / Units used in both CRVAL1 and CDELT1
        CUNIT2  = 'nm ' / Units used in both CRVAL2 and CDELT2
        CD1_1   = 0.20760000 / Pixel Coordinate translation matrix
        CD1_2   = 0.00000000 / Pixel Coordinate translation matrix
        CD2_1   = 0.00000000 / Pixel Coordinate translation matrix
        CD2_2   = 0.10380000 / Pixel Coordinate translation matrix
        C2YPE1  = 'RA---TAN' / Pixel coordinate system
        C2YPE2  = 'DEC--TAN' / Pixel coordinate system
        C2NIT1  = 'degree ' / Units used in both C2VAL1 and C2ELT1
        C2NIT2  = 'degree ' / Units used in both C2VAL2 and C2ELT2
        RADECSYS= 'FK5 ' / The equatorial coordinate system
        ''')
    with catch_warnings(FITSFixedWarning):
        hdr = fits.Header.fromstring(hdr_txt_example_WCS, sep='\n')
    hdul = fits.HDUList([fits.PrimaryHDU(np.ones((4241, 1104)), header=hdr)])
    filename = tmpdir.join('afile.fits').strpath
    hdul.writeto(filename)
    # The hdr cannot be converted to a WCS object because of an
    # InconsistentAxisTypesError but it should still open the file
    ccd = CCDData.read(filename, unit='adu')
    assert ccd.wcs is None


def test_wcs(ccd_data):
    # The wcs attribute is a plain settable attribute.
    ccd_data.wcs = 5
    assert ccd_data.wcs == 5


def test_recognized_fits_formats():
    from ..ccddata import _recognized_fits_file_extensions
    # These are the extensions that are supposed to be supported.
    supported_extensions = ['fit', 'fits', 'fts']

    # Make sure they are actually supported.
assert len(set(_recognized_fits_file_extensions) - set(supported_extensions)) == 0 def test_recognized_fits_formats_for_read_write(ccd_data, tmpdir): # Test that incorporates astropy/ccdproc#355, which asked that .fts # be auto-identified as a FITS file extension. from ..ccddata import _recognized_fits_file_extensions for ext in _recognized_fits_file_extensions: path = tmpdir.join("test.{}".format(ext)) ccd_data.write(path.strpath) from_disk = CCDData.read(path.strpath) assert (ccd_data.data == from_disk.data).all() def test_stddevuncertainty_compat_descriptor_no_parent(): with pytest.raises(MissingDataAssociationException): StdDevUncertainty(np.ones((10, 10))).parent_nddata def test_stddevuncertainty_compat_descriptor_no_weakref(): # TODO: Remove this test if astropy 1.0 isn't supported anymore # This test might create a Memoryleak on purpose, so the last lines after # the assert are IMPORTANT cleanup. ccd = CCDData(np.ones((10, 10)), unit='') uncert = StdDevUncertainty(np.ones((10, 10))) uncert._parent_nddata = ccd assert uncert.parent_nddata is ccd uncert._parent_nddata = None ccdproc-1.3.0.post1/ccdproc/version.py0000664000175000017500000001621613207623132021336 0ustar mseifertmseifert00000000000000# Autogenerated by Astropy-affiliated package ccdproc's setup.py on 2017-11-29 21:27:54 from __future__ import unicode_literals import datetime import locale import os import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return text def update_git_devstr(version, path=None): """ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. 
""" try: # Quick way to determine if we're in git or not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not devstr: # Probably not in git so just pass silently return version if 'dev' in version: # update to the current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr else: # otherwise it's already the true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None): """ Determines the number of revisions in this repository. Parameters ---------- sha : bool If True, the full SHA1 hash will be returned. Otherwise, the total count of commits in the repository will be used as a "revision number". show_warning : bool If True, issue a warning if git returns an error code, otherwise errors pass silently. path : str or None If a string, specifies the directory to look in to find the git repository. If `None`, the current working directory is used, and must be the root of the git repository. If given a filename it uses the directory containing that file. Returns ------- devversion : str Either a string with the revision number (if `sha` is False), the SHA1 hash of the current commit (if `sha` is True), or an empty string if git version info could not be identified. """ if path is None: path = os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just the hash of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as e: if show_warning: warnings.warn('Error running git: ' + str(e)) return (None, b'', b'') if p.returncode == 128: if show_warning: warnings.warn('No git repository present at {0!r}! 
Using ' 'default dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode == 129: if show_warning: warnings.warn('Your git looks old (does it support {0}?); ' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd) if not sha and returncode == 128: # git returns 128 if the command is not run from within a git # repository tree. In this case, a warning is produced above but we # return the default dev version of '0'. return '0' elif not sha and returncode == 129: # git returns 129 if a command option failed to parse; in # particular this could happen in git versions older than 1.7.2 # where the --count option is not supported # Also use --abbrev-commit and --abbrev=0 to display the minimum # number of characters needed per-commit (rather than the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall back on the old method of getting all revisions and counting # the lines if returncode == 0: return str(stdout.count(b'\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function is tested but it is only ever executed within a subprocess when # creating a fake package, so it doesn't get picked up by coverage metrics. def _get_repo_path(pathname, levels=None): # pragma: no cover """ Given a file or directory name, determine the root of the git repository this path is under. If given, this won't look any higher than ``levels`` (that is, if ``levels=0`` then the given path must be the root of the git repository and is returned if so. Returns `None` if the given path could not be determined to belong to a git repo. 
""" if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level = 0 while levels is None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir) return None _packagename = "ccdproc" _last_generated_version = "1.3.0.post1" _last_githash = "a27143531a48303b5d6691975a45a8f95fcab1e3" # Determine where the source code for this module # lives. If __file__ is not a filesystem path then # it is assumed not to live in a git repo at all. if _get_repo_path(__file__, levels=len(_packagename.split('.'))): version = update_git_devstr(_last_generated_version, path=__file__) githash = get_git_devstr(sha=True, show_warning=False, path=__file__) or _last_githash else: # The file does not appear to live in a git repo so don't bother # invoking git version = _last_generated_version githash = _last_githash major = 1 minor = 3 bugfix = 0 release = True timestamp = datetime.datetime(2017, 11, 29, 21, 27, 54) debug = False try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" ccdproc-1.3.0.post1/ccdproc/extern/0000775000175000017500000000000013207623133020577 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/ccdproc/extern/__init__.py0000664000175000017500000000010013207605210022673 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst ccdproc-1.3.0.post1/ccdproc/extern/bitfield.py0000664000175000017500000004306613207617404022750 0ustar mseifertmseifert00000000000000# External license! License can be found in "licenses/LICENSE_STSCI_TOOLS.txt". """ A module that provides functions for manipulating bitmasks and data quality (DQ) arrays. 
:Authors: Mihai Cara (contact: help@stsci.edu)

:License: ``_

"""
import numpy as np
import warnings

import six


__version__ = '1.0.0'
__vdate__ = '16-March-2017'
__author__ = 'Mihai Cara'


__all__ = ['bitfield_to_boolean_mask', 'interpret_bit_flags', 'is_bit_flag']


# Revision history:
# 0.1.0 (29-March-2015) - initial release based on code from stsci.skypac
# 0.1.1 (21-February-2017) - documentation typo fix
# 0.2.0 (23-February-2017) - performance and stability improvements. Changed
#       default output mask type from numpy.uint8 to numpy.bool_.
# 1.0.0 (16-March-2017) - Multiple enhancements:
#       1. Deprecated 'interpret_bits_value()'in favor of
#          'interpret_bit_flags()' which now takes 'flip_bits' argument to
#          flip bits in (list of) integer flags.
#       2. Deprecated 'bitmask2mask()' in favor of
#          'bitfield_to_boolean_mask()' which now also takes 'flip_bits'
#          argument.
#       3. Renamed arguments of 'interpret_bit_flags()' and
#          'bitfield_to_boolean_mask()' to be more technically correct.
#       4. 'interpret_bit_flags()' and 'bitfield_to_boolean_mask()' now
#          accept Python lists of bit flags (in addition to integer bitmasks
#          and string comma- (or '+') separated lists of bit flags).
#       5. Added 'is_bit_flag()' function to check if an integer number has
#          only one bit set (i.e., that it is a power of 2).


def is_bit_flag(n):
    """
    Verifies if the input number is a bit flag (i.e., an integer number that
    is an integer power of 2).

    Parameters
    ----------
    n : int
        A positive integer number. Non-positive integers are considered not
        to be "flags".

    Returns
    -------
    bool
        ``True`` if input ``n`` is a bit flag and ``False`` if it is not.

    """
    if n < 1:
        return False

    # A power of two has exactly one set bit in its binary representation.
    return bin(n).count('1') == 1


def _is_int(n):
    # True for Python ints and numpy integer scalars, but not for bools.
    return (
        (isinstance(n, int) and not isinstance(n, bool)) or
        (isinstance(n, np.generic) and np.issubdtype(n, np.integer))
    )


def interpret_bit_flags(bit_flags, flip_bits=None):
    """
    Converts input bit flags to a single integer value (bitmask) or `None`.

    When input is a list of flags (either a Python list of integer flags or a
    string of comma- or '+'-separated list of flags), the returned bitmask
    is obtained by summing input flags.

    .. note::
        In order to flip the bits of the returned bitmask, for input of `str`
        type, prepend '~' to the input string. '~' must be prepended to the
        *entire string* and not to each bit flag! For input that is already
        a bitmask or a Python list of bit flags, set `flip_bits` for `True`
        in order to flip the bits of the returned bitmask.

    Parameters
    ----------
    bit_flags : int, str, list, None
        An integer bitmask or flag, `None`, a string of comma- or
        '+'-separated list of integer bit flags, or a Python list of integer
        bit flags. If `bit_flags` is a `str` and if it is prepended with '~',
        then the output bitmask will have its bits flipped (compared to
        simple sum of input flags). For input `bit_flags` that is already a
        bitmask or a Python list of bit flags, bit-flipping can be controlled
        through `flip_bits` parameter.

    flip_bits : bool, None
        Indicates whether or not to flip the bits of the returned bitmask
        obtained from input bit flags. This parameter must be set to `None`
        when input `bit_flags` is either `None` or a Python list of flags.

    Returns
    -------
    bitmask : int or None
        Returns an integer bit mask formed from the input bit value or `None`
        if input `bit_flags` parameter is `None` or an empty string.
        If input string value was prepended with '~' (or `flip_bits` was set
        to `True`), then returned value will have its bits flipped
        (inverse mask).

    Examples
    --------
        >>> from ccdproc.extern.bitfield import interpret_bit_flags
        >>> "{0:016b}".format(0xFFFF & interpret_bit_flags(28))
        '0000000000011100'
        >>> "{0:016b}".format(0xFFFF & interpret_bit_flags('4,8,16'))
        '0000000000011100'
        >>> "{0:016b}".format(0xFFFF & interpret_bit_flags('~4,8,16'))
        '1111111111100011'
        >>> "{0:016b}".format(0xFFFF & interpret_bit_flags('~(4+8+16)'))
        '1111111111100011'
        >>> "{0:016b}".format(0xFFFF & interpret_bit_flags([4, 8, 16]))
        '0000000000011100'
        >>> "{0:016b}".format(0xFFFF & interpret_bit_flags([4, 8, 16],
        ... flip_bits=True))
        '1111111111100011'

    """
    has_flip_bits = flip_bits is not None
    flip_bits = bool(flip_bits)
    allow_non_flags = False

    if _is_int(bit_flags):
        return (~int(bit_flags) if flip_bits else bit_flags)

    elif bit_flags is None:
        if has_flip_bits:
            raise TypeError(
                "Keyword argument 'flip_bits' must be set to 'None' when "
                "input 'bit_flags' is None."
            )
        return None

    elif isinstance(bit_flags, six.string_types):
        if has_flip_bits:
            raise TypeError(
                "Keyword argument 'flip_bits' is not permitted for "
                "comma-separated string lists of bit flags. Prepend '~' to "
                "the string to indicate bit-flipping."
            )

        bit_flags = str(bit_flags).strip()

        if bit_flags.upper() in ['', 'NONE', 'INDEF']:
            return None

        # check whether bitwise-NOT is present and if it is, check that it is
        # in the first position:
        bitflip_pos = bit_flags.find('~')

        if bitflip_pos == 0:
            flip_bits = True
            bit_flags = bit_flags[1:].lstrip()
        else:
            if bitflip_pos > 0:
                raise ValueError("Bitwise-NOT must precede bit flag list.")
            flip_bits = False

        # basic check for correct use of parenthesis:
        while True:
            nlpar = bit_flags.count('(')
            nrpar = bit_flags.count(')')

            if nlpar == 0 and nrpar == 0:
                break

            if nlpar != nrpar:
                raise ValueError("Unbalanced parantheses in bit flag list.")

            lpar_pos = bit_flags.find('(')
            rpar_pos = bit_flags.rfind(')')

            if lpar_pos > 0 or rpar_pos < (len(bit_flags) - 1):
                raise ValueError("Incorrect syntax (incorrect use of "
                                 "parenthesis) in bit flag list.")

            # Strip one matched pair of outer parentheses and re-check.
            bit_flags = bit_flags[1:-1].strip()

        if ',' in bit_flags:
            bit_flags = bit_flags.split(',')

        elif '+' in bit_flags:
            bit_flags = bit_flags.split('+')

        else:
            if bit_flags == '':
                raise ValueError(
                    "Empty bit flag lists not allowed when either "
                    "bitwise-NOT or parenthesis are present."
                )
            bit_flags = [bit_flags]

        # A single token may be an arbitrary integer bitmask rather than a
        # power-of-two flag.
        allow_non_flags = len(bit_flags) == 1

    elif hasattr(bit_flags, '__iter__'):
        if not all([_is_int(flag) for flag in bit_flags]):
            raise TypeError("Each bit flag in a list must be an integer.")

    else:
        raise TypeError("Unsupported type for argument 'bit_flags'.")

    bitset = set(map(int, bit_flags))

    if len(bitset) != len(bit_flags):
        warnings.warn("Duplicate bit flags will be ignored")

    bitmask = 0
    for v in bitset:
        if not is_bit_flag(v) and not allow_non_flags:
            raise ValueError("Input list contains invalid (not powers of "
                             "two) bit flags")
        bitmask += v

    if flip_bits:
        bitmask = ~bitmask

    return bitmask


def bitfield_to_boolean_mask(bitfield, ignore_flags=0, flip_bits=None,
                             good_mask_value=True, dtype=np.bool_):
    """
    bitfield_to_boolean_mask(bitfield, ignore_flags=None, flip_bits=None, \
good_mask_value=True, dtype=numpy.bool\_)

    Converts an array of bit fields to a boolean (or integer) mask array
    according to a bitmask constructed from the supplied bit flags (see
    ``ignore_flags`` parameter).

    This function is particularly useful to convert data quality arrays to
    boolean masks with selective filtering of DQ flags.

    Parameters
    ----------
    bitfield : numpy.ndarray
        An array of bit flags. By default, values different from zero are
        interpreted as "bad" values and values equal to zero are considered
        as "good" values. However, see ``ignore_flags`` parameter on how to
        selectively ignore some bits in the ``bitfield`` array data.

    ignore_flags : int, str, list, None (Default = 0)
        An integer bitmask, a Python list of bit flags, a comma- or
        '+'-separated string list of integer bit flags that indicate what
        bits in the input ``bitfield`` should be *ignored* (i.e., zeroed), or
        `None`.

        |   Setting ``ignore_flags`` to `None` effectively will make
            `bitfield_to_boolean_mask` interpret all ``bitfield`` elements
            as "good" regardless of their value.
| When ``ignore_flags`` argument is an integer bitmask, it will be combined using bitwise-NOT and bitwise-AND with each element of the input ``bitfield`` array (``~ignore_flags & bitfield``). If the resultant bitfield element is non-zero, that element will be interpreted as a "bad" in the output boolean mask and it will be interpreted as "good" otherwise. ``flip_bits`` parameter may be used to flip the bits (``bitwise-NOT``) of the bitmask thus effectively changing the meaning of the ``ignore_flags`` parameter from "ignore" to "use only" these flags. .. note:: Setting ``ignore_flags`` to 0 effectively will assume that all non-zero elements in the input ``bitfield`` array are to be interpreted as "bad". | When ``ignore_flags`` argument is an Python list of integer bit flags, these flags are added together to create an integer bitmask. Each item in the list must be a flag, i.e., an integer that is an integer power of 2. In order to flip the bits of the resultant bitmask, use ``flip_bits`` parameter. | Alternatively, ``ignore_flags`` may be a string of comma- or '+'-separated list of integer bit flags that should be added together to create an integer bitmask. For example, both ``'4,8'`` and ``'4+8'`` are equivalent and indicate that bit flags 4 and 8 in the input ``bitfield`` array should be ignored when generating boolean mask. .. note:: ``'None'``, ``'INDEF'``, and empty (or all white space) strings are special values of string ``ignore_flags`` that are interpreted as `None`. .. note:: Each item in the list must be a flag, i.e., an integer that is an integer power of 2. In addition, for convenience, an arbitrary **single** integer is allowed and it will be interpretted as an integer bitmask. For example, instead of ``'4,8'`` one could simply provide string ``'12'``. .. 
note:: When ``ignore_flags`` is a `str` and when it is prepended with '~', then the meaning of ``ignore_flags`` parameters will be reversed: now it will be interpreted as a list of bit flags to be *used* (or *not ignored*) when deciding which elements of the input ``bitfield`` array are "bad". Following this convention, an ``ignore_flags`` string value of ``'~0'`` would be equivalent to setting ``ignore_flags=None``. .. warning:: Because prepending '~' to a string ``ignore_flags`` is equivalent to setting ``flip_bits`` to `True`, ``flip_bits`` cannot be used with string ``ignore_flags`` and it must be set to `None`. flip_bits : bool, None (Default = None) Specifies whether or not to invert the bits of the bitmask either supplied directly through ``ignore_flags`` parameter or built from the bit flags passed through ``ignore_flags`` (only when bit flags are passed as Python lists of integer bit flags). Occasionally, it may be useful to *consider only specific bit flags* in the ``bitfield`` array when creating a boolean mask as opposite to *ignoring* specific bit flags as ``ignore_flags`` behaves by default. This can be achieved by inverting/flipping the bits of the bitmask created from ``ignore_flags`` flags which effectively changes the meaning of the ``ignore_flags`` parameter from "ignore" to "use only" these flags. Setting ``flip_bits`` to `None` means that no bit flipping will be performed. Bit flipping for string lists of bit flags must be specified by prepending '~' to string bit flag lists (see documentation for ``ignore_flags`` for more details). .. warning:: This parameter can be set to either `True` or `False` **ONLY** when ``ignore_flags`` is either an integer bitmask or a Python list of integer bit flags. When ``ignore_flags`` is either `None` or a string list of flags, ``flip_bits`` **MUST** be set to `None`. 
good_mask_value : int, bool (Default = True) This parameter is used to derive the values that will be assigned to the elements in the output boolean mask array that correspond to the "good" bit fields (that are 0 after zeroing bits specified by ``ignore_flags``) in the input ``bitfield`` array. When ``good_mask_value`` is non-zero or `True` then values in the output boolean mask array corresponding to "good" bit fields in ``bitfield`` will be `True` (if ``dtype`` is `numpy.bool_`) or 1 (if ``dtype`` is of numerical type) and values of corresponding to "bad" flags will be `False` (or 0). When ``good_mask_value`` is zero or `False` then the values in the output boolean mask array corresponding to "good" bit fields in ``bitfield`` will be `False` (if ``dtype`` is `numpy.bool_`) or 0 (if ``dtype`` is of numerical type) and values of corresponding to "bad" flags will be `True` (or 1). dtype : data-type (Default = numpy.bool\_) The desired data-type for the output binary mask array. Returns ------- mask : numpy.ndarray Returns an array of the same dimensionality as the input ``bitfield`` array whose elements can have two possible values, e.g., `True` or `False` (or 1 or 0 for integer ``dtype``) according to values of to the input ``bitfield`` elements, ``ignore_flags`` parameter, and the ``good_mask_value`` parameter. Examples -------- >>> from ccdproc.extern import bitfield >>> import numpy as np >>> dqbits = np.asarray([[0, 0, 1, 2, 0, 8, 12, 0], ... [10, 4, 0, 0, 0, 16, 6, 0]]) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags=0, ... dtype=int) array([[1, 1, 0, 0, 1, 0, 0, 1], [0, 0, 1, 1, 1, 0, 0, 1]]) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags=0, ... dtype=bool) array([[ True, True, False, False, True, False, False, True], [False, False, True, True, True, False, False, True]], dtype=bool) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags=6, ... 
good_mask_value=0, dtype=int) array([[0, 0, 1, 0, 0, 1, 1, 0], [1, 0, 0, 0, 0, 1, 0, 0]]) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags=~6, ... good_mask_value=0, dtype=int) array([[0, 0, 0, 1, 0, 0, 1, 0], [1, 1, 0, 0, 0, 0, 1, 0]]) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags=6, dtype=int, ... flip_bits=True, good_mask_value=0) array([[0, 0, 0, 1, 0, 0, 1, 0], [1, 1, 0, 0, 0, 0, 1, 0]]) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags='~(2+4)', ... good_mask_value=0, dtype=int) array([[0, 0, 0, 1, 0, 0, 1, 0], [1, 1, 0, 0, 0, 0, 1, 0]]) >>> bitfield.bitfield_to_boolean_mask(dqbits, ignore_flags=[2, 4], ... flip_bits=True, good_mask_value=0, ... dtype=int) array([[0, 0, 0, 1, 0, 0, 1, 0], [1, 1, 0, 0, 0, 0, 1, 0]]) """ bitfield = np.asarray(bitfield) if not np.issubdtype(bitfield.dtype, np.integer): raise TypeError("Input bitfield array must be of integer type.") ignore_mask = interpret_bit_flags(ignore_flags, flip_bits=flip_bits) if ignore_mask is None: if good_mask_value: mask = np.ones_like(bitfield, dtype=dtype) else: mask = np.zeros_like(bitfield, dtype=dtype) return mask ignore_mask = np.bitwise_not(bitfield.dtype.type(ignore_mask)) mask = np.empty_like(bitfield, dtype=np.bool_) np.bitwise_and(bitfield, ignore_mask, out=mask, casting='unsafe') if good_mask_value: np.logical_not(mask, out=mask) return mask.astype(dtype=dtype, subok=False, copy=False) ccdproc-1.3.0.post1/ccdproc/utils/0000775000175000017500000000000013207623133020432 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/ccdproc/utils/__init__.py0000664000175000017500000000030713207605210022537 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst # This sub-module is destined for common non-package specific utility # functions that will ultimately be merged into `astropy.utils` ccdproc-1.3.0.post1/ccdproc/utils/tests/0000775000175000017500000000000013207623133021574 5ustar 
# none of these are properly enclosed in brackets; is an error raised?
@pytest.mark.parametrize('arg', ['1:2', '[1:2', '1:2]'])
def test_slice_from_string_needs_enclosing_brackets(arg):
    with pytest.raises(ValueError):
        slice_from_string(arg)


@pytest.mark.parametrize('start,stop,step', [
    (None, None, -1),
    (5, 10, None),
    (None, 25, None),
    (2, 30, 3),
    (30, None, -2),
    (None, None, None)
])
def test_slice_from_string_1d(start, stop, step):
    data = np.zeros([100])

    def as_text(value):
        # Falsy bounds (None) turn into an empty field.
        return str(value) if value else ''

    pieces = [as_text(start), as_text(stop)]
    if as_text(step):
        pieces.append(as_text(step))
    spec = '[' + ':'.join(pieces) + ']'
    converted = slice_from_string(spec)
    np.testing.assert_array_equal(data[slice(start, stop, step)],
                                  data[converted])


@pytest.mark.parametrize('arg', [' [ 1: 45]', '[ 1 :4 5]', ' [1:45] '])
def test_slice_from_string_spaces(arg):
    data = np.zeros([100])
    np.testing.assert_array_equal(data[1:45],
                                  data[slice_from_string(arg)])


def test_slice_from_string_2d():
    data = np.zeros([100, 200])
    # A handful of hand-written cases; parametrizing these seemed like
    # more trouble than it is worth.
    converted = slice_from_string('[:-1:2, :]')
    np.testing.assert_array_equal(data[:-1:2, :], data[converted])

    converted = slice_from_string('[:, 15:90]')
    np.testing.assert_array_equal(data[:, 15:90], data[converted])

    converted = slice_from_string('[10:80:5, 15:90:-1]')
    np.testing.assert_array_equal(data[10:80:5, 15:90:-1], data[converted])


def test_slice_from_string_fits_style():
    converted = slice_from_string('[1:5, :]', fits_convention=True)
    # The axis order is flipped, so the *first* slice should be the
    # everything-included one.
    assert (converted[0].start is None and
            converted[0].stop is None and
            converted[0].step is None)
    # Second slice: start reduced by 1, stop left as-is.
    assert converted[1].start == 0 and converted[1].stop == 5

    converted = slice_from_string('[1:10:2, 4:5:2]', fits_convention=True)
    assert converted[0] == slice(3, 5, 2)
    assert converted[1] == slice(0, 10, 2)


def test_slice_from_string_fits_inverted():
    converted = slice_from_string('[20:10:2, 10:5, 5:4]',
                                  fits_convention=True)
    assert converted[0] == slice(4, 2, -1)
    assert converted[1] == slice(9, 3, -1)
    assert converted[2] == slice(19, 8, -2)

    # Special cases for inverted slices whose FITS stop index is 1 or 2.
    converted = slice_from_string('[20:1:4, 21:1:4, 22:2:4, 2:1]',
                                  fits_convention=True)
    assert converted[0] == slice(1, None, -1)
    assert converted[1] == slice(21, 0, -4)
    assert converted[2] == slice(20, None, -4)
    assert converted[3] == slice(19, None, -4)


def test_slice_from_string_empty():
    assert len(slice_from_string('')) == 0


def test_slice_from_string_bad_fits_slice():
    # A lower bound of 0 is illegal in the one-based FITS convention.
    with pytest.raises(ValueError):
        slice_from_string('[0:10, 1:5]', fits_convention=True)
    # Same as above, with the axes swapped.
    with pytest.raises(ValueError):
        slice_from_string('[1:5, 0:10]', fits_convention=True)
    # A negative ending index is rejected too.
    with pytest.raises(ValueError):
        slice_from_string('[1:10, 10:-1]', fits_convention=True)
def slice_from_string(string, fits_convention=False):
    """
    Convert a string to a tuple of slices.

    Parameters
    ----------
    string : str
        A string that can be converted to a slice. Anything that is valid
        numpy slicing syntax is accepted; it must be enclosed in matching
        square brackets, and all spaces are removed before parsing.

    fits_convention : bool, optional
        If True, interpret the string using the FITS convention: indices
        are one-based and the first axis varies fastest. FITS wildcards
        (``*`` for a whole axis, ``-*`` for a reversed axis, optionally
        with a step) are also understood.

    Returns
    -------
    slice_tuple : tuple of slice objects
        A tuple suitable for indexing a numpy.array.

    Raises
    ------
    ValueError
        If the string is not enclosed in square brackets, or (in FITS
        mode) contains an illegal index.

    Examples
    --------
    >>> import numpy as np
    >>> arr1d = np.arange(5)
    >>> arr1d[slice_from_string('[2:5]')]
    array([2, 3, 4])
    >>> arr1d[slice_from_string('[ : : -2] ')]
    array([4, 2, 0])
    """
    spec = string.replace(' ', '')

    if not spec:
        return ()

    if not (spec.startswith('[') and spec.endswith(']')):
        raise ValueError('Slice string must be enclosed in square brackets.')

    spec = spec.strip('[]')
    if fits_convention:
        # Rewrite FITS wildcards as ordinary slice syntax; the most
        # specific pattern must be handled first.
        for wildcard, plain in (('-*:', '::-'),    # flipped axis with step
                                ('-*', '::-1'),    # flipped axis
                                ('*', ':')):       # whole axis
            spec = spec.replace(wildcard, plain)

    slices = []
    for axis_spec in spec.split(','):
        bounds = [int(field) if field else None
                  for field in axis_spec.split(':')]
        slices.append(slice(*bounds))

    if fits_convention:
        slices = _defitsify_slice(slices)

    return tuple(slices)
""" python_slice = [] for a_slice in slices[::-1]: new_start = a_slice.start - 1 if a_slice.start is not None else None if new_start is not None and new_start < 0: raise ValueError("Smallest permissible FITS index is 1") if a_slice.stop is not None and a_slice.stop < 0: raise ValueError("Negative final index not allowed for FITS slice") new_slice = slice(new_start, a_slice.stop, a_slice.step) if (a_slice.start is not None and a_slice.stop is not None and a_slice.start > a_slice.stop): # FITS use a positive step index when dimension are inverted new_step = -1 if a_slice.step is None else -a_slice.step # Special case to prevent -1 as slice stop value new_stop = None if a_slice.stop == 1 else a_slice.stop-2 new_slice = slice(new_start, new_stop, new_step) python_slice.append(new_slice) return python_slice ccdproc-1.3.0.post1/ccdproc/combiner.py0000664000175000017500000007507713207605210021456 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """This module implements the combiner class.""" from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np from numpy import ma from .ccddata import CCDData from .core import sigma_func from astropy.nddata import StdDevUncertainty from astropy import log import math __all__ = ['Combiner', 'combine'] class Combiner(object): """ A class for combining CCDData objects. The Combiner class is used to combine together `~astropy.nddata.CCDData` objects including the method for combining the data, rejecting outlying data, and weighting used for combining frames. Parameters ----------- ccd_list : list A list of CCDData objects that will be combined together. dtype : str or `numpy.dtype` or None, optional Allows user to set dtype. See `numpy.array` ``dtype`` parameter description. If ``None`` it uses ``np.float64``. Default is ``None``. 
Raises ------ TypeError If the ``ccd_list`` are not `~astropy.nddata.CCDData` objects, have different units, or are different shapes. Examples -------- The following is an example of combining together different `~astropy.nddata.CCDData` objects:: >>> import numpy as np >>> import astropy.units as u >>> from ccdproc import Combiner, CCDData >>> ccddata1 = CCDData(np.ones((4, 4)), unit=u.adu) >>> ccddata2 = CCDData(np.zeros((4, 4)), unit=u.adu) >>> ccddata3 = CCDData(np.ones((4, 4)), unit=u.adu) >>> c = Combiner([ccddata1, ccddata2, ccddata3]) >>> ccdall = c.average_combine() >>> ccdall # doctest: +FLOAT_CMP CCDData([[ 0.66666667, 0.66666667, 0.66666667, 0.66666667], [ 0.66666667, 0.66666667, 0.66666667, 0.66666667], [ 0.66666667, 0.66666667, 0.66666667, 0.66666667], [ 0.66666667, 0.66666667, 0.66666667, 0.66666667]]) """ def __init__(self, ccd_list, dtype=None): if ccd_list is None: raise TypeError("ccd_list should be a list of CCDData objects.") if dtype is None: dtype = np.float64 default_shape = None default_unit = None for ccd in ccd_list: # raise an error if the objects aren't CCDData objects if not isinstance(ccd, CCDData): raise TypeError( "ccd_list should only contain CCDData objects.") # raise an error if the shape is different if default_shape is None: default_shape = ccd.shape else: if not (default_shape == ccd.shape): raise TypeError("CCDData objects are not the same size.") # raise an error if the units are different if default_unit is None: default_unit = ccd.unit else: if not (default_unit == ccd.unit): raise TypeError("CCDData objects don't the same unit.") self.ccd_list = ccd_list self.unit = default_unit self.weights = None self._dtype = dtype # set up the data array new_shape = (len(ccd_list),) + default_shape self.data_arr = ma.masked_all(new_shape, dtype=dtype) # populate self.data_arr for i, ccd in enumerate(ccd_list): self.data_arr[i] = ccd.data if ccd.mask is not None: self.data_arr.mask[i] = ccd.mask else: self.data_arr.mask[i] = 
ma.zeros(default_shape) # Must be after self.data_arr is defined because it checks the # length of the data array. self.scaling = None @property def dtype(self): return self._dtype @property def weights(self): """ Weights used when combining the `~astropy.nddata.CCDData` objects. Parameters ---------- weight_values : `numpy.ndarray` or None An array with the weight values. The dimensions should match the the dimensions of the data arrays being combined. """ return self._weights @weights.setter def weights(self, value): if value is not None: if isinstance(value, np.ndarray): if value.shape == self.data_arr.data.shape: self._weights = value else: raise ValueError( "dimensions of weights do not match data.") else: raise TypeError("weights must be a numpy.ndarray.") else: self._weights = None @property def scaling(self): """ Scaling factor used in combining images. Parameters ---------- scale : function or `numpy.ndarray`-like or None, optional Images are multiplied by scaling prior to combining them. Scaling may be either a function, which will be applied to each image to determine the scaling factor, or a list or array whose length is the number of images in the `~ccdproc.Combiner`. """ return self._scaling @scaling.setter def scaling(self, value): if value is None: self._scaling = value else: n_images = self.data_arr.data.shape[0] if callable(value): self._scaling = [value(self.data_arr[i]) for i in range(n_images)] self._scaling = np.array(self._scaling) else: try: len(value) == n_images self._scaling = np.array(value) except TypeError: raise TypeError("scaling must be a function or an array " "the same length as the number of images.") # reshape so that broadcasting occurs properly for i in range(len(self.data_arr.data.shape)-1): self._scaling = self.scaling[:, np.newaxis] # set up IRAF-like minmax clipping def clip_extrema(self, nlow=0, nhigh=0): """Mask pixels using an IRAF-like minmax clipping algorithm. 
The algorithm will mask the lowest nlow values and the highest nhigh values before combining the values to make up a single pixel in the resulting image. For example, the image will be a combination of Nimages-nlow-nhigh pixel values instead of the combination of Nimages. Parameters ----------- nlow : int or None, optional If not None, the number of low values to reject from the combination. Default is 0. nhigh : int or None, optional If not None, the number of high values to reject from the combination. Default is 0. Notes ----- Note that this differs slightly from the nominal IRAF imcombine behavior when other masks are in use. For example, if ``nhigh>=1`` and any pixel is already masked for some other reason, then this algorithm will count the masking of that pixel toward the count of nhigh masked pixels. Here is a copy of the relevant IRAF help text [0]_: nlow = 1, nhigh = (minmax) The number of low and high pixels to be rejected by the "minmax" algorithm. These numbers are converted to fractions of the total number of input images so that if no rejections have taken place the specified number of pixels are rejected while if pixels have been rejected by masking, thresholding, or nonoverlap, then the fraction of the remaining pixels, truncated to an integer, is used. References ---------- .. [0] image.imcombine help text. http://stsdas.stsci.edu/cgi-bin/gethelp.cgi?imcombine """ if nlow is None: nlow = 0 if nhigh is None: nhigh = 0 argsorted = np.argsort(self.data_arr.data, axis=0) mg = np.mgrid[[slice(ndim) for i, ndim in enumerate(self.data_arr.shape) if i > 0]] for i in range(-1*nhigh, nlow): # create a tuple with the indices where = tuple([argsorted[i, :, :].ravel()] + [i.ravel() for i in mg]) self.data_arr.mask[where] = True # set up min/max clipping algorithms def minmax_clipping(self, min_clip=None, max_clip=None): """Mask all pixels that are below min_clip or above max_clip. 
Parameters ----------- min_clip : float or None, optional If not None, all pixels with values below min_clip will be masked. Default is ``None``. max_clip : float or None, optional If not None, all pixels with values above min_clip will be masked. Default is ``None``. """ if min_clip is not None: mask = (self.data_arr < min_clip) self.data_arr.mask[mask] = True if max_clip is not None: mask = (self.data_arr > max_clip) self.data_arr.mask[mask] = True # set up sigma clipping algorithms def sigma_clipping(self, low_thresh=3, high_thresh=3, func=ma.mean, dev_func=ma.std): """ Pixels will be rejected if they have deviations greater than those set by the threshold values. The algorithm will first calculated a baseline value using the function specified in func and deviation based on dev_func and the input data array. Any pixel with a deviation from the baseline value greater than that set by high_thresh or lower than that set by low_thresh will be rejected. Parameters ----------- low_thresh : positive float or None, optional Threshold for rejecting pixels that deviate below the baseline value. If negative value, then will be convert to a positive value. If None, no rejection will be done based on low_thresh. Default is 3. high_thresh : positive float or None, optional Threshold for rejecting pixels that deviate above the baseline value. If None, no rejection will be done based on high_thresh. Default is 3. func : function, optional Function for calculating the baseline values (i.e. `numpy.ma.mean` or `numpy.ma.median`). This should be a function that can handle `numpy.ma.MaskedArray` objects. Default is `numpy.ma.mean`. dev_func : function, optional Function for calculating the deviation from the baseline value (i.e. `numpy.ma.std`). This should be a function that can handle `numpy.ma.MaskedArray` objects. Default is `numpy.ma.std`. 
""" # setup baseline values baseline = func(self.data_arr, axis=0) dev = dev_func(self.data_arr, axis=0) # reject values if low_thresh is not None: # check for negative numbers in low_thresh if low_thresh < 0: low_thresh = abs(low_thresh) mask = (self.data_arr - baseline < -low_thresh * dev) self.data_arr.mask[mask] = True if high_thresh is not None: mask = (self.data_arr - baseline > high_thresh * dev) self.data_arr.mask[mask] = True # set up the combining algorithms def median_combine(self, median_func=ma.median, scale_to=None, uncertainty_func=sigma_func): """ Median combine a set of arrays. A `~astropy.nddata.CCDData` object is returned with the data property set to the median of the arrays. If the data was masked or any data have been rejected, those pixels will not be included in the median. A mask will be returned, and if a pixel has been rejected in all images, it will be masked. The uncertainty of the combined image is set by 1.4826 times the median absolute deviation of all input images. Parameters ---------- median_func : function, optional Function that calculates median of a `numpy.ma.MaskedArray`. Default is `numpy.ma.median`. scale_to : float or None, optional Scaling factor used in the average combined image. If given, it overrides `scaling`. Defaults to None. uncertainty_func : function, optional Function to calculate uncertainty. Defaults is `~ccdproc.sigma_func`. Returns ------- combined_image: `~astropy.nddata.CCDData` CCDData object based on the combined input of CCDData objects. Warnings -------- The uncertainty currently calculated using the median absolute deviation does not account for rejected pixels. 
""" if scale_to is not None: scalings = scale_to elif self.scaling is not None: scalings = self.scaling else: scalings = 1.0 # set the data data = median_func(scalings * self.data_arr, axis=0) # set the mask masked_values = self.data_arr.mask.sum(axis=0) mask = (masked_values == len(self.data_arr)) # set the uncertainty uncertainty = uncertainty_func(self.data_arr.data, axis=0) # Divide uncertainty by the number of pixel (#309) # TODO: This should be np.sqrt(len(self.data_arr) - masked_values) but # median_absolute_deviation ignores the mask... so it # would yield inconsistent results. uncertainty /= math.sqrt(len(self.data_arr)) # Convert uncertainty to plain numpy array (#351) # There is no need to care about potential masks because the # uncertainty was calculated based on the data so potential masked # elements are also masked in the data. No need to keep two identical # masks. uncertainty = np.asarray(uncertainty) # create the combined image with a dtype matching the combiner combined_image = CCDData(np.asarray(data.data, dtype=self.dtype), mask=mask, unit=self.unit, uncertainty=StdDevUncertainty(uncertainty)) # update the meta data combined_image.meta['NCOMBINE'] = len(self.data_arr) # return the combined image return combined_image def average_combine(self, scale_func=ma.average, scale_to=None, uncertainty_func=ma.std): """ Average combine together a set of arrays. A `~astropy.nddata.CCDData` object is returned with the data property set to the average of the arrays. If the data was masked or any data have been rejected, those pixels will not be included in the average. A mask will be returned, and if a pixel has been rejected in all images, it will be masked. The uncertainty of the combined image is set by the standard deviation of the input images. Parameters ---------- scale_func : function, optional Function to calculate the average. Defaults to `numpy.ma.average`. scale_to : float or None, optional Scaling factor used in the average combined image. 
If given, it overrides `scaling`. Defaults to ``None``. uncertainty_func : function, optional Function to calculate uncertainty. Defaults to `numpy.ma.std`. Returns ------- combined_image: `~astropy.nddata.CCDData` CCDData object based on the combined input of CCDData objects. """ if scale_to is not None: scalings = scale_to elif self.scaling is not None: scalings = self.scaling else: scalings = 1.0 # set up the data data, wei = scale_func(scalings * self.data_arr, axis=0, weights=self.weights, returned=True) # set up the mask masked_values = self.data_arr.mask.sum(axis=0) mask = (masked_values == len(self.data_arr)) # set up the deviation uncertainty = uncertainty_func(self.data_arr, axis=0) # Divide uncertainty by the number of pixel (#309) uncertainty /= np.sqrt(len(self.data_arr) - masked_values) # Convert uncertainty to plain numpy array (#351) uncertainty = np.asarray(uncertainty) # create the combined image with a dtype that matches the combiner combined_image = CCDData(np.asarray(data.data, dtype=self.dtype), mask=mask, unit=self.unit, uncertainty=StdDevUncertainty(uncertainty)) # update the meta data combined_image.meta['NCOMBINE'] = len(self.data_arr) # return the combined image return combined_image def sum_combine(self, sum_func=ma.sum, scale_to=None, uncertainty_func=ma.std): """ Sum combine together a set of arrays. A `~astropy.nddata.CCDData` object is returned with the data property set to the sum of the arrays. If the data was masked or any data have been rejected, those pixels will not be included in the sum. A mask will be returned, and if a pixel has been rejected in all images, it will be masked. The uncertainty of the combined image is set by the multiplication of summation of standard deviation of the input by square root of number of images. Because sum_combine returns 'pure sum' with masked pixels ignored, if re-scaled sum is needed, average_combine have to be used with multiplication by number of images combined. 
Parameters ---------- sum_func : function, optional Function to calculate the sum. Defaults to `numpy.ma.sum`. scale_to : float or None, optional Scaling factor used in the sum combined image. If given, it overrides `scaling`. Defaults to ``None``. uncertainty_func : function, optional Function to calculate uncertainty. Defaults to `numpy.ma.std`. Returns ------- combined_image: `~astropy.nddata.CCDData` CCDData object based on the combined input of CCDData objects. """ if scale_to is not None: scalings = scale_to elif self.scaling is not None: scalings = self.scaling else: scalings = 1.0 # set up the data data = sum_func(scalings * self.data_arr, axis=0) # set up the mask masked_values = self.data_arr.mask.sum(axis=0) mask = (masked_values == len(self.data_arr)) # set up the deviation uncertainty = uncertainty_func(self.data_arr, axis=0) # Divide uncertainty by the number of pixel (#309) uncertainty /= np.sqrt(len(self.data_arr) - masked_values) # Convert uncertainty to plain numpy array (#351) uncertainty = np.asarray(uncertainty) # Multiply uncertainty by square root of the number of images uncertainty *= len(self.data_arr) - masked_values # create the combined image with a dtype that matches the combiner combined_image = CCDData(np.asarray(data.data, dtype=self.dtype), mask=mask, unit=self.unit, uncertainty=StdDevUncertainty(uncertainty)) # update the meta data combined_image.meta['NCOMBINE'] = len(self.data_arr) # return the combined image return combined_image def combine(img_list, output_file=None, method='average', weights=None, scale=None, mem_limit=16e9, clip_extrema=False, nlow=1, nhigh=1, minmax_clip=False, minmax_clip_min=None, minmax_clip_max=None, sigma_clip=False, sigma_clip_low_thresh=3, sigma_clip_high_thresh=3, sigma_clip_func=ma.mean, sigma_clip_dev_func=ma.std, dtype=None, combine_uncertainty_function=None, **ccdkwargs): """ Convenience function for combining multiple images. 
Parameters ----------- img_list : `numpy.ndarray`, list or str A list of fits filenames or `~astropy.nddata.CCDData` objects that will be combined together. Or a string of fits filenames separated by comma ",". output_file : str or None, optional Optional output fits file-name to which the final output can be directly written. Default is ``None``. method : str, optional Method to combine images: - ``'average'`` : To combine by calculating the average. - ``'median'`` : To combine by calculating the median. - ``'sum'`` : To combine by calculating the sum. Default is ``'average'``. weights : `numpy.ndarray` or None, optional Weights to be used when combining images. An array with the weight values. The dimensions should match the the dimensions of the data arrays being combined. Default is ``None``. scale : function or `numpy.ndarray`-like or None, optional Scaling factor to be used when combining images. Images are multiplied by scaling prior to combining them. Scaling may be either a function, which will be applied to each image to determine the scaling factor, or a list or array whose length is the number of images in the `Combiner`. Default is ``None``. mem_limit : float, optional Maximum memory which should be used while combining (in bytes). Default is ``16e9``. clip_extrema : bool, optional Set to True if you want to mask pixels using an IRAF-like minmax clipping algorithm. The algorithm will mask the lowest nlow values and the highest nhigh values before combining the values to make up a single pixel in the resulting image. For example, the image will be a combination of Nimages-low-nhigh pixel values instead of the combination of Nimages. 
Parameters below are valid only when clip_extrema is set to True, see :meth:`Combiner.clip_extrema` for the parameter description: - ``nlow`` : int or None, optional - ``nhigh`` : int or None, optional minmax_clip : bool, optional Set to True if you want to mask all pixels that are below minmax_clip_min or above minmax_clip_max before combining. Default is ``False``. Parameters below are valid only when minmax_clip is set to True, see :meth:`Combiner.minmax_clipping` for the parameter description: - ``minmax_clip_min`` : float or None, optional - ``minmax_clip_max`` : float or None, optional sigma_clip : bool, optional Set to True if you want to reject pixels which have deviations greater than those set by the threshold values. The algorithm will first calculated a baseline value using the function specified in func and deviation based on sigma_clip_dev_func and the input data array. Any pixel with a deviation from the baseline value greater than that set by sigma_clip_high_thresh or lower than that set by sigma_clip_low_thresh will be rejected. Default is ``False``. Parameters below are valid only when sigma_clip is set to True. See :meth:`Combiner.sigma_clipping` for the parameter description. - ``sigma_clip_low_thresh`` : positive float or None, optional - ``sigma_clip_high_thresh`` : positive float or None, optional - ``sigma_clip_func`` : function, optional - ``sigma_clip_dev_func`` : function, optional dtype : str or `numpy.dtype` or None, optional The intermediate and resulting ``dtype`` for the combined CCDs. See `ccdproc.Combiner`. If ``None`` this is set to ``float64``. Default is ``None``. combine_uncertainty_function : callable, None, optional If ``None`` use the default uncertainty func when using average, median or sum combine, otherwise use the function provided. Default is ``None``. ccdkwargs : Other keyword arguments for `astropy.nddata.fits_ccddata_reader`. 
Returns ------- combined_image : `~astropy.nddata.CCDData` CCDData object based on the combined input of CCDData objects. """ if not isinstance(img_list, list): # If not a list, check whether it is a numpy ndarray or string of # filenames separated by comma if isinstance(img_list, np.ndarray): img_list = img_list.tolist() elif isinstance(img_list, str) and (',' in img_list): img_list = img_list.split(',') else: raise ValueError( "unrecognised input for list of images to combine.") # Select Combine function to call in Combiner if method == 'average': combine_function = 'average_combine' elif method == 'median': combine_function = 'median_combine' elif method == 'sum': combine_function = 'sum_combine' else: raise ValueError("unrecognised combine method : {0}.".format(method)) # First we create a CCDObject from first image for storing output if isinstance(img_list[0], CCDData): ccd = img_list[0].copy() else: # User has provided fits filenames to read from ccd = CCDData.read(img_list[0], **ccdkwargs) # If uncertainty_func is given for combine this will create an uncertainty # even if the originals did not have one. In that case we need to create # an empty placeholder. if ccd.uncertainty is None and combine_uncertainty_function is not None: ccd.uncertainty = StdDevUncertainty(np.zeros(ccd.data.shape)) if dtype is None: dtype = np.float64 # Convert the master image to the appropriate dtype so when overwriting it # later the data is not downcast and the memory consumption calculation # uses the internally used dtype instead of the original dtype. 
#391 if ccd.data.dtype != dtype: ccd.data = ccd.data.astype(dtype) size_of_an_img = ccd.data.nbytes try: size_of_an_img += ccd.uncertainty.array.nbytes # In case uncertainty is None it has no "array" and in case the "array" is # not a numpy array: except AttributeError: pass # Mask is enforced to be a numpy.array across astropy versions if ccd.mask is not None: size_of_an_img += ccd.mask.nbytes # flags is not necessarily a numpy array so do not fail with an # AttributeError in case something was set! # TODO: Flags are not taken into account in Combiner. This number is added # nevertheless for future compatibility. try: size_of_an_img += ccd.flags.nbytes except AttributeError: pass no_of_img = len(img_list) # determine the number of chunks to split the images into no_chunks = int((size_of_an_img * no_of_img) / mem_limit) + 1 if no_chunks > 1: log.info('splitting each image into {0} chunks to limit memory usage ' 'to {1} bytes.'.format(no_chunks, mem_limit)) xs, ys = ccd.data.shape # First we try to split only along fast x axis xstep = max(1, int(xs/no_chunks)) # If more chunks need to be created we split in Y axis for remaining number # of chunks ystep = max(1, int(ys / (1 + no_chunks - int(xs / xstep)))) # Dictionary of Combiner properties to set and methods to call before # combining to_set_in_combiner = {} to_call_in_combiner = {} # Define all the Combiner properties one wants to apply before combining # images if weights is not None: to_set_in_combiner['weights'] = weights if scale is not None: # If the scale is a function, then scaling function need to be applied # on full image to obtain scaling factor and create an array instead. 
if callable(scale): scalevalues = [] for image in img_list: if isinstance(image, CCDData): imgccd = image else: imgccd = CCDData.read(image, **ccdkwargs) scalevalues.append(scale(imgccd.data)) to_set_in_combiner['scaling'] = np.array(scalevalues) else: to_set_in_combiner['scaling'] = scale if clip_extrema: to_call_in_combiner['clip_extrema'] = {'nlow': nlow, 'nhigh': nhigh} if minmax_clip: to_call_in_combiner['minmax_clipping'] = {'min_clip': minmax_clip_min, 'max_clip': minmax_clip_max} if sigma_clip: to_call_in_combiner['sigma_clipping'] = { 'low_thresh': sigma_clip_low_thresh, 'high_thresh': sigma_clip_high_thresh, 'func': sigma_clip_func, 'dev_func': sigma_clip_dev_func} # Finally Run the input method on all the subsections of the image # and write final stitched image to ccd for x in range(0, xs, xstep): for y in range(0, ys, ystep): xend, yend = min(xs, x + xstep), min(ys, y + ystep) ccd_list = [] for image in img_list: if isinstance(image, CCDData): imgccd = image else: imgccd = CCDData.read(image, **ccdkwargs) # Trim image ccd_list.append(imgccd[x:xend, y:yend]) # Create Combiner for tile tile_combiner = Combiner(ccd_list, dtype=dtype) # Set all properties and call all methods for to_set in to_set_in_combiner: setattr(tile_combiner, to_set, to_set_in_combiner[to_set]) for to_call in to_call_in_combiner: getattr(tile_combiner, to_call)(**to_call_in_combiner[to_call]) # Finally call the combine algorithm combine_kwds = {} if combine_uncertainty_function is not None: combine_kwds['uncertainty_func'] = combine_uncertainty_function comb_tile = getattr(tile_combiner, combine_function)(**combine_kwds) # add it back into the master image ccd.data[x:xend, y:yend] = comb_tile.data if ccd.mask is not None: ccd.mask[x:xend, y:yend] = comb_tile.mask if ccd.uncertainty is not None: ccd.uncertainty.array[x:xend, y:yend] = comb_tile.uncertainty.array # Write fits file if filename was provided if output_file is not None: ccd.write(output_file) return ccd 
ccdproc-1.3.0.post1/ccdproc/log_meta.py0000664000175000017500000001214513207605210021432 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) from functools import wraps import inspect from itertools import chain import numpy as np from astropy.extern import six from astropy.nddata import NDData from astropy import units as u from astropy.io import fits import ccdproc # really only need Keyword from ccdproc __all__ = [] _LOG_ARGUMENT = 'add_keyword' _LOG_ARG_HELP = \ """ {arg} : str, `~ccdproc.Keyword` or dict-like, optional Item(s) to add to metadata of result. Set to False or None to completely disable logging. Default is to add a dictionary with a single item: The key is the name of this function and the value is a string containing the arguments the function was called with, except the value of this argument. """.format(arg=_LOG_ARGUMENT) def _insert_in_metadata_fits_safe(ccd, key, value): from .core import _short_names if key in _short_names: # This keyword was (hopefully) added by autologging but the # combination of it and its value not FITS-compliant in two # ways: the keyword name may be more than 8 characters and # the value may be too long. FITS cannot handle both of # those problems at once, so this fixes one of those # problems... # Shorten, sort of... short_name = _short_names[key] if isinstance(ccd.meta, fits.Header): ccd.meta['HIERARCH {0}'.format(key.upper())] = ( short_name, "Shortened name for ccdproc command") else: ccd.meta[key] = ( short_name, "Shortened name for ccdproc command") ccd.meta[short_name] = value else: ccd.meta[key] = value def log_to_metadata(func): """ Decorator that adds logging to ccdproc functions. The decorator adds the optional argument _LOG_ARGUMENT to function signature and updates the function's docstring to reflect that. 
It also sets the default value of the argument to the name of the function and the arguments it was called with. """ func.__doc__ = func.__doc__.format(log=_LOG_ARG_HELP) (original_args, varargs, keywords, defaults) = inspect.getargspec(func) # grab the names of positional arguments for use in automatic logging try: original_positional_args = original_args[:-len(defaults)] except TypeError: original_positional_args = original_args # Add logging keyword and its default value for docstring original_args.append(_LOG_ARGUMENT) try: defaults = list(defaults) except TypeError: defaults = [] defaults.append(True) signature_with_arg_added = inspect.formatargspec(original_args, varargs, keywords, defaults) signature_with_arg_added = "{0}{1}".format(func.__name__, signature_with_arg_added) func.__doc__ = "\n".join([signature_with_arg_added, func.__doc__]) @wraps(func) def wrapper(*args, **kwd): # Grab the logging keyword, if it is present. log_result = kwd.pop(_LOG_ARGUMENT, True) result = func(*args, **kwd) if not log_result: # No need to add metadata.... meta_dict = {} elif log_result is not True: meta_dict = _metadata_to_dict(log_result) else: # Logging is not turned off, but user did not provide a value # so construct one. 
key = func.__name__ all_args = chain(zip(original_positional_args, args), six.iteritems(kwd)) all_args = ["{0}={1}".format(name, _replace_array_with_placeholder(val)) for name, val in all_args] log_val = ", ".join(all_args) log_val = log_val.replace("\n", "") meta_dict = {key: log_val} for k, v in six.iteritems(meta_dict): _insert_in_metadata_fits_safe(result, k, v) return result return wrapper def _metadata_to_dict(arg): if isinstance(arg, six.string_types): # add the key, no value return {arg: None} elif isinstance(arg, ccdproc.Keyword): return {arg.name: arg.value} else: return arg def _replace_array_with_placeholder(value): return_type_not_value = False if isinstance(value, u.Quantity): return_type_not_value = not value.isscalar elif isinstance(value, (NDData, np.ndarray)): try: length = len(value) except TypeError: # value has no length... try: # ...but if it is NDData its .data will have a length length = len(value.data) except TypeError: # No idea what this data is, assume length is not 1 length = 42 return_type_not_value = length > 1 if return_type_not_value: return "<{0}>".format(value.__class__.__name__) else: return value ccdproc-1.3.0.post1/LICENSE.rst0000664000175000017500000000273713207605210017476 0ustar mseifertmseifert00000000000000Copyright (c) 2011-2017, Astropy-ccdproc Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
* Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ccdproc-1.3.0.post1/astropy_helpers/0000775000175000017500000000000013207623133021100 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/LICENSE.rst0000664000175000017500000000272313207611674022727 0ustar mseifertmseifert00000000000000Copyright (c) 2014, Astropy Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/0000775000175000017500000000000013207623133024323 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/test_helpers.py0000664000175000017500000000100313207611674027377 0ustar mseifertmseifert00000000000000from __future__ import (absolute_import, division, print_function, unicode_literals) import warnings from .commands.test import AstropyTest # noqa # Leaving this module here for now, but really it needn't exist # (and it's doubtful that any code depends on it anymore) warnings.warn('The astropy_helpers.test_helpers module is deprecated as ' 'of version 1.1.0; the AstropyTest command can be found in ' 'astropy_helpers.commands.test.', DeprecationWarning) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/__init__.py0000664000175000017500000000345413207611674026451 0ustar mseifertmseifert00000000000000try: from .version import version as __version__ from .version import githash as __githash__ except ImportError: __version__ = '' __githash__ = '' # If we've made it as far as importing astropy_helpers, we don't need # ah_bootstrap in sys.modules anymore. 
Getting rid of it is actually necessary # if the package we're installing has a setup_requires of another package that # uses astropy_helpers (and possibly a different version at that) # See https://github.com/astropy/astropy/issues/3541 import sys if 'ah_bootstrap' in sys.modules: del sys.modules['ah_bootstrap'] # Note, this is repeated from ah_bootstrap.py, but is here too in case this # astropy-helpers was upgraded to from an older version that did not have this # check in its ah_bootstrap. # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass import os # Ensure that all module-level code in astropy or other packages know that # we're in setup mode: if ('__main__' in sys.modules and hasattr(sys.modules['__main__'], '__file__')): filename = os.path.basename(sys.modules['__main__'].__file__) if filename.rstrip('co') == 'setup.py': if sys.version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = True del filename ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/setup_helpers.py0000664000175000017500000006635513207611674027605 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This module contains a number of utilities for use during setup/build/packaging that are useful to astropy as a whole. 
""" from __future__ import absolute_import, print_function import collections import os import re import subprocess import sys import traceback import warnings from distutils import log from distutils.dist import Distribution from distutils.errors import DistutilsOptionError, DistutilsModuleError from distutils.core import Extension from distutils.core import Command from distutils.command.sdist import sdist as DistutilsSdist from setuptools import find_packages as _find_packages from .distutils_helpers import (add_command_option, get_compiler_option, get_dummy_distribution, get_distutils_build_option, get_distutils_build_or_install_option) from .version_helpers import get_pkg_version_module from .utils import (walk_skip_hidden, import_file, extends_doc, resolve_name, AstropyDeprecationWarning) from .commands.build_ext import generate_build_ext_command from .commands.build_py import AstropyBuildPy from .commands.install import AstropyInstall from .commands.install_lib import AstropyInstallLib from .commands.register import AstropyRegister from .commands.test import AstropyTest # These imports are not used in this module, but are included for backwards # compat with older versions of this module from .utils import get_numpy_include_path, write_if_different # noqa from .commands.build_ext import should_build_with_cython, get_compiler_version # noqa _module_state = {'registered_commands': None, 'have_sphinx': False, 'package_cache': None, 'exclude_packages': set(), 'excludes_too_late': False} try: import sphinx # noqa _module_state['have_sphinx'] = True except ValueError as e: # This can occur deep in the bowels of Sphinx's imports by way of docutils # and an occurrence of this bug: http://bugs.python.org/issue18378 # In this case sphinx is effectively unusable if 'unknown locale' in e.args[0]: log.warn( "Possible misconfiguration of one of the environment variables " "LC_ALL, LC_CTYPES, LANG, or LANGUAGE. 
For an example of how to " "configure your system's language environment on OSX see " "http://blog.remibergsma.com/2012/07/10/" "setting-locales-correctly-on-mac-osx-terminal-application/") except ImportError: pass except SyntaxError: # occurs if markupsafe is recent version, which doesn't support Python 3.2 pass PY3 = sys.version_info[0] >= 3 # This adds a new keyword to the setup() function Distribution.skip_2to3 = [] def adjust_compiler(package): """ This function detects broken compilers and switches to another. If the environment variable CC is explicitly set, or a compiler is specified on the commandline, no override is performed -- the purpose here is to only override a default compiler. The specific compilers with problems are: * The default compiler in XCode-4.2, llvm-gcc-4.2, segfaults when compiling wcslib. The set of broken compilers can be updated by changing the compiler_mapping variable. It is a list of 2-tuples where the first in the pair is a regular expression matching the version of the broken compiler, and the second is the compiler to change to. """ warnings.warn( 'Direct use of the adjust_compiler function in setup.py is ' 'deprecated and can be removed from your setup.py. This ' 'functionality is now incorporated directly into the build_ext ' 'command.', AstropyDeprecationWarning) def get_debug_option(packagename): """ Determines if the build is in debug mode. Returns ------- debug : bool True if the current build was started with the debug option, False otherwise. """ try: current_debug = get_pkg_version_module(packagename, fromlist=['debug'])[0] except (ImportError, AttributeError): current_debug = None # Only modify the debug flag if one of the build commands was explicitly # run (i.e. 
not as a sub-command of something else) dist = get_dummy_distribution() if any(cmd in dist.commands for cmd in ['build', 'build_ext']): debug = bool(get_distutils_build_option('debug')) else: debug = bool(current_debug) if current_debug is not None and current_debug != debug: build_ext_cmd = dist.get_command_class('build_ext') build_ext_cmd.force_rebuild = True return debug def add_exclude_packages(excludes): if _module_state['excludes_too_late']: raise RuntimeError( "add_package_excludes must be called before all other setup helper " "functions in order to properly handle excluded packages") _module_state['exclude_packages'].add(set(excludes)) def register_commands(package, version, release, srcdir='.'): if _module_state['registered_commands'] is not None: return _module_state['registered_commands'] if _module_state['have_sphinx']: try: from .commands.build_sphinx import (AstropyBuildSphinx, AstropyBuildDocs) except ImportError: AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx else: AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx _module_state['registered_commands'] = registered_commands = { 'test': generate_test_command(package), # Use distutils' sdist because it respects package_data. # setuptools/distributes sdist requires duplication of information in # MANIFEST.in 'sdist': DistutilsSdist, # The exact form of the build_ext command depends on whether or not # we're building a release version 'build_ext': generate_build_ext_command(package, release), # We have a custom build_py to generate the default configuration file 'build_py': AstropyBuildPy, # Since install can (in some circumstances) be run without # first building, we also need to override install and # install_lib. 
See #2223 'install': AstropyInstall, 'install_lib': AstropyInstallLib, 'register': AstropyRegister, 'build_sphinx': AstropyBuildSphinx, 'build_docs': AstropyBuildDocs } # Need to override the __name__ here so that the commandline options are # presented as being related to the "build" command, for example; normally # this wouldn't be necessary since commands also have a command_name # attribute, but there is a bug in distutils' help display code that it # uses __name__ instead of command_name. Yay distutils! for name, cls in registered_commands.items(): cls.__name__ = name # Add a few custom options; more of these can be added by specific packages # later for option in [ ('use-system-libraries', "Use system libraries whenever possible", True)]: add_command_option('build', *option) add_command_option('install', *option) add_command_hooks(registered_commands, srcdir=srcdir) return registered_commands def add_command_hooks(commands, srcdir='.'): """ Look through setup_package.py modules for functions with names like ``pre__hook`` and ``post__hook`` where ```` is the name of a ``setup.py`` command (e.g. build_ext). If either hook is present this adds a wrapped version of that command to the passed in ``commands`` `dict`. ``commands`` may be pre-populated with other custom distutils command classes that should be wrapped if there are hooks for them (e.g. `AstropyBuildPy`). 
""" hook_re = re.compile(r'^(pre|post)_(.+)_hook$') # Distutils commands have a method of the same name, but it is not a # *classmethod* (which probably didn't exist when distutils was first # written) def get_command_name(cmdcls): if hasattr(cmdcls, 'command_name'): return cmdcls.command_name else: return cmdcls.__name__ packages = filter_packages(find_packages(srcdir)) dist = get_dummy_distribution() hooks = collections.defaultdict(dict) for setuppkg in iter_setup_packages(srcdir, packages): for name, obj in vars(setuppkg).items(): match = hook_re.match(name) if not match: continue hook_type = match.group(1) cmd_name = match.group(2) if hook_type not in hooks[cmd_name]: hooks[cmd_name][hook_type] = [] hooks[cmd_name][hook_type].append((setuppkg.__name__, obj)) for cmd_name, cmd_hooks in hooks.items(): commands[cmd_name] = generate_hooked_command( cmd_name, dist.get_command_class(cmd_name), cmd_hooks) def generate_hooked_command(cmd_name, cmd_cls, hooks): """ Returns a generated subclass of ``cmd_cls`` that runs the pre- and post-command hooks for that command before and after the ``cmd_cls.run`` method. """ def run(self, orig_run=cmd_cls.run): self.run_command_hooks('pre_hooks') orig_run(self) self.run_command_hooks('post_hooks') return type(cmd_name, (cmd_cls, object), {'run': run, 'run_command_hooks': run_command_hooks, 'pre_hooks': hooks.get('pre', []), 'post_hooks': hooks.get('post', [])}) def run_command_hooks(cmd_obj, hook_kind): """Run hooks registered for that command and phase. *cmd_obj* is a finalized command object; *hook_kind* is either 'pre_hook' or 'post_hook'. 
""" hooks = getattr(cmd_obj, hook_kind, None) if not hooks: return for modname, hook in hooks: if isinstance(hook, str): try: hook_obj = resolve_name(hook) except ImportError as exc: raise DistutilsModuleError( 'cannot find hook {0}: {1}'.format(hook, exc)) else: hook_obj = hook if not callable(hook_obj): raise DistutilsOptionError('hook {0!r} is not callable' % hook) log.info('running {0} from {1} for {2} command'.format( hook_kind.rstrip('s'), modname, cmd_obj.get_command_name())) try: hook_obj(cmd_obj) except Exception: log.error('{0} command hook {1} raised an exception: %s\n'.format( hook_obj.__name__, cmd_obj.get_command_name())) log.error(traceback.format_exc()) sys.exit(1) def generate_test_command(package_name): """ Creates a custom 'test' command for the given package which sets the command's ``package_name`` class attribute to the name of the package being tested. """ return type(package_name.title() + 'Test', (AstropyTest,), {'package_name': package_name}) def update_package_files(srcdir, extensions, package_data, packagenames, package_dirs): """ This function is deprecated and maintained for backward compatibility with affiliated packages. Affiliated packages should update their setup.py to use `get_package_info` instead. """ info = get_package_info(srcdir) extensions.extend(info['ext_modules']) package_data.update(info['package_data']) packagenames = list(set(packagenames + info['packages'])) package_dirs.update(info['package_dir']) def get_package_info(srcdir='.', exclude=()): """ Collates all of the information for building all subpackages and returns a dictionary of keyword arguments that can be passed directly to `distutils.setup`. The purpose of this function is to allow subpackages to update the arguments to the package's ``setup()`` function in its setup.py script, rather than having to specify all extensions/package data directly in the ``setup.py``. 
See Astropy's own ``setup.py`` for example usage and the Astropy development docs for more details. This function obtains that information by iterating through all packages in ``srcdir`` and locating a ``setup_package.py`` module. This module can contain the following functions: ``get_extensions()``, ``get_package_data()``, ``get_build_options()``, ``get_external_libraries()``, and ``requires_2to3()``. Each of those functions take no arguments. - ``get_extensions`` returns a list of `distutils.extension.Extension` objects. - ``get_package_data()`` returns a dict formatted as required by the ``package_data`` argument to ``setup()``. - ``get_build_options()`` returns a list of tuples describing the extra build options to add. - ``get_external_libraries()`` returns a list of libraries that can optionally be built using external dependencies. - ``get_entry_points()`` returns a dict formatted as required by the ``entry_points`` argument to ``setup()``. - ``requires_2to3()`` should return `True` when the source code requires `2to3` processing to run on Python 3.x. If ``requires_2to3()`` is missing, it is assumed to return `True`. """ ext_modules = [] packages = [] package_data = {} package_dir = {} skip_2to3 = [] if exclude: warnings.warn( "Use of the exclude parameter is no longer supported since it does " "not work as expected. Use add_exclude_packages instead. Note that " "it must be called prior to any other calls from setup helpers.", AstropyDeprecationWarning) # Use the find_packages tool to locate all packages and modules packages = filter_packages(find_packages(srcdir, exclude=exclude)) # Update package_dir if the package lies in a subdirectory if srcdir != '.': package_dir[''] = srcdir # For each of the setup_package.py modules, extract any # information that is needed to install them. The build options # are extracted first, so that their values will be available in # subsequent calls to `get_extensions`, etc. 
for setuppkg in iter_setup_packages(srcdir, packages): if hasattr(setuppkg, 'get_build_options'): options = setuppkg.get_build_options() for option in options: add_command_option('build', *option) if hasattr(setuppkg, 'get_external_libraries'): libraries = setuppkg.get_external_libraries() for library in libraries: add_external_library(library) if hasattr(setuppkg, 'requires_2to3'): requires_2to3 = setuppkg.requires_2to3() else: requires_2to3 = True if not requires_2to3: skip_2to3.append( os.path.dirname(setuppkg.__file__)) for setuppkg in iter_setup_packages(srcdir, packages): # get_extensions must include any Cython extensions by their .pyx # filename. if hasattr(setuppkg, 'get_extensions'): ext_modules.extend(setuppkg.get_extensions()) if hasattr(setuppkg, 'get_package_data'): package_data.update(setuppkg.get_package_data()) # Locate any .pyx files not already specified, and add their extensions in. # The default include dirs include numpy to facilitate numerical work. ext_modules.extend(get_cython_extensions(srcdir, packages, ext_modules, ['numpy'])) # Now remove extensions that have the special name 'skip_cython', as they # exist Only to indicate that the cython extensions shouldn't be built for i, ext in reversed(list(enumerate(ext_modules))): if ext.name == 'skip_cython': del ext_modules[i] # On Microsoft compilers, we need to pass the '/MANIFEST' # commandline argument. This was the default on MSVC 9.0, but is # now required on MSVC 10.0, but it doesn't seem to hurt to add # it unconditionally. if get_compiler_option() == 'msvc': for ext in ext_modules: ext.extra_link_args.append('/MANIFEST') return { 'ext_modules': ext_modules, 'packages': packages, 'package_dir': package_dir, 'package_data': package_data, 'skip_2to3': skip_2to3 } def iter_setup_packages(srcdir, packages): """ A generator that finds and imports all of the ``setup_package.py`` modules in the source packages. 
    Returns
    -------
    modgen : generator
        A generator that yields (modname, mod), where `mod` is the module and
        `modname` is the module name for the ``setup_package.py`` modules.
    """
    for packagename in packages:
        package_parts = packagename.split('.')
        package_path = os.path.join(srcdir, *package_parts)
        # Relative path keeps the import name stable regardless of cwd depth.
        setup_package = os.path.relpath(
            os.path.join(package_path, 'setup_package.py'))

        if os.path.isfile(setup_package):
            # Import under a synthetic dotted name so hooks can identify
            # which package the setup_package module belongs to.
            module = import_file(setup_package,
                                 name=packagename + '.setup_package')
            yield module


def iter_pyx_files(package_dir, package_name):
    """
    A generator that yields Cython source files (ending in '.pyx') in the
    source packages.

    Returns
    -------
    pyxgen : generator
        A generator that yields (extmod, fullfn) where `extmod` is the
        full name of the module that the .pyx file would live in based on the
        source directory structure, and `fullfn` is the path to the .pyx file.
    """
    for dirpath, dirnames, filenames in walk_skip_hidden(package_dir):
        for fn in filenames:
            if fn.endswith('.pyx'):
                fullfn = os.path.relpath(os.path.join(dirpath, fn))
                # Package must match file name
                extmod = '.'.join([package_name, fn[:-4]])
                yield (extmod, fullfn)
        # Only the top level of package_dir is scanned; subpackages are
        # visited by the caller via their own package names.
        break  # Don't recurse into subdirectories


def get_cython_extensions(srcdir, packages, prevextensions=tuple(),
                          extincludedirs=None):
    """
    Looks for Cython files and generates Extensions if needed.

    Parameters
    ----------
    srcdir : str
        Path to the root of the source directory to search.

    prevextensions : list of `~distutils.core.Extension` objects
        The extensions that are already defined.  Any .pyx files already here
        will be ignored.

    extincludedirs : list of str or None
        Directories to include as the `include_dirs` argument to the
        generated `~distutils.core.Extension` objects.

    Returns
    -------
    exts : list of `~distutils.core.Extension` objects
        The new extensions that are needed to compile all .pyx files
        (does not include any already in `prevextensions`).
""" # Vanilla setuptools and old versions of distribute include Cython files # as .c files in the sources, not .pyx, so we cannot simply look for # existing .pyx sources in the previous sources, but we should also check # for .c files with the same remaining filename. So we look for .pyx and # .c files, and we strip the extension. prevsourcepaths = [] ext_modules = [] for ext in prevextensions: for s in ext.sources: if s.endswith(('.pyx', '.c', '.cpp')): sourcepath = os.path.realpath(os.path.splitext(s)[0]) prevsourcepaths.append(sourcepath) for package_name in packages: package_parts = package_name.split('.') package_path = os.path.join(srcdir, *package_parts) for extmod, pyxfn in iter_pyx_files(package_path, package_name): sourcepath = os.path.realpath(os.path.splitext(pyxfn)[0]) if sourcepath not in prevsourcepaths: ext_modules.append(Extension(extmod, [pyxfn], include_dirs=extincludedirs)) return ext_modules class DistutilsExtensionArgs(collections.defaultdict): """ A special dictionary whose default values are the empty list. This is useful for building up a set of arguments for `distutils.Extension` without worrying whether the entry is already present. """ def __init__(self, *args, **kwargs): def default_factory(): return [] super(DistutilsExtensionArgs, self).__init__( default_factory, *args, **kwargs) def update(self, other): for key, val in other.items(): self[key].extend(val) def pkg_config(packages, default_libraries, executable='pkg-config'): """ Uses pkg-config to update a set of distutils Extension arguments to include the flags necessary to link against the given packages. If the pkg-config lookup fails, default_libraries is applied to libraries. Parameters ---------- packages : list of str A list of pkg-config packages to look up. default_libraries : list of str A list of library names to use if the pkg-config lookup fails. Returns ------- config : dict A dictionary containing keyword arguments to `distutils.Extension`. 
These entries include: - ``include_dirs``: A list of include directories - ``library_dirs``: A list of library directories - ``libraries``: A list of libraries - ``define_macros``: A list of macro defines - ``undef_macros``: A list of macros to undefine - ``extra_compile_args``: A list of extra arguments to pass to the compiler """ flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries', '-D': 'define_macros', '-U': 'undef_macros'} command = "{0} --libs --cflags {1}".format(executable, ' '.join(packages)), result = DistutilsExtensionArgs() try: pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) output = pipe.communicate()[0].strip() except subprocess.CalledProcessError as e: lines = [ ("{0} failed. This may cause the build to fail below." .format(executable)), " command: {0}".format(e.cmd), " returncode: {0}".format(e.returncode), " output: {0}".format(e.output) ] log.warn('\n'.join(lines)) result['libraries'].extend(default_libraries) else: if pipe.returncode != 0: lines = [ "pkg-config could not lookup up package(s) {0}.".format( ", ".join(packages)), "This may cause the build to fail below." ] log.warn('\n'.join(lines)) result['libraries'].extend(default_libraries) else: for token in output.split(): # It's not clear what encoding the output of # pkg-config will come to us in. It will probably be # some combination of pure ASCII (for the compiler # flags) and the filesystem encoding (for any argument # that includes directories or filenames), but this is # just conjecture, as the pkg-config documentation # doesn't seem to address it. arg = token[:2].decode('ascii') value = token[2:].decode(sys.getfilesystemencoding()) if arg in flag_map: if arg == '-D': value = tuple(value.split('=', 1)) result[flag_map[arg]].append(value) else: result['extra_compile_args'].append(value) return result def add_external_library(library): """ Add a build option for selecting the internal or system copy of a library. 
Parameters ---------- library : str The name of the library. If the library is `foo`, the build option will be called `--use-system-foo`. """ for command in ['build', 'build_ext', 'install']: add_command_option(command, str('use-system-' + library), 'Use the system {0} library'.format(library), is_bool=True) def use_system_library(library): """ Returns `True` if the build configuration indicates that the given library should use the system copy of the library rather than the internal one. For the given library `foo`, this will be `True` if `--use-system-foo` or `--use-system-libraries` was provided at the commandline or in `setup.cfg`. Parameters ---------- library : str The name of the library Returns ------- use_system : bool `True` if the build should use the system copy of the library. """ return ( get_distutils_build_or_install_option('use_system_{0}'.format(library)) or get_distutils_build_or_install_option('use_system_libraries')) @extends_doc(_find_packages) def find_packages(where='.', exclude=(), invalidate_cache=False): """ This version of ``find_packages`` caches previous results to speed up subsequent calls. Use ``invalide_cache=True`` to ignore cached results from previous ``find_packages`` calls, and repeat the package search. """ if exclude: warnings.warn( "Use of the exclude parameter is no longer supported since it does " "not work as expected. Use add_exclude_packages instead. 
Note that " "it must be called prior to any other calls from setup helpers.", AstropyDeprecationWarning) # Calling add_exclude_packages after this point will have no effect _module_state['excludes_too_late'] = True if not invalidate_cache and _module_state['package_cache'] is not None: return _module_state['package_cache'] packages = _find_packages( where=where, exclude=list(_module_state['exclude_packages'])) _module_state['package_cache'] = packages return packages def filter_packages(packagenames): """ Removes some packages from the package list that shouldn't be installed on the current version of Python. """ if PY3: exclude = '_py2' else: exclude = '_py3' return [x for x in packagenames if not x.endswith(exclude)] class FakeBuildSphinx(Command): """ A dummy build_sphinx command that is called if Sphinx is not installed and displays a relevant error message """ # user options inherited from sphinx.setup_command.BuildDoc user_options = [ ('fresh-env', 'E', ''), ('all-files', 'a', ''), ('source-dir=', 's', ''), ('build-dir=', None, ''), ('config-dir=', 'c', ''), ('builder=', 'b', ''), ('project=', None, ''), ('version=', None, ''), ('release=', None, ''), ('today=', None, ''), ('link-index', 'i', '')] # user options appended in astropy.setup_helpers.AstropyBuildSphinx user_options.append(('warnings-returncode', 'w', '')) user_options.append(('clean-docs', 'l', '')) user_options.append(('no-intersphinx', 'n', '')) user_options.append(('open-docs-in-browser', 'o', '')) def initialize_options(self): try: raise RuntimeError("Sphinx and its dependencies must be installed " "for build_docs.") except: log.error('error: Sphinx and its dependencies must be installed ' 'for build_docs.') sys.exit(1) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/version_helpers.py0000664000175000017500000002303313207611674030114 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Utilities for generating the version string for 
Astropy (or an affiliated package) and the version.py module, which contains version info for the package. Within the generated astropy.version module, the `major`, `minor`, and `bugfix` variables hold the respective parts of the version number (bugfix is '0' if absent). The `release` variable is True if this is a release, and False if this is a development version of astropy. For the actual version string, use:: from astropy.version import version or:: from astropy import __version__ """ from __future__ import division import datetime import imp import os import pkgutil import sys import time from distutils import log import pkg_resources from . import git_helpers from .distutils_helpers import is_distutils_display_option from .utils import invalidate_caches PY3 = sys.version_info[0] == 3 def _version_split(version): """ Split a version string into major, minor, and bugfix numbers. If any of those numbers are missing the default is zero. Any pre/post release modifiers are ignored. Examples ======== >>> _version_split('1.2.3') (1, 2, 3) >>> _version_split('1.2') (1, 2, 0) >>> _version_split('1.2rc1') (1, 2, 0) >>> _version_split('1') (1, 0, 0) >>> _version_split('') (0, 0, 0) """ parsed_version = pkg_resources.parse_version(version) if hasattr(parsed_version, 'base_version'): # New version parsing for setuptools >= 8.0 if parsed_version.base_version: parts = [int(part) for part in parsed_version.base_version.split('.')] else: parts = [] else: parts = [] for part in parsed_version: if part.startswith('*'): # Ignore any .dev, a, b, rc, etc. break parts.append(int(part)) if len(parts) < 3: parts += [0] * (3 - len(parts)) # In principle a version could have more parts (like 1.2.3.4) but we only # support .. return tuple(parts[:3]) # This is used by setup.py to create a new version.py - see that file for # details. Note that the imports have to be absolute, since this is also used # by affiliated packages. 
_FROZEN_VERSION_PY_TEMPLATE = """ # Autogenerated by {packagetitle}'s setup.py on {timestamp!s} from __future__ import unicode_literals import datetime {header} major = {major} minor = {minor} bugfix = {bugfix} release = {rel} timestamp = {timestamp!r} debug = {debug} try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" """[1:] _FROZEN_VERSION_PY_WITH_GIT_HEADER = """ {git_helpers} _packagename = "{packagename}" _last_generated_version = "{verstr}" _last_githash = "{githash}" # Determine where the source code for this module # lives. If __file__ is not a filesystem path then # it is assumed not to live in a git repo at all. if _get_repo_path(__file__, levels=len(_packagename.split('.'))): version = update_git_devstr(_last_generated_version, path=__file__) githash = get_git_devstr(sha=True, show_warning=False, path=__file__) or _last_githash else: # The file does not appear to live in a git repo so don't bother # invoking git version = _last_generated_version githash = _last_githash """[1:] _FROZEN_VERSION_PY_STATIC_HEADER = """ version = "{verstr}" githash = "{githash}" """[1:] def _get_version_py_str(packagename, version, githash, release, debug, uses_git=True): epoch = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) timestamp = datetime.datetime.utcfromtimestamp(epoch) major, minor, bugfix = _version_split(version) if packagename.lower() == 'astropy': packagetitle = 'Astropy' else: packagetitle = 'Astropy-affiliated package ' + packagename header = '' if uses_git: header = _generate_git_header(packagename, version, githash) elif not githash: # _generate_git_header will already generate a new git has for us, but # for creating a new version.py for a release (even if uses_git=False) # we still need to get the githash to include in the version.py # See https://github.com/astropy/astropy-helpers/issues/141 githash = 
git_helpers.get_git_devstr(sha=True, show_warning=True) if not header: # If _generate_git_header fails it returns an empty string header = _FROZEN_VERSION_PY_STATIC_HEADER.format(verstr=version, githash=githash) return _FROZEN_VERSION_PY_TEMPLATE.format(packagetitle=packagetitle, timestamp=timestamp, header=header, major=major, minor=minor, bugfix=bugfix, rel=release, debug=debug) def _generate_git_header(packagename, version, githash): """ Generates a header to the version.py module that includes utilities for probing the git repository for updates (to the current git hash, etc.) These utilities should only be available in development versions, and not in release builds. If this fails for any reason an empty string is returned. """ loader = pkgutil.get_loader(git_helpers) source = loader.get_source(git_helpers.__name__) or '' source_lines = source.splitlines() if not source_lines: log.warn('Cannot get source code for astropy_helpers.git_helpers; ' 'git support disabled.') return '' idx = 0 for idx, line in enumerate(source_lines): if line.startswith('# BEGIN'): break git_helpers_py = '\n'.join(source_lines[idx + 1:]) if PY3: verstr = version else: # In Python 2 don't pass in a unicode string; otherwise verstr will # be represented with u'' syntax which breaks on Python 3.x with x # < 3. 
This is only an issue when developing on multiple Python # versions at once verstr = version.encode('utf8') new_githash = git_helpers.get_git_devstr(sha=True, show_warning=False) if new_githash: githash = new_githash return _FROZEN_VERSION_PY_WITH_GIT_HEADER.format( git_helpers=git_helpers_py, packagename=packagename, verstr=verstr, githash=githash) def generate_version_py(packagename, version, release=None, debug=None, uses_git=True, srcdir='.'): """Regenerate the version.py module if necessary.""" try: version_module = get_pkg_version_module(packagename) try: last_generated_version = version_module._last_generated_version except AttributeError: last_generated_version = version_module.version try: last_githash = version_module._last_githash except AttributeError: last_githash = version_module.githash current_release = version_module.release current_debug = version_module.debug except ImportError: version_module = None last_generated_version = None last_githash = None current_release = None current_debug = None if release is None: # Keep whatever the current value is, if it exists release = bool(current_release) if debug is None: # Likewise, keep whatever the current value is, if it exists debug = bool(current_debug) version_py = os.path.join(srcdir, packagename, 'version.py') if (last_generated_version != version or current_release != release or current_debug != debug): if '-q' not in sys.argv and '--quiet' not in sys.argv: log.set_threshold(log.INFO) if is_distutils_display_option(): # Always silence unnecessary log messages when display options are # being used log.set_threshold(log.WARN) log.info('Freezing version number to {0}'.format(version_py)) with open(version_py, 'w') as f: # This overwrites the actual version.py f.write(_get_version_py_str(packagename, version, last_githash, release, debug, uses_git=uses_git)) invalidate_caches() if version_module: imp.reload(version_module) def get_pkg_version_module(packagename, fromlist=None): """Returns the 
package's .version module generated by `astropy_helpers.version_helpers.generate_version_py`. Raises an ImportError if the version module is not found. If ``fromlist`` is an iterable, return a tuple of the members of the version module corresponding to the member names given in ``fromlist``. Raises an `AttributeError` if any of these module members are not found. """ if not fromlist: # Due to a historical quirk of Python's import implementation, # __import__ will not return submodules of a package if 'fromlist' is # empty. # TODO: For Python 3.1 and up it may be preferable to use importlib # instead of the __import__ builtin return __import__(packagename + '.version', fromlist=['']) else: mod = __import__(packagename + '.version', fromlist=fromlist) return tuple(getattr(mod, member) for member in fromlist) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/version.py0000664000175000017500000000102513207611701026356 0ustar mseifertmseifert00000000000000# Autogenerated by Astropy-affiliated package astropy_helpers's setup.py on 2017-11-29 20:08:33 from __future__ import unicode_literals import datetime version = "2.0.2" githash = "d23a53f46dd1c3703e5eee63dca3f53bd18a4e8b" major = 2 minor = 0 bugfix = 2 release = True timestamp = datetime.datetime(2017, 11, 29, 20, 8, 33) debug = False try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/0000775000175000017500000000000013207623133025630 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/0000775000175000017500000000000013207623133027772 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/__init__.py0000664000175000017500000000002413207611674032106 0ustar mseifertmseifert00000000000000__version__ = '0.6' 
ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/smart_resolver.py0000664000175000017500000000717713207611674033436 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The classes in the astropy docs are documented by their API location, which is not necessarily where they are defined in the source. This causes a problem when certain automated features of the doc build, such as the inheritance diagrams or the `Bases` list of a class reference a class by its canonical location rather than its "user" location. In the `autodoc-process-docstring` event, a mapping from the actual name to the API name is maintained. Later, in the `missing-reference` event, unresolved references are looked up in this dictionary and corrected if possible. """ from docutils.nodes import literal, reference def process_docstring(app, what, name, obj, options, lines): if isinstance(obj, type): env = app.env if not hasattr(env, 'class_name_mapping'): env.class_name_mapping = {} mapping = env.class_name_mapping mapping[obj.__module__ + '.' + obj.__name__] = name def missing_reference_handler(app, env, node, contnode): if not hasattr(env, 'class_name_mapping'): env.class_name_mapping = {} mapping = env.class_name_mapping reftype = node['reftype'] reftarget = node['reftarget'] if reftype in ('obj', 'class', 'exc', 'meth'): reftarget = node['reftarget'] suffix = '' if reftarget not in mapping: if '.' in reftarget: front, suffix = reftarget.rsplit('.', 1) else: suffix = reftarget if suffix.startswith('_') and not suffix.startswith('__'): # If this is a reference to a hidden class or method, # we can't link to it, but we don't want to have a # nitpick warning. return node[0].deepcopy() if reftype in ('obj', 'meth') and '.' in reftarget: if front in mapping: reftarget = front suffix = '.' + suffix if (reftype in ('class', ) and '.' in reftarget and reftarget not in mapping): if '.' 
in front: reftarget, _ = front.rsplit('.', 1) suffix = '.' + suffix reftarget = reftarget + suffix prefix = reftarget.rsplit('.')[0] inventory = env.intersphinx_named_inventory if (reftarget not in mapping and prefix in inventory): if reftarget in inventory[prefix]['py:class']: newtarget = inventory[prefix]['py:class'][reftarget][2] if not node['refexplicit'] and \ '~' not in node.rawsource: contnode = literal(text=reftarget) newnode = reference('', '', internal=True) newnode['reftitle'] = reftarget newnode['refuri'] = newtarget newnode.append(contnode) return newnode if reftarget in mapping: newtarget = mapping[reftarget] + suffix if not node['refexplicit'] and '~' not in node.rawsource: contnode = literal(text=newtarget) newnode = env.domains['py'].resolve_xref( env, node['refdoc'], app.builder, 'class', newtarget, node, contnode) if newnode is not None: newnode['reftitle'] = reftarget return newnode def setup(app): app.connect('autodoc-process-docstring', process_docstring) app.connect('missing-reference', missing_reference_handler) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/automodsumm.py0000664000175000017500000006265213207611674032740 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This directive will produce an "autosummary"-style table for public attributes of a specified module. See the `sphinx.ext.autosummary`_ extension for details on this process. The main difference from the `autosummary`_ directive is that `autosummary`_ requires manually inputting all attributes that appear in the table, while this captures the entries automatically. This directive requires a single argument that must be a module or package. It also accepts any options supported by the `autosummary`_ directive- see `sphinx.ext.autosummary`_ for details. It also accepts some additional options: * ``:classes-only:`` If present, the autosummary table will only contain entries for classes. 
This cannot be used at the same time with ``:functions-only:`` or ``:variables-only:``. * ``:functions-only:`` If present, the autosummary table will only contain entries for functions. This cannot be used at the same time with ``:classes-only:`` or ``:variables-only:``. * ``:variables-only:`` If present, the autosummary table will only contain entries for variables (everything except functions and classes). This cannot be used at the same time with ``:classes-only:`` or ``:functions-only:``. * ``:skip: obj1, [obj2, obj3, ...]`` If present, specifies that the listed objects should be skipped and not have their documentation generated, nor be included in the summary table. * ``:allowed-package-names: pkgormod1, [pkgormod2, pkgormod3, ...]`` Specifies the packages that functions/classes documented here are allowed to be from, as comma-separated list of package names. If not given, only objects that are actually in a subpackage of the package currently being documented are included. * ``:inherited-members:`` or ``:no-inherited-members:`` The global sphinx configuration option ``automodsumm_inherited_members`` decides if members that a class inherits from a base class are included in the generated documentation. The flags ``:inherited-members:`` or ``:no-inherited-members:`` allows overrriding this global setting. This extension also adds two sphinx configuration options: * ``automodsumm_writereprocessed`` Should be a bool, and if ``True``, will cause `automodsumm`_ to write files with any ``automodsumm`` sections replaced with the content Sphinx processes after ``automodsumm`` has run. The output files are not actually used by sphinx, so this option is only for figuring out the cause of sphinx warnings or other debugging. Defaults to ``False``. * ``automodsumm_inherited_members`` Should be a bool and if ``True``, will cause `automodsumm`_ to document class members that are inherited from a base class. 
This value can be overriden for any particular automodsumm directive by including the ``:inherited-members:`` or ``:no-inherited-members:`` options. Defaults to ``False``. .. _sphinx.ext.autosummary: http://sphinx-doc.org/latest/ext/autosummary.html .. _autosummary: http://sphinx-doc.org/latest/ext/autosummary.html#directive-autosummary .. _automod-diagram: automod-diagram directive ========================= This directive will produce an inheritance diagram like that of the `sphinx.ext.inheritance_diagram`_ extension. This directive requires a single argument that must be a module or package. It accepts no options. .. note:: Like 'inheritance-diagram', 'automod-diagram' requires `graphviz `_ to generate the inheritance diagram. .. _sphinx.ext.inheritance_diagram: http://sphinx-doc.org/latest/ext/inheritance.html """ import inspect import os import re import io from sphinx.ext.autosummary import Autosummary from sphinx.ext.inheritance_diagram import InheritanceDiagram from docutils.parsers.rst.directives import flag from .utils import find_mod_objs, cleanup_whitespace def _str_list_converter(argument): """ A directive option conversion function that converts the option into a list of strings. Used for 'skip' option. 
""" if argument is None: return [] else: return [s.strip() for s in argument.split(',')] class Automodsumm(Autosummary): required_arguments = 1 optional_arguments = 0 final_argument_whitespace = False has_content = False option_spec = dict(Autosummary.option_spec) option_spec['functions-only'] = flag option_spec['classes-only'] = flag option_spec['variables-only'] = flag option_spec['skip'] = _str_list_converter option_spec['allowed-package-names'] = _str_list_converter option_spec['inherited-members'] = flag option_spec['no-inherited-members'] = flag def run(self): env = self.state.document.settings.env modname = self.arguments[0] self.warnings = [] nodelist = [] try: localnames, fqns, objs = find_mod_objs(modname) except ImportError: self.warnings = [] self.warn("Couldn't import module " + modname) return self.warnings try: # set self.content to trick the autosummary internals. # Be sure to respect functions-only and classes-only. funconly = 'functions-only' in self.options clsonly = 'classes-only' in self.options varonly = 'variables-only' in self.options if [clsonly, funconly, varonly].count(True) > 1: self.warning('more than one of functions-only, classes-only, ' 'or variables-only defined. Ignoring.') clsonly = funconly = varonly = False skipnames = [] if 'skip' in self.options: option_skipnames = set(self.options['skip']) for lnm in localnames: if lnm in option_skipnames: option_skipnames.remove(lnm) skipnames.append(lnm) if len(option_skipnames) > 0: self.warn('Tried to skip objects {objs} in module {mod}, ' 'but they were not present. Ignoring.' 
.format(objs=option_skipnames, mod=modname)) if funconly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and inspect.isroutine(obj): cont.append(nm) elif clsonly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and inspect.isclass(obj): cont.append(nm) elif varonly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and not (inspect.isclass(obj) or inspect.isroutine(obj)): cont.append(nm) else: cont = [nm for nm in localnames if nm not in skipnames] self.content = cont # for some reason, even though ``currentmodule`` is substituted in, # sphinx doesn't necessarily recognize this fact. So we just force # it internally, and that seems to fix things env.temp_data['py:module'] = modname env.ref_context['py:module'] = modname # can't use super because Sphinx/docutils has trouble return # super(Autosummary,self).run() nodelist.extend(Autosummary.run(self)) return self.warnings + nodelist finally: # has_content = False for the Automodsumm self.content = [] def get_items(self, names): self.genopt['imported-members'] = True return Autosummary.get_items(self, names) # <-------------------automod-diagram stuff-----------------------------------> class Automoddiagram(InheritanceDiagram): option_spec = dict(InheritanceDiagram.option_spec) option_spec['allowed-package-names'] = _str_list_converter option_spec['skip'] = _str_list_converter def run(self): try: ols = self.options.get('allowed-package-names', []) ols = True if len(ols) == 0 else ols # if none are given, assume only local nms, objs = find_mod_objs(self.arguments[0], onlylocals=ols)[1:] except ImportError: self.warnings = [] self.warn("Couldn't import module " + self.arguments[0]) return self.warnings # Check if some classes should be skipped skip = self.options.get('skip', []) clsnms = [] for n, o in zip(nms, objs): if n.split('.')[-1] in skip: continue if inspect.isclass(o): clsnms.append(n) oldargs = self.arguments try: if len(clsnms) > 0: 
self.arguments = [' '.join(clsnms)] return InheritanceDiagram.run(self) finally: self.arguments = oldargs # <---------------------automodsumm generation stuff--------------------------> def process_automodsumm_generation(app): env = app.builder.env filestosearch = [] for docname in env.found_docs: filename = env.doc2path(docname) if os.path.isfile(filename): filestosearch.append(docname + os.path.splitext(filename)[1]) liness = [] for sfn in filestosearch: lines = automodsumm_to_autosummary_lines(sfn, app) liness.append(lines) if app.config.automodsumm_writereprocessed: if lines: # empty list means no automodsumm entry is in the file outfn = os.path.join(app.srcdir, sfn) + '.automodsumm' with open(outfn, 'w') as f: for l in lines: f.write(l) f.write('\n') for sfn, lines in zip(filestosearch, liness): suffix = os.path.splitext(sfn)[1] if len(lines) > 0: generate_automodsumm_docs( lines, sfn, builder=app.builder, warn=app.warn, info=app.info, suffix=suffix, base_path=app.srcdir, inherited_members=app.config.automodsumm_inherited_members) # _automodsummrex = re.compile(r'^(\s*)\.\. automodsumm::\s*([A-Za-z0-9_.]+)\s*' # r'\n\1(\s*)(\S|$)', re.MULTILINE) _lineendrex = r'(?:\n|$)' _hdrex = r'^\n?(\s*)\.\. automodsumm::\s*(\S+)\s*' + _lineendrex _oprex1 = r'(?:\1(\s+)\S.*' + _lineendrex + ')' _oprex2 = r'(?:\1\4\S.*' + _lineendrex + ')' _automodsummrex = re.compile(_hdrex + '(' + _oprex1 + '?' + _oprex2 + '*)', re.MULTILINE) def automodsumm_to_autosummary_lines(fn, app): """ Generates lines from a file with an "automodsumm" entry suitable for feeding into "autosummary". Searches the provided file for `automodsumm` directives and returns a list of lines specifying the `autosummary` commands for the modules requested. This does *not* return the whole file contents - just an autosummary section in place of any :automodsumm: entries. Note that any options given for `automodsumm` are also included in the generated `autosummary` section. 
Parameters ---------- fn : str The name of the file to search for `automodsumm` entries. app : sphinx.application.Application The sphinx Application object Return ------ lines : list of str Lines for all `automodsumm` entries with the entries replaced by `autosummary` and the module's members added. """ fullfn = os.path.join(app.builder.env.srcdir, fn) with io.open(fullfn, encoding='utf8') as fr: # Note: we use __name__ here instead of just writing the module name in # case this extension is bundled into another package from . import automodapi try: extensions = app.extensions except AttributeError: # Sphinx <1.6 extensions = app._extensions if automodapi.__name__ in extensions: # Must do the automodapi on the source to get the automodsumm # that might be in there docname = os.path.splitext(fn)[0] filestr = automodapi.automodapi_replace(fr.read(), app, True, docname, False) else: filestr = fr.read() spl = _automodsummrex.split(filestr) # 0th entry is the stuff before the first automodsumm line indent1s = spl[1::5] mods = spl[2::5] opssecs = spl[3::5] indent2s = spl[4::5] remainders = spl[5::5] # only grab automodsumm sections and convert them to autosummary with the # entries for all the public objects newlines = [] # loop over all automodsumms in this document for i, (i1, i2, modnm, ops, rem) in enumerate(zip(indent1s, indent2s, mods, opssecs, remainders)): allindent = i1 + (' ' if i2 is None else i2) # filter out functions-only, classes-only, and ariables-only # options if present. 
oplines = ops.split('\n') toskip = [] allowedpkgnms = [] funcsonly = clssonly = varsonly = False for i, ln in reversed(list(enumerate(oplines))): if ':functions-only:' in ln: funcsonly = True del oplines[i] if ':classes-only:' in ln: clssonly = True del oplines[i] if ':variables-only:' in ln: varsonly = True del oplines[i] if ':skip:' in ln: toskip.extend(_str_list_converter(ln.replace(':skip:', ''))) del oplines[i] if ':allowed-package-names:' in ln: allowedpkgnms.extend(_str_list_converter(ln.replace(':allowed-package-names:', ''))) del oplines[i] if [funcsonly, clssonly, varsonly].count(True) > 1: msg = ('Defined more than one of functions-only, classes-only, ' 'and variables-only. Skipping this directive.') lnnum = sum([spl[j].count('\n') for j in range(i * 5 + 1)]) app.warn('[automodsumm]' + msg, (fn, lnnum)) continue # Use the currentmodule directive so we can just put the local names # in the autosummary table. Note that this doesn't always seem to # actually "take" in Sphinx's eyes, so in `Automodsumm.run`, we have to # force it internally, as well. newlines.extend([i1 + '.. currentmodule:: ' + modnm, '', '.. autosummary::']) newlines.extend(oplines) ols = True if len(allowedpkgnms) == 0 else allowedpkgnms for nm, fqn, obj in zip(*find_mod_objs(modnm, onlylocals=ols)): if nm in toskip: continue if funcsonly and not inspect.isroutine(obj): continue if clssonly and not inspect.isclass(obj): continue if varsonly and (inspect.isclass(obj) or inspect.isroutine(obj)): continue newlines.append(allindent + nm) # add one newline at the end of the autosummary block newlines.append('') return newlines def generate_automodsumm_docs(lines, srcfn, suffix='.rst', warn=None, info=None, base_path=None, builder=None, template_dir=None, inherited_members=False): """ This function is adapted from `sphinx.ext.autosummary.generate.generate_autosummmary_docs` to generate source for the automodsumm directives that should be autosummarized. 
Unlike generate_autosummary_docs, this function is called one file at a time. """ from sphinx.jinja2glue import BuiltinTemplateLoader from sphinx.ext.autosummary import import_by_name, get_documenter from sphinx.ext.autosummary.generate import (_simple_info, _simple_warn) from sphinx.util.osutil import ensuredir from sphinx.util.inspect import safe_getattr from jinja2 import FileSystemLoader, TemplateNotFound from jinja2.sandbox import SandboxedEnvironment from .utils import find_autosummary_in_lines_for_automodsumm as find_autosummary_in_lines if info is None: info = _simple_info if warn is None: warn = _simple_warn # info('[automodsumm] generating automodsumm for: ' + srcfn) # Create our own templating environment - here we use Astropy's # templates rather than the default autosummary templates, in order to # allow docstrings to be shown for methods. template_dirs = [os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(base_path, '_templates')] if builder is not None: # allow the user to override the templates template_loader = BuiltinTemplateLoader() template_loader.init(builder, dirs=template_dirs) else: if template_dir: template_dirs.insert(0, template_dir) template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) # read # items = find_autosummary_in_files(sources) items = find_autosummary_in_lines(lines, filename=srcfn) if len(items) > 0: msg = '[automodsumm] {1}: found {0} automodsumm entries to generate' info(msg.format(len(items), srcfn)) # gennms = [item[0] for item in items] # if len(gennms) > 20: # gennms = gennms[:10] + ['...'] + gennms[-10:] # info('[automodsumm] generating autosummary for: ' + ', '.join(gennms)) # remove possible duplicates items = list(set(items)) # keep track of new files new_files = [] # write for name, path, template_name, inherited_mem in sorted(items): if path is None: # The corresponding autosummary:: directive did not have # a :toctree: option continue path = 
os.path.abspath(os.path.join(base_path, path)) ensuredir(path) try: import_by_name_values = import_by_name(name) except ImportError as e: warn('[automodsumm] failed to import %r: %s' % (name, e)) continue # if block to accommodate Sphinx's v1.2.2 and v1.2.3 respectively if len(import_by_name_values) == 3: name, obj, parent = import_by_name_values elif len(import_by_name_values) == 4: name, obj, parent, module_name = import_by_name_values fn = os.path.join(path, name + suffix) # skip it if it exists if os.path.isfile(fn): continue new_files.append(fn) f = open(fn, 'w') try: doc = get_documenter(obj, parent) if template_name is not None: template = template_env.get_template(template_name) else: tmplstr = 'autosummary_core/%s.rst' try: template = template_env.get_template(tmplstr % doc.objtype) except TemplateNotFound: template = template_env.get_template(tmplstr % 'base') def get_members_mod(obj, typ, include_public=[]): """ typ = None -> all """ items = [] for name in dir(obj): try: documenter = get_documenter(safe_getattr(obj, name), obj) except AttributeError: continue if typ is None or documenter.objtype == typ: items.append(name) public = [x for x in items if x in include_public or not x.startswith('_')] return public, items def get_members_class(obj, typ, include_public=[], include_base=False): """ typ = None -> all include_base -> include attrs that are from a base class """ items = [] # using dir gets all of the attributes, including the elements # from the base class, otherwise use __slots__ or __dict__ if include_base: names = dir(obj) else: if hasattr(obj, '__slots__'): names = tuple(getattr(obj, '__slots__')) else: names = getattr(obj, '__dict__').keys() for name in names: try: documenter = get_documenter(safe_getattr(obj, name), obj) except AttributeError: continue if typ is None or documenter.objtype == typ: items.append(name) public = [x for x in items if x in include_public or not x.startswith('_')] return public, items ns = {} if doc.objtype == 
'module': ns['members'] = get_members_mod(obj, None) ns['functions'], ns['all_functions'] = \ get_members_mod(obj, 'function') ns['classes'], ns['all_classes'] = \ get_members_mod(obj, 'class') ns['exceptions'], ns['all_exceptions'] = \ get_members_mod(obj, 'exception') elif doc.objtype == 'class': if inherited_mem is not None: # option set in this specifc directive include_base = inherited_mem else: # use default value include_base = inherited_members api_class_methods = ['__init__', '__call__'] ns['members'] = get_members_class(obj, None, include_base=include_base) ns['methods'], ns['all_methods'] = \ get_members_class(obj, 'method', api_class_methods, include_base=include_base) ns['attributes'], ns['all_attributes'] = \ get_members_class(obj, 'attribute', include_base=include_base) ns['methods'].sort() ns['attributes'].sort() parts = name.split('.') if doc.objtype in ('method', 'attribute'): mod_name = '.'.join(parts[:-2]) cls_name = parts[-2] obj_name = '.'.join(parts[-2:]) ns['class'] = cls_name else: mod_name, obj_name = '.'.join(parts[:-1]), parts[-1] ns['fullname'] = name ns['module'] = mod_name ns['objname'] = obj_name ns['name'] = parts[-1] ns['objtype'] = doc.objtype ns['underline'] = len(obj_name) * '=' # We now check whether a file for reference footnotes exists for # the module being documented. We first check if the # current module is a file or a directory, as this will give a # different path for the reference file. For example, if # documenting astropy.wcs then the reference file is at # ../wcs/references.txt, while if we are documenting # astropy.config.logging_helper (which is at # astropy/config/logging_helper.py) then the reference file is set # to ../config/references.txt if '.' 
in mod_name: mod_name_dir = mod_name.replace('.', '/').split('/', 1)[1] else: mod_name_dir = mod_name if not os.path.isdir(os.path.join(base_path, mod_name_dir)) \ and os.path.isdir(os.path.join(base_path, mod_name_dir.rsplit('/', 1)[0])): mod_name_dir = mod_name_dir.rsplit('/', 1)[0] # We then have to check whether it exists, and if so, we pass it # to the template. if os.path.exists(os.path.join(base_path, mod_name_dir, 'references.txt')): # An important subtlety here is that the path we pass in has # to be relative to the file being generated, so we have to # figure out the right number of '..'s ndirsback = path.replace(base_path, '').count('/') ref_file_rel_segments = ['..'] * ndirsback ref_file_rel_segments.append(mod_name_dir) ref_file_rel_segments.append('references.txt') ns['referencefile'] = os.path.join(*ref_file_rel_segments) rendered = template.render(**ns) f.write(cleanup_whitespace(rendered)) finally: f.close() def setup(app): # need autodoc fixes # Note: we use __name__ here instead of just writing the module name in # case this extension is bundled into another package from . import autodoc_enhancements app.setup_extension(autodoc_enhancements.__name__) # need inheritance-diagram for automod-diagram app.setup_extension('sphinx.ext.inheritance_diagram') app.add_directive('automod-diagram', Automoddiagram) app.add_directive('automodsumm', Automodsumm) app.connect('builder-inited', process_automodsumm_generation) app.add_config_value('automodsumm_writereprocessed', False, True) app.add_config_value('automodsumm_inherited_members', False, 'env') ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/automodapi.py0000664000175000017500000003665113207611674032530 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This directive takes a single argument that must be a module or package. 
It will produce a block of documentation that includes the docstring for the package, an :ref:`automodsumm` directive, and an :ref:`automod-diagram` if there are any classes in the module. If only the main docstring of the module/package is desired in the documentation, use `automodule`_ instead of `automodapi`_. It accepts the following options: * ``:include-all-objects:`` If present, include not just functions and classes, but all objects. This includes variables, for which a possible docstring after the variable definition will be shown. * ``:no-inheritance-diagram:`` If present, the inheritance diagram will not be shown even if the module/package has classes. * ``:skip: str`` This option results in the specified object being skipped, that is the object will *not* be included in the generated documentation. This option may appear any number of times to skip multiple objects. * ``:no-main-docstr:`` If present, the docstring for the module/package will not be generated. The function and class tables will still be used, however. * ``:headings: str`` Specifies the characters (in one string) used as the heading levels used for the generated section. This must have at least 2 characters (any after 2 will be ignored). This also *must* match the rest of the documentation on this page for sphinx to be happy. Defaults to "-^", which matches the convention used for Python's documentation, assuming the automodapi call is inside a top-level section (which usually uses '='). * ``:no-heading:`` If specified do not create a top level heading for the section. That is, do not create a title heading with text like "packagename Package". The actual docstring for the package/module will still be shown, though, unless ``:no-main-docstr:`` is given. * ``:allowed-package-names: str`` Specifies the packages that functions/classes documented here are allowed to be from, as comma-separated list of package names. 
If not given, only objects that are actually in a subpackage of the package currently being documented are included. * ``:inherited-members:`` / ``:no-inherited-members:`` The global sphinx configuration option ``automodsumm_inherited_members`` decides if members that a class inherits from a base class are included in the generated documentation. The option ``:inherited-members:`` or ``:no-inherited-members:`` allows the user to overrride the global setting. This extension also adds three sphinx configuration options: * ``automodapi_toctreedirnm`` This must be a string that specifies the name of the directory the automodsumm generated documentation ends up in. This directory path should be relative to the documentation root (e.g., same place as ``index.rst``). Defaults to ``'api'``. * ``automodapi_writereprocessed`` Should be a bool, and if `True`, will cause `automodapi`_ to write files with any `automodapi`_ sections replaced with the content Sphinx processes after `automodapi`_ has run. The output files are not actually used by sphinx, so this option is only for figuring out the cause of sphinx warnings or other debugging. Defaults to `False`. * ``automodsumm_inherited_members`` Should be a bool and if ``True`` members that a class inherits from a base class are included in the generated documentation. Defaults to ``False``. .. _automodule: http://sphinx-doc.org/latest/ext/autodoc.html?highlight=automodule#directive-automodule """ # Implementation note: # The 'automodapi' directive is not actually implemented as a docutils # directive. Instead, this extension searches for the 'automodapi' text in # all sphinx documents, and replaces it where necessary from a template built # into this extension. This is necessary because automodsumm (and autosummary) # use the "builder-inited" event, which comes before the directives are # actually built. 
import inspect import io import os import re import sys from .utils import find_mod_objs if sys.version_info[0] == 3: text_type = str else: text_type = unicode automod_templ_modheader = """ {modname} {pkgormod} {modhds}{pkgormodhds} {automoduleline} """ automod_templ_classes = """ Classes {clshds} .. automodsumm:: {modname} :classes-only: {clsfuncoptions} """ automod_templ_funcs = """ Functions {funchds} .. automodsumm:: {modname} :functions-only: {clsfuncoptions} """ automod_templ_vars = """ Variables {otherhds} .. automodsumm:: {modname} :variables-only: {clsfuncoptions} """ automod_templ_inh = """ Class Inheritance Diagram {clsinhsechds} .. automod-diagram:: {modname} :private-bases: :parts: 1 {allowedpkgnms} {skip} """ _automodapirex = re.compile(r'^(?:\.\.\s+automodapi::\s*)([A-Za-z0-9_.]+)' r'\s*$((?:\n\s+:[a-zA-Z_\-]+:.*$)*)', flags=re.MULTILINE) # the last group of the above regex is intended to go into finall with the below _automodapiargsrex = re.compile(r':([a-zA-Z_\-]+):(.*)$', flags=re.MULTILINE) def automodapi_replace(sourcestr, app, dotoctree=True, docname=None, warnings=True): """ Replaces `sourcestr`'s entries of ".. automdapi::" with the automodapi template form based on provided options. This is used with the sphinx event 'source-read' to replace `automodapi`_ entries before sphinx actually processes them, as automodsumm needs the code to be present to generate stub documentation. Parameters ---------- sourcestr : str The string with sphinx source to be checked for automodapi replacement. app : `sphinx.application.Application` The sphinx application. dotoctree : bool If `True`, a ":toctree:" option will be added in the ".. automodsumm::" sections of the template, pointing to the appropriate "generated" directory based on the Astropy convention (e.g. in ``docs/api``) docname : str The name of the file for this `sourcestr` (if known - if not, it can be `None`). 
If not provided and `dotoctree` is `True`, the generated files may end up in the wrong place. warnings : bool If `False`, all warnings that would normally be issued are silenced. Returns ------- newstr :str The string with automodapi entries replaced with the correct sphinx markup. """ spl = _automodapirex.split(sourcestr) if len(spl) > 1: # automodsumm is in this document # Use app.srcdir because api folder should be inside source folder not # at folder where sphinx is run. if dotoctree: toctreestr = ':toctree: ' api_dir = os.path.join(app.srcdir, app.config.automodapi_toctreedirnm) if docname is None: doc_path = '.' else: doc_path = os.path.join(app.srcdir, docname) toctreestr += os.path.relpath(api_dir, os.path.dirname(doc_path)) else: toctreestr = '' newstrs = [spl[0]] for grp in range(len(spl) // 3): modnm = spl[grp * 3 + 1] # find where this is in the document for warnings if docname is None: location = None else: location = (docname, spl[0].count('\n')) # initialize default options toskip = [] inhdiag = maindocstr = top_head = True hds = '-^' allowedpkgnms = [] allowothers = False # look for actual options unknownops = [] inherited_members = None for opname, args in _automodapiargsrex.findall(spl[grp * 3 + 2]): if opname == 'skip': toskip.append(args.strip()) elif opname == 'no-inheritance-diagram': inhdiag = False elif opname == 'no-main-docstr': maindocstr = False elif opname == 'headings': hds = args elif opname == 'no-heading': top_head = False elif opname == 'allowed-package-names': allowedpkgnms.append(args.strip()) elif opname == 'inherited-members': inherited_members = True elif opname == 'no-inherited-members': inherited_members = False elif opname == 'include-all-objects': allowothers = True else: unknownops.append(opname) # join all the allowedpkgnms if len(allowedpkgnms) == 0: allowedpkgnms = '' onlylocals = True else: allowedpkgnms = ':allowed-package-names: ' + ','.join(allowedpkgnms) onlylocals = allowedpkgnms # get the two heading chars if 
len(hds) < 2: msg = 'Not enough headings (got {0}, need 2), using default -^' if warnings: app.warn(msg.format(len(hds)), location) hds = '-^' h1, h2 = hds.lstrip()[:2] # tell sphinx that the remaining args are invalid. if len(unknownops) > 0 and app is not None: opsstrs = ','.join(unknownops) msg = 'Found additional options ' + opsstrs + ' in automodapi.' if warnings: app.warn(msg, location) ispkg, hascls, hasfuncs, hasother = _mod_info( modnm, toskip, onlylocals=onlylocals) # add automodule directive only if no-main-docstr isn't present if maindocstr: automodline = '.. automodule:: {modname}'.format(modname=modnm) else: automodline = '' if top_head: newstrs.append(automod_templ_modheader.format( modname=modnm, modhds=h1 * len(modnm), pkgormod='Package' if ispkg else 'Module', pkgormodhds=h1 * (8 if ispkg else 7), automoduleline=automodline)) # noqa else: newstrs.append(automod_templ_modheader.format( modname='', modhds='', pkgormod='', pkgormodhds='', automoduleline=automodline)) # construct the options for the class/function sections # start out indented at 4 spaces, but need to keep the indentation. clsfuncoptions = [] if toctreestr: clsfuncoptions.append(toctreestr) if toskip: clsfuncoptions.append(':skip: ' + ','.join(toskip)) if allowedpkgnms: clsfuncoptions.append(allowedpkgnms) if hascls: # This makes no sense unless there are classes. 
if inherited_members is True: clsfuncoptions.append(':inherited-members:') if inherited_members is False: clsfuncoptions.append(':no-inherited-members:') clsfuncoptionstr = '\n '.join(clsfuncoptions) if hasfuncs: newstrs.append(automod_templ_funcs.format( modname=modnm, funchds=h2 * 9, clsfuncoptions=clsfuncoptionstr)) if hascls: newstrs.append(automod_templ_classes.format( modname=modnm, clshds=h2 * 7, clsfuncoptions=clsfuncoptionstr)) if allowothers and hasother: newstrs.append(automod_templ_vars.format( modname=modnm, otherhds=h2 * 9, clsfuncoptions=clsfuncoptionstr)) if inhdiag and hascls: # add inheritance diagram if any classes are in the module if toskip: clsskip = ':skip: ' + ','.join(toskip) else: clsskip = '' diagram_entry = automod_templ_inh.format( modname=modnm, clsinhsechds=h2 * 25, allowedpkgnms=allowedpkgnms, skip=clsskip) diagram_entry = diagram_entry.replace(' \n', '') newstrs.append(diagram_entry) newstrs.append(spl[grp * 3 + 3]) newsourcestr = ''.join(newstrs) if app.config.automodapi_writereprocessed: # sometimes they are unicode, sometimes not, depending on how # sphinx has processed things if isinstance(newsourcestr, text_type): ustr = newsourcestr else: ustr = newsourcestr.decode(app.config.source_encoding) if docname is None: with io.open(os.path.join(app.srcdir, 'unknown.automodapi'), 'a', encoding='utf8') as f: f.write(u'\n**NEW DOC**\n\n') f.write(ustr) else: env = app.builder.env # Determine the filename associated with this doc (specifically # the extension) filename = docname + os.path.splitext(env.doc2path(docname))[1] filename += '.automodapi' with io.open(os.path.join(app.srcdir, filename), 'w', encoding='utf8') as f: f.write(ustr) return newsourcestr else: return sourcestr def _mod_info(modname, toskip=[], onlylocals=True): """ Determines if a module is a module or a package and whether or not it has classes or functions. 
""" hascls = hasfunc = hasother = False for localnm, fqnm, obj in zip(*find_mod_objs(modname, onlylocals=onlylocals)): if localnm not in toskip: hascls = hascls or inspect.isclass(obj) hasfunc = hasfunc or inspect.isroutine(obj) hasother = hasother or (not inspect.isclass(obj) and not inspect.isroutine(obj)) if hascls and hasfunc and hasother: break # find_mod_objs has already imported modname # TODO: There is probably a cleaner way to do this, though this is pretty # reliable for all Python versions for most cases that we care about. pkg = sys.modules[modname] ispkg = (hasattr(pkg, '__file__') and isinstance(pkg.__file__, str) and os.path.split(pkg.__file__)[1].startswith('__init__.py')) return ispkg, hascls, hasfunc, hasother def process_automodapi(app, docname, source): source[0] = automodapi_replace(source[0], app, True, docname) def setup(app): app.setup_extension('sphinx.ext.autosummary') # Note: we use __name__ here instead of just writing the module name in # case this extension is bundled into another package from . import automodsumm app.setup_extension(automodsumm.__name__) app.connect('source-read', process_automodapi) app.add_config_value('automodapi_toctreedirnm', 'api', True) app.add_config_value('automodapi_writereprocessed', False, True) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/utils.py0000664000175000017500000001574413207611674031526 0ustar mseifertmseifert00000000000000import inspect import sys import re import os from warnings import warn from sphinx.ext.autosummary.generate import find_autosummary_in_docstring if sys.version_info[0] >= 3: def iteritems(dictionary): return dictionary.items() else: def iteritems(dictionary): return dictionary.iteritems() # We use \n instead of os.linesep because even on Windows, the generated files # use \n as the newline character. 
SPACE_NEWLINE = ' \n' SINGLE_NEWLINE = '\n' DOUBLE_NEWLINE = '\n\n' TRIPLE_NEWLINE = '\n\n\n' def cleanup_whitespace(text): """ Make sure there are never more than two consecutive newlines, and that there are no trailing whitespaces. """ # Get rid of overall leading/trailing whitespace text = text.strip() + '\n' # Get rid of trailing whitespace on each line while SPACE_NEWLINE in text: text = text.replace(SPACE_NEWLINE, SINGLE_NEWLINE) # Avoid too many consecutive newlines while TRIPLE_NEWLINE in text: text = text.replace(TRIPLE_NEWLINE, DOUBLE_NEWLINE) return text def find_mod_objs(modname, onlylocals=False): """ Returns all the public attributes of a module referenced by name. .. note:: The returned list *not* include subpackages or modules of `modname`,nor does it include private attributes (those that beginwith '_' or are not in `__all__`). Parameters ---------- modname : str The name of the module to search. onlylocals : bool If True, only attributes that are either members of `modname` OR one of its modules or subpackages will be included. Returns ------- localnames : list of str A list of the names of the attributes as they are named in the module `modname` . fqnames : list of str A list of the full qualified names of the attributes (e.g., ``astropy.utils.misc.find_mod_objs``). For attributes that are simple variables, this is based on the local name, but for functions or classes it can be different if they are actually defined elsewhere and just referenced in `modname`. 
objs : list of objects A list of the actual attributes themselves (in the same order as the other arguments) """ __import__(modname) mod = sys.modules[modname] if hasattr(mod, '__all__'): pkgitems = [(k, mod.__dict__[k]) for k in mod.__all__] else: pkgitems = [(k, mod.__dict__[k]) for k in dir(mod) if k[0] != '_'] # filter out modules and pull the names and objs out ismodule = inspect.ismodule localnames = [k for k, v in pkgitems if not ismodule(v)] objs = [v for k, v in pkgitems if not ismodule(v)] # fully qualified names can be determined from the object's module fqnames = [] for obj, lnm in zip(objs, localnames): if hasattr(obj, '__module__') and hasattr(obj, '__name__'): fqnames.append(obj.__module__ + '.' + obj.__name__) else: fqnames.append(modname + '.' + lnm) if onlylocals: valids = [fqn.startswith(modname) for fqn in fqnames] localnames = [e for i, e in enumerate(localnames) if valids[i]] fqnames = [e for i, e in enumerate(fqnames) if valids[i]] objs = [e for i, e in enumerate(objs) if valids[i]] return localnames, fqnames, objs def find_autosummary_in_lines_for_automodsumm(lines, module=None, filename=None): """Find out what items appear in autosummary:: directives in the given lines. Returns a list of (name, toctree, template, inherited_members) where *name* is a name of an object and *toctree* the :toctree: path of the corresponding autosummary directive (relative to the root of the file name), *template* the value of the :template: option, and *inherited_members* is the value of the :inherited-members: option. *toctree*, *template*, and *inherited_members* are ``None`` if the directive does not have the corresponding options set. .. note:: This is a slightly modified version of ``sphinx.ext.autosummary.generate.find_autosummary_in_lines`` which recognizes the ``inherited-members`` option. 
""" autosummary_re = re.compile(r'^(\s*)\.\.\s+autosummary::\s*') automodule_re = re.compile( r'^\s*\.\.\s+automodule::\s*([A-Za-z0-9_.]+)\s*$') module_re = re.compile( r'^\s*\.\.\s+(current)?module::\s*([a-zA-Z0-9_.]+)\s*$') autosummary_item_re = re.compile(r'^\s+(~?[_a-zA-Z][a-zA-Z0-9_.]*)\s*.*?') toctree_arg_re = re.compile(r'^\s+:toctree:\s*(.*?)\s*$') template_arg_re = re.compile(r'^\s+:template:\s*(.*?)\s*$') inherited_members_arg_re = re.compile(r'^\s+:inherited-members:\s*$') no_inherited_members_arg_re = re.compile(r'^\s+:no-inherited-members:\s*$') documented = [] toctree = None template = None inherited_members = None current_module = module in_autosummary = False base_indent = "" for line in lines: if in_autosummary: m = toctree_arg_re.match(line) if m: toctree = m.group(1) if filename: toctree = os.path.join(os.path.dirname(filename), toctree) continue m = template_arg_re.match(line) if m: template = m.group(1).strip() continue m = inherited_members_arg_re.match(line) if m: inherited_members = True continue m = no_inherited_members_arg_re.match(line) if m: inherited_members = False continue if line.strip().startswith(':'): warn(line) continue # skip options m = autosummary_item_re.match(line) if m: name = m.group(1).strip() if name.startswith('~'): name = name[1:] if current_module and \ not name.startswith(current_module + '.'): name = "%s.%s" % (current_module, name) documented.append((name, toctree, template, inherited_members)) continue if not line.strip() or line.startswith(base_indent + " "): continue in_autosummary = False m = autosummary_re.match(line) if m: in_autosummary = True base_indent = m.group(1) toctree = None template = None inherited_members = None continue m = automodule_re.search(line) if m: current_module = m.group(1).strip() # recurse into the automodule docstring documented.extend(find_autosummary_in_docstring( current_module, filename=filename)) continue m = module_re.match(line) if m: current_module = m.group(2) continue 
return documented ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/automodapi/autodoc_enhancements.py0000664000175000017500000001230413207611674034541 0ustar mseifertmseifert00000000000000""" Miscellaneous enhancements to help autodoc along. """ import inspect import sys import types import sphinx from distutils.version import LooseVersion from sphinx.ext.autodoc import AttributeDocumenter, ModuleDocumenter from sphinx.util.inspect import isdescriptor if sys.version_info[0] == 3: class_types = (type,) else: class_types = (type, types.ClassType) SPHINX_LT_15 = (LooseVersion(sphinx.__version__) < LooseVersion('1.5')) MethodDescriptorType = type(type.__subclasses__) # See # https://github.com/astropy/astropy-helpers/issues/116#issuecomment-71254836 # for further background on this. def type_object_attrgetter(obj, attr, *defargs): """ This implements an improved attrgetter for type objects (i.e. classes) that can handle class attributes that are implemented as properties on a metaclass. Normally `getattr` on a class with a `property` (say, "foo"), would return the `property` object itself. However, if the class has a metaclass which *also* defines a `property` named "foo", ``getattr(cls, 'foo')`` will find the "foo" property on the metaclass and resolve it. For the purposes of autodoc we just want to document the "foo" property defined on the class, not on the metaclass. For example:: >>> class Meta(type): ... @property ... def foo(cls): ... return 'foo' ... >>> class MyClass(metaclass=Meta): ... @property ... def foo(self): ... \"\"\"Docstring for MyClass.foo property.\"\"\" ... return 'myfoo' ... >>> getattr(MyClass, 'foo') 'foo' >>> type_object_attrgetter(MyClass, 'foo') >>> type_object_attrgetter(MyClass, 'foo').__doc__ 'Docstring for MyClass.foo property.' The last line of the example shows the desired behavior for the purposes of autodoc. 
""" for base in obj.__mro__: if attr in base.__dict__: if isinstance(base.__dict__[attr], property): # Note, this should only be used for properties--for any other # type of descriptor (classmethod, for example) this can mess # up existing expectations of what getattr(cls, ...) returns return base.__dict__[attr] break return getattr(obj, attr, *defargs) if SPHINX_LT_15: # Provided to work around a bug in Sphinx # See https://github.com/sphinx-doc/sphinx/pull/1843 class AttributeDocumenter(AttributeDocumenter): @classmethod def can_document_member(cls, member, membername, isattr, parent): non_attr_types = cls.method_types + class_types + \ (MethodDescriptorType,) isdatadesc = isdescriptor(member) and not \ isinstance(member, non_attr_types) and not \ type(member).__name__ == "instancemethod" # That last condition addresses an obscure case of C-defined # methods using a deprecated type in Python 3, that is not # otherwise exported anywhere by Python return isdatadesc or (not isinstance(parent, ModuleDocumenter) and not inspect.isroutine(member) and not isinstance(member, class_types)) def setup(app): # Must have the autodoc extension set up first so we can override it app.setup_extension('sphinx.ext.autodoc') # Need to import this too since it re-registers all the documenter types # =_= import sphinx.ext.autosummary.generate app.add_autodoc_attrgetter(type, type_object_attrgetter) if sphinx.version_info < (1, 4, 2): # this is a really ugly hack to supress a warning that sphinx 1.4 # generates when overriding an existing directive (which is *desired* # behavior here). As of sphinx v1.4.2, this has been fixed: # https://github.com/sphinx-doc/sphinx/issues/2451 # But we leave it in for 1.4.0/1.4.1 . 
But if the "needs_sphinx" is # eventually updated to >= 1.4.2, this should be removed entirely (in # favor of the line in the "else" clause) _oldwarn = app._warning _oldwarncount = app._warncount try: try: # *this* is in a try/finally because we don't want to force six as # a real dependency. In sphinx 1.4, six is a prerequisite, so # there's no issue. But in older sphinxes this may not be true... # but the inderlying warning is absent anyway so we let it slide. from six import StringIO app._warning = StringIO() except ImportError: pass app.add_autodocumenter(AttributeDocumenter) finally: app._warning = _oldwarn app._warncount = _oldwarncount else: suppress_warnigns_orig = app.config.suppress_warnings[:] if 'app.add_directive' not in app.config.suppress_warnings: app.config.suppress_warnings.append('app.add_directive') try: app.add_autodocumenter(AttributeDocumenter) finally: app.config.suppress_warnings = suppress_warnigns_orig ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/numpydoc/0000775000175000017500000000000013207623133027466 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/numpydoc/__init__.py0000664000175000017500000000016513207611674031610 0ustar mseifertmseifert00000000000000from __future__ import division, absolute_import, print_function __version__ = '0.7.0' from .numpydoc import setup ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/numpydoc/docscrape_sphinx.py0000664000175000017500000002510613207611674033407 0ustar mseifertmseifert00000000000000from __future__ import division, absolute_import, print_function import sys import re import inspect import textwrap import pydoc import collections import os from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment import sphinx from sphinx.jinja2glue import BuiltinTemplateLoader from .docscrape import NumpyDocString, FunctionDoc, ClassDoc if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: 
unicode(s, 'unicode_escape') class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): NumpyDocString.__init__(self, docstring, config=config) self.load_config(config) def load_config(self, config): self.use_plots = config.get('use_plots', False) self.class_members_toctree = config.get('class_members_toctree', True) self.template = config.get('template', None) if self.template is None: template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')] template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) self.template = template_env.get_template('numpydoc_docstring.rst') # string conversion routines def _str_header(self, name, symbol='`'): return ['.. rubric:: ' + name, ''] def _str_field_list(self, name): return [':' + name + ':'] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): return [''] if self['Signature']: return ['``%s``' % self['Signature']] + [''] else: return [''] def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Extended Summary'] + [''] def _str_returns(self, name='Returns'): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param, param_type, desc in self[name]: if param_type: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) else: out += self._str_indent([param.strip()]) if desc: out += [''] out += self._str_indent(desc, 8) out += [''] return out def _str_param_list(self, name): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param, param_type, desc in self[name]: if param_type: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) else: out += self._str_indent(['**%s**' % param.strip()]) if desc: out += [''] out += self._str_indent(desc, 8) out += [''] return out @property def _obj(self): if hasattr(self, '_cls'): return self._cls elif hasattr(self, 
'_f'): return self._f return None def _str_member_list(self, name): """ Generate a member listing, autosummary:: table where possible, and a table where not. """ out = [] if self[name]: out += ['.. rubric:: %s' % name, ''] prefix = getattr(self, '_name', '') if prefix: prefix = '~%s.' % prefix autosum = [] others = [] for param, param_type, desc in self[name]: param = param.strip() # Check if the referenced member can have a docstring or not param_obj = getattr(self._obj, param, None) if not (callable(param_obj) or isinstance(param_obj, property) or inspect.isgetsetdescriptor(param_obj)): param_obj = None if param_obj and (pydoc.getdoc(param_obj) or not desc): # Referenced object has a docstring autosum += [" %s%s" % (prefix, param)] else: others.append((param, param_type, desc)) if autosum: out += ['.. autosummary::'] if self.class_members_toctree: out += [' :toctree:'] out += [''] + autosum if others: maxlen_0 = max(3, max([len(x[0]) + 4 for x in others])) hdr = sixu("=") * maxlen_0 + sixu(" ") + sixu("=") * 10 fmt = sixu('%%%ds %%s ') % (maxlen_0,) out += ['', '', hdr] for param, param_type, desc in others: desc = sixu(" ").join(x.strip() for x in desc).strip() if param_type: desc = "(%s) %s" % (param_type, desc) out += [fmt % ("**" + param.strip() + "**", desc)] out += [hdr] out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += [''] content = textwrap.dedent("\n".join(self[name])).split("\n") out += content out += [''] return out def _str_see_also(self, func_role): out = [] if self['See Also']: see_also = super(SphinxDocString, self)._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out def _str_warnings(self): out = [] if self['Warnings']: out = ['.. warning::', ''] out += self._str_indent(self['Warnings']) return out def _str_index(self): idx = self['index'] out = [] if len(idx) == 0: return out out += ['.. 
index:: %s' % idx.get('default', '')] for section, references in idx.items(): if section == 'default': continue elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: out += [' %s: %s' % (section, ','.join(references))] return out def _str_references(self): out = [] if self['References']: out += self._str_header('References') if isinstance(self['References'], str): self['References'] = [self['References']] out.extend(self['References']) out += [''] # Latex collects all references to a separate bibliography, # so we need to insert links to it if sphinx.__version__ >= "0.6": out += ['.. only:: latex', ''] else: out += ['.. latexonly::', ''] items = [] for line in self['References']: m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I) if m: items.append(m.group(1)) out += [' ' + ", ".join(["[%s]_" % item for item in items]), ''] return out def _str_examples(self): examples_str = "\n".join(self['Examples']) if (self.use_plots and 'import matplotlib' in examples_str and 'plot::' not in examples_str): out = [] out += self._str_header('Examples') out += ['.. 
plot::', ''] out += self._str_indent(self['Examples']) out += [''] return out else: return self._str_section('Examples') def __str__(self, indent=0, func_role="obj"): ns = { 'signature': self._str_signature(), 'index': self._str_index(), 'summary': self._str_summary(), 'extended_summary': self._str_extended_summary(), 'parameters': self._str_param_list('Parameters'), 'returns': self._str_returns('Returns'), 'yields': self._str_returns('Yields'), 'other_parameters': self._str_param_list('Other Parameters'), 'raises': self._str_param_list('Raises'), 'warns': self._str_param_list('Warns'), 'warnings': self._str_warnings(), 'see_also': self._str_see_also(func_role), 'notes': self._str_section('Notes'), 'references': self._str_references(), 'examples': self._str_examples(), 'attributes': self._str_member_list('Attributes'), 'methods': self._str_member_list('Methods'), } ns = dict((k, '\n'.join(v)) for k, v in ns.items()) rendered = self.template.render(**ns) return '\n'.join(self._str_indent(rendered.split('\n'), indent)) class SphinxFunctionDoc(SphinxDocString, FunctionDoc): def __init__(self, obj, doc=None, config={}): self.load_config(config) FunctionDoc.__init__(self, obj, doc=doc, config=config) class SphinxClassDoc(SphinxDocString, ClassDoc): def __init__(self, obj, doc=None, func_doc=None, config={}): self.load_config(config) ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) class SphinxObjDoc(SphinxDocString): def __init__(self, obj, doc=None, config={}): self._f = obj self.load_config(config) SphinxDocString.__init__(self, doc, config=config) def get_doc_object(obj, what=None, doc=None, config={}, builder=None): if what is None: if inspect.isclass(obj): what = 'class' elif inspect.ismodule(obj): what = 'module' elif isinstance(obj, collections.Callable): what = 'function' else: what = 'object' template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')] if builder is not None: template_loader = BuiltinTemplateLoader() 
template_loader.init(builder, dirs=template_dirs) else: template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) config['template'] = template_env.get_template('numpydoc_docstring.rst') if what == 'class': return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, config=config) elif what in ('function', 'method'): return SphinxFunctionDoc(obj, doc=doc, config=config) else: if doc is None: doc = pydoc.getdoc(obj) return SphinxObjDoc(obj, doc, config=config) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/numpydoc/numpydoc.py0000664000175000017500000002253313207611674031712 0ustar mseifertmseifert00000000000000""" ======== numpydoc ======== Sphinx extension that handles docstrings in the Numpy standard format. [1] It will: - Convert Parameters etc. sections to field lists. - Convert See Also section to a See also entry. - Renumber references. - Extract the signature from the docstring, if it can't be determined otherwise. .. [1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt """ from __future__ import division, absolute_import, print_function import sys import re import pydoc import sphinx import inspect import collections if sphinx.__version__ < '1.0.1': raise RuntimeError("Sphinx 1.0.1 or newer is required") from .docscrape_sphinx import get_doc_object, SphinxDocString if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') def rename_references(app, what, name, obj, options, lines, reference_offset=[0]): # replace reference numbers so that there are no duplicates references = [] for line in lines: line = line.strip() m = re.match(sixu('^.. 
\\[(%s)\\]') % app.config.numpydoc_citation_re, line, re.I) if m: references.append(m.group(1)) if references: for i, line in enumerate(lines): for r in references: if re.match(sixu('^\\d+$'), r): new_r = sixu("R%d") % (reference_offset[0] + int(r)) else: new_r = sixu("%s%d") % (r, reference_offset[0]) lines[i] = lines[i].replace(sixu('[%s]_') % r, sixu('[%s]_') % new_r) lines[i] = lines[i].replace(sixu('.. [%s]') % r, sixu('.. [%s]') % new_r) reference_offset[0] += len(references) def mangle_docstrings(app, what, name, obj, options, lines): cfg = {'use_plots': app.config.numpydoc_use_plots, 'show_class_members': app.config.numpydoc_show_class_members, 'show_inherited_class_members': app.config.numpydoc_show_inherited_class_members, 'class_members_toctree': app.config.numpydoc_class_members_toctree} u_NL = sixu('\n') if what == 'module': # Strip top title pattern = '^\\s*[#*=]{4,}\\n[a-z0-9 -]+\\n[#*=]{4,}\\s*' title_re = re.compile(sixu(pattern), re.I | re.S) lines[:] = title_re.sub(sixu(''), u_NL.join(lines)).split(u_NL) else: doc = get_doc_object(obj, what, u_NL.join(lines), config=cfg, builder=app.builder) if sys.version_info[0] >= 3: doc = str(doc) else: doc = unicode(doc) lines[:] = doc.split(u_NL) if (app.config.numpydoc_edit_link and hasattr(obj, '__name__') and obj.__name__): if hasattr(obj, '__module__'): v = dict(full_name=sixu("%s.%s") % (obj.__module__, obj.__name__)) else: v = dict(full_name=obj.__name__) lines += [sixu(''), sixu('.. 
htmlonly::'), sixu('')] lines += [sixu(' %s') % x for x in (app.config.numpydoc_edit_link % v).split("\n")] # call function to replace reference numbers so that there are no # duplicates rename_references(app, what, name, obj, options, lines) def mangle_signature(app, what, name, obj, options, sig, retann): # Do not try to inspect classes that don't define `__init__` if (inspect.isclass(obj) and (not hasattr(obj, '__init__') or 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' if not (isinstance(obj, collections.Callable) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) sig = doc['Signature'] or getattr(obj, '__text_signature__', None) if sig: sig = re.sub(sixu("^[^(]*"), sixu(""), sig) return sig, sixu('') def setup(app, get_doc_object_=get_doc_object): if not hasattr(app, 'add_config_value'): return # probably called by nose, better bail out global get_doc_object get_doc_object = get_doc_object_ app.connect('autodoc-process-docstring', mangle_docstrings) app.connect('autodoc-process-signature', mangle_signature) app.add_config_value('numpydoc_edit_link', None, False) app.add_config_value('numpydoc_use_plots', None, False) app.add_config_value('numpydoc_show_class_members', True, True) app.add_config_value('numpydoc_show_inherited_class_members', True, True) app.add_config_value('numpydoc_class_members_toctree', True, True) app.add_config_value('numpydoc_citation_re', '[a-z0-9_.-]+', True) # Extra mangling domains app.add_domain(NumpyPythonDomain) app.add_domain(NumpyCDomain) metadata = {'parallel_read_safe': True} return metadata # ------------------------------------------------------------------------------ # Docstring-mangling domains # ------------------------------------------------------------------------------ from docutils.statemachine import ViewList from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain class 
ManglingDomainBase(object): directive_mangling_map = {} def __init__(self, *a, **kw): super(ManglingDomainBase, self).__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): for name, objtype in list(self.directive_mangling_map.items()): self.directives[name] = wrap_mangling_directive( self.directives[name], objtype) class NumpyPythonDomain(ManglingDomainBase, PythonDomain): name = 'np' directive_mangling_map = { 'function': 'function', 'class': 'class', 'exception': 'class', 'method': 'function', 'classmethod': 'function', 'staticmethod': 'function', 'attribute': 'attribute', } indices = [] class NumpyCDomain(ManglingDomainBase, CDomain): name = 'np-c' directive_mangling_map = { 'function': 'function', 'member': 'attribute', 'macro': 'function', 'type': 'class', 'var': 'object', } def match_items(lines, content_old): """Create items for mangled lines. This function tries to match the lines in ``lines`` with the items (source file references and line numbers) in ``content_old``. The ``mangle_docstrings`` function changes the actual docstrings, but doesn't keep track of where each line came from. The manging does many operations on the original lines, which are hard to track afterwards. Many of the line changes come from deleting or inserting blank lines. This function tries to match lines by ignoring blank lines. All other changes (such as inserting figures or changes in the references) are completely ignored, so the generated line numbers will be off if ``mangle_docstrings`` does anything non-trivial. This is a best-effort function and the real fix would be to make ``mangle_docstrings`` actually keep track of the ``items`` together with the ``lines``. Examples -------- >>> lines = ['', 'A', '', 'B', ' ', '', 'C', 'D'] >>> lines_old = ['a', '', '', 'b', '', 'c'] >>> items_old = [('file1.py', 0), ('file1.py', 1), ('file1.py', 2), ... 
('file2.py', 0), ('file2.py', 1), ('file2.py', 2)] >>> content_old = ViewList(lines_old, items=items_old) >>> match_items(lines, content_old) # doctest: +NORMALIZE_WHITESPACE [('file1.py', 0), ('file1.py', 0), ('file2.py', 0), ('file2.py', 0), ('file2.py', 2), ('file2.py', 2), ('file2.py', 2), ('file2.py', 2)] >>> # first 2 ``lines`` are matched to 'a', second 2 to 'b', rest to 'c' >>> # actual content is completely ignored. Notes ----- The algorithm tries to match any line in ``lines`` with one in ``lines_old``. It skips over all empty lines in ``lines_old`` and assigns this line number to all lines in ``lines``, unless a non-empty line is found in ``lines`` in which case it goes to the next line in ``lines_old``. """ items_new = [] lines_old = content_old.data items_old = content_old.items j = 0 for i, line in enumerate(lines): # go to next non-empty line in old: # line.strip() checks whether the string is all whitespace while j < len(lines_old) - 1 and not lines_old[j].strip(): j += 1 items_new.append(items_old[j]) if line.strip() and j < len(lines_old) - 1: j += 1 assert(len(items_new) == len(lines)) return items_new def wrap_mangling_directive(base_directive, objtype): class directive(base_directive): def run(self): env = self.state.document.settings.env name = None if self.arguments: m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0]) name = m.group(2).strip() if not name: name = self.arguments[0] lines = list(self.content) mangle_docstrings(env.app, objtype, name, None, None, lines) if self.content: items = match_items(lines, self.content) self.content = ViewList(lines, items=items, parent=self.content.parent) return base_directive.run(self) return directive ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/numpydoc/docscrape.py0000664000175000017500000004452113207611674032020 0ustar mseifertmseifert00000000000000"""Extract reference documentation from the NumPy source tree. 
""" from __future__ import division, absolute_import, print_function import inspect import textwrap import re import pydoc from warnings import warn import collections import copy import sys class Reader(object): """A line-based string reader. """ def __init__(self, data): """ Parameters ---------- data : str String with lines separated by '\n'. """ if isinstance(data, list): self._str = data else: self._str = data.split('\n') # store string as list of lines self.reset() def __getitem__(self, n): return self._str[n] def reset(self): self._l = 0 # current line nr def read(self): if not self.eof(): out = self[self._l] self._l += 1 return out else: return '' def seek_next_non_empty_line(self): for l in self[self._l:]: if l.strip(): break else: self._l += 1 def eof(self): return self._l >= len(self._str) def read_to_condition(self, condition_func): start = self._l for line in self[start:]: if condition_func(line): return self[start:self._l] self._l += 1 if self.eof(): return self[start:self._l+1] return [] def read_to_next_empty_line(self): self.seek_next_non_empty_line() def is_empty(line): return not line.strip() return self.read_to_condition(is_empty) def read_to_next_unindented_line(self): def is_unindented(line): return (line.strip() and (len(line.lstrip()) == len(line))) return self.read_to_condition(is_unindented) def peek(self, n=0): if self._l + n < len(self._str): return self[self._l + n] else: return '' def is_empty(self): return not ''.join(self._str).strip() class ParseError(Exception): def __str__(self): message = self.args[0] if hasattr(self, 'docstring'): message = "%s in %r" % (message, self.docstring) return message class NumpyDocString(collections.Mapping): sections = { 'Signature': '', 'Summary': [''], 'Extended Summary': [], 'Parameters': [], 'Returns': [], 'Yields': [], 'Raises': [], 'Warns': [], 'Other Parameters': [], 'Attributes': [], 'Methods': [], 'See Also': [], 'Notes': [], 'Warnings': [], 'References': '', 'Examples': '', 'index': {} } def 
__init__(self, docstring, config={}): orig_docstring = docstring docstring = textwrap.dedent(docstring).split('\n') self._doc = Reader(docstring) self._parsed_data = copy.deepcopy(self.sections) try: self._parse() except ParseError as e: e.docstring = orig_docstring raise def __getitem__(self, key): return self._parsed_data[key] def __setitem__(self, key, val): if key not in self._parsed_data: warn("Unknown section %s" % key) else: self._parsed_data[key] = val def __iter__(self): return iter(self._parsed_data) def __len__(self): return len(self._parsed_data) def _is_at_section(self): self._doc.seek_next_non_empty_line() if self._doc.eof(): return False l1 = self._doc.peek().strip() # e.g. Parameters if l1.startswith('.. index::'): return True l2 = self._doc.peek(1).strip() # ---------- or ========== return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) def _strip(self, doc): i = 0 j = 0 for i, line in enumerate(doc): if line.strip(): break for j, line in enumerate(doc[::-1]): if line.strip(): break return doc[i:len(doc)-j] def _read_to_next_section(self): section = self._doc.read_to_next_empty_line() while not self._is_at_section() and not self._doc.eof(): if not self._doc.peek(-1).strip(): # previous line was empty section += [''] section += self._doc.read_to_next_empty_line() return section def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if name.startswith('..'): # index section yield name, data[1:] elif len(data) < 2: yield StopIteration else: yield name, self._strip(data[2:]) def _parse_param_list(self, content): r = Reader(content) params = [] while not r.eof(): header = r.read().strip() if ' : ' in header: arg_name, arg_type = header.split(' : ')[:2] else: arg_name, arg_type = header, '' desc = r.read_to_next_unindented_line() desc = dedent_lines(desc) params.append((arg_name, arg_type, desc)) return params _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" 
(?P[a-zA-Z0-9_.-]+))\s*", re.X) def _parse_see_also(self, content): """ func_name : Descriptive text continued text another_func_name : Descriptive text func_name1, func_name2, :meth:`func_name`, func_name3 """ items = [] def parse_item_name(text): """Match ':role:`name`' or 'name'""" m = self._name_rgx.match(text) if m: g = m.groups() if g[1] is None: return g[3], None else: return g[2], g[1] raise ParseError("%s is not a item name" % text) def push_item(name, rest): if not name: return name, role = parse_item_name(name) items.append((name, list(rest), role)) del rest[:] current_func = None rest = [] for line in content: if not line.strip(): continue m = self._name_rgx.match(line) if m and line[m.end():].strip().startswith(':'): push_item(current_func, rest) current_func, line = line[:m.end()], line[m.end():] rest = [line.split(':', 1)[1].strip()] if not rest[0]: rest = [] elif not line.startswith(' '): push_item(current_func, rest) current_func = None if ',' in line: for func in line.split(','): if func.strip(): push_item(func, []) elif line.strip(): current_func = line elif current_func is not None: rest.append(line.strip()) push_item(current_func, rest) return items def _parse_index(self, section, content): """ .. 
index: default :refguide: something, else, and more """ def strip_each_in(lst): return [s.strip() for s in lst] out = {} section = section.split('::') if len(section) > 1: out['default'] = strip_each_in(section[1].split(','))[0] for line in content: line = line.split(':') if len(line) > 2: out[line[1]] = strip_each_in(line[2].split(',')) return out def _parse_summary(self): """Grab signature (if given) and summary""" if self._is_at_section(): return # If several signatures present, take the last one while True: summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): continue break if summary is not None: self['Summary'] = summary if not self._is_at_section(): self['Extended Summary'] = self._read_to_next_section() def _parse(self): self._doc.reset() self._parse_summary() sections = list(self._read_sections()) section_names = set([section for section, content in sections]) has_returns = 'Returns' in section_names has_yields = 'Yields' in section_names # We could do more tests, but we are not. Arbitrarily. if has_returns and has_yields: msg = 'Docstring contains both a Returns and Yields section.' raise ValueError(msg) for (section, content) in sections: if not section.startswith('..'): section = (s.capitalize() for s in section.split(' ')) section = ' '.join(section) if self.get(section): if hasattr(self, '_obj'): # we know where the docs came from: try: filename = inspect.getsourcefile(self._obj) except TypeError: filename = None msg = ("The section %s appears twice in " "the docstring of %s in %s." 
% (section, self._obj, filename)) raise ValueError(msg) else: msg = ("The section %s appears twice" % section) raise ValueError(msg) if section in ('Parameters', 'Returns', 'Yields', 'Raises', 'Warns', 'Other Parameters', 'Attributes', 'Methods'): self[section] = self._parse_param_list(content) elif section.startswith('.. index::'): self['index'] = self._parse_index(section, content) elif section == 'See Also': self['See Also'] = self._parse_see_also(content) else: self[section] = content # string conversion routines def _str_header(self, name, symbol='-'): return [name, len(name)*symbol] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): if self['Signature']: return [self['Signature'].replace('*', '\*')] + [''] else: return [''] def _str_summary(self): if self['Summary']: return self['Summary'] + [''] else: return [] def _str_extended_summary(self): if self['Extended Summary']: return self['Extended Summary'] + [''] else: return [] def _str_param_list(self, name): out = [] if self[name]: out += self._str_header(name) for param, param_type, desc in self[name]: if param_type: out += ['%s : %s' % (param, param_type)] else: out += [param] out += self._str_indent(desc) out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += self[name] out += [''] return out def _str_see_also(self, func_role): if not self['See Also']: return [] out = [] out += self._str_header("See Also") last_had_desc = True for func, desc, role in self['See Also']: if role: link = ':%s:`%s`' % (role, func) elif func_role: link = ':%s:`%s`' % (func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: out += [''] out += [link] else: out[-1] += ", %s" % link if desc: out += self._str_indent([' '.join(desc)]) last_had_desc = True else: last_had_desc = False out += [''] return out def _str_index(self): idx = self['index'] out = [] out += ['.. 
index:: %s' % idx.get('default', '')] for section, references in idx.items(): if section == 'default': continue out += [' :%s: %s' % (section, ', '.join(references))] return out def __str__(self, func_role=''): out = [] out += self._str_signature() out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Yields', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_section('Warnings') out += self._str_see_also(func_role) for s in ('Notes', 'References', 'Examples'): out += self._str_section(s) for param_list in ('Attributes', 'Methods'): out += self._str_param_list(param_list) out += self._str_index() return '\n'.join(out) def indent(str, indent=4): indent_str = ' '*indent if str is None: return indent_str lines = str.split('\n') return '\n'.join(indent_str + l for l in lines) def dedent_lines(lines): """Deindent a list of lines maximally""" return textwrap.dedent("\n".join(lines)).split("\n") def header(text, style='-'): return text + '\n' + style*len(text) + '\n' class FunctionDoc(NumpyDocString): def __init__(self, func, role='func', doc=None, config={}): self._f = func self._role = role # e.g. 
"func" or "meth" if doc is None: if func is None: raise ValueError("No function or docstring given") doc = inspect.getdoc(func) or '' NumpyDocString.__init__(self, doc) if not self['Signature'] and func is not None: func, func_name = self.get_func() try: try: signature = str(inspect.signature(func)) except (AttributeError, ValueError): # try to read signature, backward compat for older Python if sys.version_info[0] >= 3: argspec = inspect.getfullargspec(func) else: argspec = inspect.getargspec(func) signature = inspect.formatargspec(*argspec) signature = '%s%s' % (func_name, signature.replace('*', '\*')) except TypeError: signature = '%s()' % func_name self['Signature'] = signature def get_func(self): func_name = getattr(self._f, '__name__', self.__class__.__name__) if inspect.isclass(self._f): func = getattr(self._f, '__call__', self._f.__init__) else: func = self._f return func, func_name def __str__(self): out = '' func, func_name = self.get_func() signature = self['Signature'].replace('*', '\*') roles = {'func': 'function', 'meth': 'method'} if self._role: if self._role not in roles: print("Warning: invalid role %s" % self._role) out += '.. %s:: %s\n \n\n' % (roles.get(self._role, ''), func_name) out += super(FunctionDoc, self).__str__(func_role=self._role) return out class ClassDoc(NumpyDocString): extra_public_methods = ['__call__'] def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc, config={}): if not inspect.isclass(cls) and cls is not None: raise ValueError("Expected a class or None, but got %r" % cls) self._cls = cls self.show_inherited_members = config.get( 'show_inherited_class_members', True) if modulename and not modulename.endswith('.'): modulename += '.' 
self._mod = modulename if doc is None: if cls is None: raise ValueError("No class or documentation string given") doc = pydoc.getdoc(cls) NumpyDocString.__init__(self, doc) if config.get('show_class_members', True): def splitlines_x(s): if not s: return [] else: return s.splitlines() for field, items in [('Methods', self.methods), ('Attributes', self.properties)]: if not self[field]: doc_list = [] for name in sorted(items): try: doc_item = pydoc.getdoc(getattr(self._cls, name)) doc_list.append((name, '', splitlines_x(doc_item))) except AttributeError: pass # method doesn't exist self[field] = doc_list @property def methods(self): if self._cls is None: return [] return [name for name, func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) and isinstance(func, collections.Callable) and self._is_show_member(name))] @property def properties(self): if self._cls is None: return [] return [name for name, func in inspect.getmembers(self._cls) if (not name.startswith('_') and (func is None or isinstance(func, property) or inspect.isgetsetdescriptor(func)) and self._is_show_member(name))] def _is_show_member(self, name): if self.show_inherited_members: return True # show all class members if name not in self._cls.__dict__: return False # class member is inherited, we do not show it return True ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/__init__.py0000664000175000017500000000111513207611674027746 0ustar mseifertmseifert00000000000000# The ``astropy_helpers.extern`` sub-module includes modules developed elsewhere # that are bundled here for convenience. At the moment, this consists of the # following two sphinx extensions: # # * `numpydoc `_, a Sphinx extension # developed as part of the Numpy project. This is used to parse docstrings # in Numpy format # # * `sphinx-automodapi `_, a Sphinx # developed as part of the Astropy project. 
This used to be developed directly # in ``astropy-helpers`` but is now a standalone package. ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/extern/setup_package.py0000664000175000017500000000027513207611674031030 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst def get_package_data(): return {'astropy_helpers.extern': ['automodapi/templates/*/*.rst', 'numpydoc/templates/*.rst']} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/utils.py0000664000175000017500000006476513207611674026066 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import, unicode_literals import contextlib import functools import imp import inspect import os import sys import glob import textwrap import types import warnings try: from importlib import machinery as import_machinery # Python 3.2 does not have SourceLoader if not hasattr(import_machinery, 'SourceLoader'): import_machinery = None except ImportError: import_machinery = None # Python 3.3's importlib caches filesystem reads for faster imports in the # general case. But sometimes it's necessary to manually invalidate those # caches so that the import system can pick up new generated files. See # https://github.com/astropy/astropy/issues/820 if sys.version_info[:2] >= (3, 3): from importlib import invalidate_caches else: def invalidate_caches(): return None # Python 2/3 compatibility if sys.version_info[0] < 3: string_types = (str, unicode) # noqa else: string_types = (str,) # Note: The following Warning subclasses are simply copies of the Warnings in # Astropy of the same names. class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. 
""" class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ def _get_platlib_dir(cmd): """ Given a build command, return the name of the appropriate platform-specific build subdirectory directory (e.g. build/lib.linux-x86_64-2.7) """ plat_specifier = '.{0}-{1}'.format(cmd.plat_name, sys.version[0:3]) return os.path.join(cmd.build_base, 'lib' + plat_specifier) def get_numpy_include_path(): """ Gets the path to the numpy headers. """ # We need to go through this nonsense in case setuptools # downloaded and installed Numpy for us as part of the build or # install, since Numpy may still think it's in "setup mode", when # in fact we're ready to use it to build astropy now. if sys.version_info[0] >= 3: import builtins if hasattr(builtins, '__NUMPY_SETUP__'): del builtins.__NUMPY_SETUP__ import imp import numpy imp.reload(numpy) else: import __builtin__ if hasattr(__builtin__, '__NUMPY_SETUP__'): del __builtin__.__NUMPY_SETUP__ import numpy reload(numpy) try: numpy_include = numpy.get_include() except AttributeError: numpy_include = numpy.get_numpy_include() return numpy_include class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x def write(self, s): pass def flush(self): pass @contextlib.contextmanager def silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr if sys.platform == 'win32': import ctypes def _has_hidden_attribute(filepath): """ Returns True if the given filepath has the hidden attribute on MS-Windows. 
Based on a post here: http://stackoverflow.com/questions/284115/cross-platform-hidden-file-detection """ if isinstance(filepath, bytes): filepath = filepath.decode(sys.getfilesystemencoding()) try: attrs = ctypes.windll.kernel32.GetFileAttributesW(filepath) assert attrs != -1 result = bool(attrs & 2) except (AttributeError, AssertionError): result = False return result else: def _has_hidden_attribute(filepath): return False def is_path_hidden(filepath): """ Determines if a given file or directory is hidden. Parameters ---------- filepath : str The path to a file or directory Returns ------- hidden : bool Returns `True` if the file is hidden """ name = os.path.basename(os.path.abspath(filepath)) if isinstance(name, bytes): is_dotted = name.startswith(b'.') else: is_dotted = name.startswith('.') return is_dotted or _has_hidden_attribute(filepath) def walk_skip_hidden(top, onerror=None, followlinks=False): """ A wrapper for `os.walk` that skips hidden files and directories. This function does not have the parameter `topdown` from `os.walk`: the directories must always be recursed top-down when using this function. See also -------- os.walk : For a description of the parameters """ for root, dirs, files in os.walk( top, topdown=True, onerror=onerror, followlinks=followlinks): # These lists must be updated in-place so os.walk will skip # hidden directories dirs[:] = [d for d in dirs if not is_path_hidden(d)] files[:] = [f for f in files if not is_path_hidden(f)] yield root, dirs, files def write_if_different(filename, data): """Write `data` to `filename`, if the content of the file is different. Parameters ---------- filename : str The file name to be written to. data : bytes The data to be written to `filename`. 
""" assert isinstance(data, bytes) if os.path.exists(filename): with open(filename, 'rb') as fd: original_data = fd.read() else: original_data = None if original_data != data: with open(filename, 'wb') as fd: fd.write(data) def import_file(filename, name=None): """ Imports a module from a single file as if it doesn't belong to a particular package. The returned module will have the optional ``name`` if given, or else a name generated from the filename. """ # Specifying a traditional dot-separated fully qualified name here # results in a number of "Parent module 'astropy' not found while # handling absolute import" warnings. Using the same name, the # namespaces of the modules get merged together. So, this # generates an underscore-separated name which is more likely to # be unique, and it doesn't really matter because the name isn't # used directly here anyway. mode = 'U' if sys.version_info[0] < 3 else 'r' if name is None: basename = os.path.splitext(filename)[0] name = '_'.join(os.path.relpath(basename).split(os.sep)[1:]) if import_machinery: loader = import_machinery.SourceFileLoader(name, filename) mod = loader.load_module() else: with open(filename, mode) as fd: mod = imp.load_module(name, fd, filename, ('.py', mode, 1)) return mod def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. Raise `ImportError` if the module or name is not found. 
""" parts = name.split('.') cursor = len(parts) - 1 module_name = parts[:cursor] attr_name = parts[-1] while cursor > 0: try: ret = __import__('.'.join(module_name), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret if sys.version_info[0] >= 3: def iteritems(dictionary): return dictionary.items() else: def iteritems(dictionary): return dictionary.iteritems() def extends_doc(extended_func): """ A function decorator for use when wrapping an existing function but adding additional functionality. This copies the docstring from the original function, and appends to it (along with a newline) the docstring of the wrapper function. Example ------- >>> def foo(): ... '''Hello.''' ... >>> @extends_doc(foo) ... def bar(): ... '''Goodbye.''' ... >>> print(bar.__doc__) Hello. Goodbye. """ def decorator(func): if not (extended_func.__doc__ is None or func.__doc__ is None): func.__doc__ = '\n\n'.join([extended_func.__doc__.rstrip('\n'), func.__doc__.lstrip('\n')]) return func return decorator # Duplicated from astropy.utils.decorators.deprecated # When fixing issues in this function fix them in astropy first, then # port the fixes over to astropy-helpers def deprecated(since, message='', name='', alternative='', pending=False, obj_type=None): """ Used to mark a function or class as deprecated. To mark an attribute as deprecated, use `deprecated_attribute`. Parameters ------------ since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. The format specifier ``func`` may be used for the name of the function, and ``alternative`` may be used in the deprecation message to insert the name of an alternative to the deprecated function. 
``obj_type`` may be used to insert a friendly name for the type of object being deprecated. name : str, optional The name of the deprecated function or class; if not provided the name is automatically determined from the passed in function or class, though this is useful in the case of renamed functions, where the new function is just assigned to the name of the deprecated function. For example:: def new_function(): ... oldFunction = new_function alternative : str, optional An alternative function or class name that the user may use in place of the deprecated object. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a AstropyPendingDeprecationWarning instead of a AstropyDeprecationWarning. obj_type : str, optional The type of this object, if the automatically determined one needs to be overridden. """ method_types = (classmethod, staticmethod, types.MethodType) def deprecate_doc(old_doc, message): """ Returns a given docstring with a deprecation message prepended to it. """ if not old_doc: old_doc = '' old_doc = textwrap.dedent(old_doc).strip('\n') new_doc = (('\n.. deprecated:: %(since)s' '\n %(message)s\n\n' % {'since': since, 'message': message.strip()}) + old_doc) if not old_doc: # This is to prevent a spurious 'unexpected unindent' warning from # docutils when the original docstring was blank. new_doc += r'\ ' return new_doc def get_function(func): """ Given a function or classmethod (or other function wrapper type), get the function object. """ if isinstance(func, method_types): func = func.__func__ return func def deprecate_function(func, message): """ Returns a wrapped function that displays an ``AstropyDeprecationWarning`` when it is called. 
""" if isinstance(func, method_types): func_wrapper = type(func) else: func_wrapper = lambda f: f func = get_function(func) def deprecated_func(*args, **kwargs): if pending: category = AstropyPendingDeprecationWarning else: category = AstropyDeprecationWarning warnings.warn(message, category, stacklevel=2) return func(*args, **kwargs) # If this is an extension function, we can't call # functools.wraps on it, but we normally don't care. # This crazy way to get the type of a wrapper descriptor is # straight out of the Python 3.3 inspect module docs. if type(func) != type(str.__dict__['__add__']): deprecated_func = functools.wraps(func)(deprecated_func) deprecated_func.__doc__ = deprecate_doc( deprecated_func.__doc__, message) return func_wrapper(deprecated_func) def deprecate_class(cls, message): """ Returns a wrapper class with the docstrings updated and an __init__ function that will raise an ``AstropyDeprectationWarning`` warning when called. """ # Creates a new class with the same name and bases as the # original class, but updates the dictionary with a new # docstring and a wrapped __init__ method. __module__ needs # to be manually copied over, since otherwise it will be set # to *this* module (astropy.utils.misc). # This approach seems to make Sphinx happy (the new class # looks enough like the original class), and works with # extension classes (which functools.wraps does not, since # it tries to modify the original class). # We need to add a custom pickler or you'll get # Can't pickle : it's not found as ... # errors. Picklability is required for any class that is # documented by Sphinx. 
members = cls.__dict__.copy() members.update({ '__doc__': deprecate_doc(cls.__doc__, message), '__init__': deprecate_function(get_function(cls.__init__), message), }) return type(cls.__name__, cls.__bases__, members) def deprecate(obj, message=message, name=name, alternative=alternative, pending=pending): if obj_type is None: if isinstance(obj, type): obj_type_name = 'class' elif inspect.isfunction(obj): obj_type_name = 'function' elif inspect.ismethod(obj) or isinstance(obj, method_types): obj_type_name = 'method' else: obj_type_name = 'object' else: obj_type_name = obj_type if not name: name = get_function(obj).__name__ altmessage = '' if not message or type(message) == type(deprecate): if pending: message = ('The %(func)s %(obj_type)s will be deprecated in a ' 'future version.') else: message = ('The %(func)s %(obj_type)s is deprecated and may ' 'be removed in a future version.') if alternative: altmessage = '\n Use %s instead.' % alternative message = ((message % { 'func': name, 'name': name, 'alternative': alternative, 'obj_type': obj_type_name}) + altmessage) if isinstance(obj, type): return deprecate_class(obj, message) else: return deprecate_function(obj, message) if type(message) == type(deprecate): return deprecate(message) return deprecate def deprecated_attribute(name, since, message=None, alternative=None, pending=False): """ Used to mark a public attribute as deprecated. This creates a property that will warn when the given attribute name is accessed. To prevent the warning (i.e. for internal code), use the private name for the attribute by prepending an underscore (i.e. ``self._name``). Parameters ---------- name : str The name of the deprecated attribute. since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. 
def minversion(module, version, inclusive=True, version_path='__version__'):
    """
    Returns `True` if the specified Python module satisfies a minimum version
    requirement, and `False` if not.

    By default this uses `pkg_resources.parse_version` to do the version
    comparison if available.  Otherwise it falls back on
    `distutils.version.LooseVersion`.

    Parameters
    ----------
    module : module or `str`
        An imported module of which to check the version, or the name of
        that module (in which case an import of that module is attempted--
        if this fails `False` is returned).

    version : `str`
        The version as a string that this module must have at a minimum (e.g.
        ``'0.12'``).

    inclusive : `bool`
        The specified version meets the requirement inclusively (i.e. ``>=``)
        as opposed to strictly greater than (default: `True`).

    version_path : `str`
        A dotted attribute path to follow in the module for the version.
        Defaults to just ``'__version__'``, which should work for most Python
        modules.
    """
    # Accept either an already-imported module or its import name; any
    # other argument type is a usage error.
    if isinstance(module, types.ModuleType):
        module_name = module.__name__
    elif isinstance(module, string_types):
        module_name = module
        try:
            module = resolve_name(module_name)
        except ImportError:
            return False
    else:
        raise ValueError('module argument must be an actual imported '
                         'module, or the import name of the module; '
                         'got {0!r}'.format(module))

    # Follow the (possibly dotted) attribute path to the version string.
    if '.' not in version_path:
        have_version = getattr(module, version_path)
    else:
        have_version = resolve_name('.'.join([module.__name__, version_path]))

    # Prefer the more robust pkg_resources comparison when available.
    try:
        from pkg_resources import parse_version
    except ImportError:
        from distutils.version import LooseVersion as parse_version

    have = parse_version(have_version)
    want = parse_version(version)
    return have >= want if inclusive else have > want
lazy : bool, optional If True, caches the value returned by the first call to the getter function, so that it is only called once (used for lazy evaluation of an attribute). This is analogous to `lazyproperty`. The ``lazy`` argument can also be used when `classproperty` is used as a decorator (see the third example below). When used in the decorator syntax this *must* be passed in as a keyword argument. Examples -------- :: >>> class Foo(object): ... _bar_internal = 1 ... @classproperty ... def bar(cls): ... return cls._bar_internal + 1 ... >>> Foo.bar 2 >>> foo_instance = Foo() >>> foo_instance.bar 2 >>> foo_instance._bar_internal = 2 >>> foo_instance.bar # Ignores instance attributes 2 As previously noted, a `classproperty` is limited to implementing read-only attributes:: >>> class Foo(object): ... _bar_internal = 1 ... @classproperty ... def bar(cls): ... return cls._bar_internal ... @bar.setter ... def bar(cls, value): ... cls._bar_internal = value ... Traceback (most recent call last): ... NotImplementedError: classproperty can only be read-only; use a metaclass to implement modifiable class-level properties When the ``lazy`` option is used, the getter is only called once:: >>> class Foo(object): ... @classproperty(lazy=True) ... def bar(cls): ... print("Performing complicated calculation") ... return 1 ... >>> Foo.bar Performing complicated calculation 1 >>> Foo.bar 1 If a subclass inherits a lazy `classproperty` the property is still re-evaluated for the subclass:: >>> class FooSub(Foo): ... pass ... 
>>> FooSub.bar Performing complicated calculation 1 >>> FooSub.bar 1 """ def __new__(cls, fget=None, doc=None, lazy=False): if fget is None: # Being used as a decorator--return a wrapper that implements # decorator syntax def wrapper(func): return cls(func, lazy=lazy) return wrapper return super(classproperty, cls).__new__(cls) def __init__(self, fget, doc=None, lazy=False): self._lazy = lazy if lazy: self._cache = {} fget = self._wrap_fget(fget) super(classproperty, self).__init__(fget=fget, doc=doc) # There is a buglet in Python where self.__doc__ doesn't # get set properly on instances of property subclasses if # the doc argument was used rather than taking the docstring # from fget if doc is not None: self.__doc__ = doc def __get__(self, obj, objtype=None): if self._lazy and objtype in self._cache: return self._cache[objtype] if objtype is not None: # The base property.__get__ will just return self here; # instead we pass objtype through to the original wrapped # function (which takes the class as its sole argument) val = self.fget.__wrapped__(objtype) else: val = super(classproperty, self).__get__(obj, objtype=objtype) if self._lazy: if objtype is None: objtype = obj.__class__ self._cache[objtype] = val return val def getter(self, fget): return super(classproperty, self).getter(self._wrap_fget(fget)) def setter(self, fset): raise NotImplementedError( "classproperty can only be read-only; use a metaclass to " "implement modifiable class-level properties") def deleter(self, fdel): raise NotImplementedError( "classproperty can only be read-only; use a metaclass to " "implement modifiable class-level properties") @staticmethod def _wrap_fget(orig_fget): if isinstance(orig_fget, classmethod): orig_fget = orig_fget.__func__ # Using stock functools.wraps instead of the fancier version # found later in this module, which is overkill for this purpose @functools.wraps(orig_fget) def fget(obj): return orig_fget(obj.__class__) # Set the __wrapped__ attribute manually for 
def find_data_files(package, pattern):
    """
    Include files matching ``pattern`` inside ``package``.

    Parameters
    ----------
    package : str
        The package inside which to look for data files

    pattern : str
        Pattern (glob-style) to match for the data files (e.g.
        ``*.dat``).  This supports the Python 3.5 ``**`` recursive syntax.
        For example, ``**/*.fits`` matches all files ending with ``.fits``
        recursively. Only one instance of ``**`` can be included in the
        pattern.
    """
    full_pattern = os.path.join(package, pattern)

    # Python 3.5+ glob understands '**' natively.
    if sys.version_info[:2] >= (3, 5):
        return glob.glob(full_pattern, recursive=True)

    # Older interpreters: a pattern without '**' is a plain glob.
    if '**' not in pattern:
        return glob.glob(full_pattern)

    # Emulate a single '**' by globbing the tail pattern in the base
    # directory and then in every subdirectory found by os.walk.
    head, tail = pattern.split('**')
    if tail.startswith(('/', os.sep)):
        tail = tail[1:]
    base = os.path.join(package, head)
    found = glob.glob(os.path.join(base, tail))
    for root, subdirs, _files in os.walk(base):
        for subdir in subdirs:
            found += glob.glob(os.path.join(root, subdir, tail))
    return found
""" from .setup_helpers import _module_state if _module_state['registered_commands'] is None: raise RuntimeError( 'astropy_helpers.setup_helpers.register_commands() must be ' 'called before using ' 'astropy_helpers.setup_helpers.get_dummy_distribution()') # Pre-parse the Distutils command-line options and config files to if # the option is set. dist = Distribution({'script_name': os.path.basename(sys.argv[0]), 'script_args': sys.argv[1:]}) dist.cmdclass.update(_module_state['registered_commands']) with silence(): try: dist.parse_config_files() dist.parse_command_line() except (DistutilsError, AttributeError, SystemExit): # Let distutils handle DistutilsErrors itself AttributeErrors can # get raise for ./setup.py --help SystemExit can be raised if a # display option was used, for example pass return dist def get_distutils_option(option, commands): """ Returns the value of the given distutils option. Parameters ---------- option : str The name of the option commands : list of str The list of commands on which this option is available Returns ------- val : str or None the value of the given distutils option. If the option is not set, returns None. """ dist = get_dummy_distribution() for cmd in commands: cmd_opts = dist.command_options.get(cmd) if cmd_opts is not None and option in cmd_opts: return cmd_opts[option][1] else: return None def get_distutils_build_option(option): """ Returns the value of the given distutils build option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build option. If the option is not set, returns None. """ return get_distutils_option(option, ['build', 'build_ext', 'build_clib']) def get_distutils_install_option(option): """ Returns the value of the given distutils install option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build option. If the option is not set, returns None. 
""" return get_distutils_option(option, ['install']) def get_distutils_build_or_install_option(option): """ Returns the value of the given distutils build or install option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build or install option. If the option is not set, returns None. """ return get_distutils_option(option, ['build', 'build_ext', 'build_clib', 'install']) def get_compiler_option(): """ Determines the compiler that will be used to build extension modules. Returns ------- compiler : str The compiler option specified for the build, build_ext, or build_clib command; or the default compiler for the platform if none was specified. """ compiler = get_distutils_build_option('compiler') if compiler is None: return ccompiler.get_default_compiler() return compiler def add_command_option(command, name, doc, is_bool=False): """ Add a custom option to a setup command. Issues a warning if the option already exists on that command. Parameters ---------- command : str The name of the command as given on the command line name : str The name of the build option doc : str A short description of the option, for the `--help` message is_bool : bool, optional When `True`, the option is a boolean option and doesn't require an associated value. 
""" dist = get_dummy_distribution() cmdcls = dist.get_command_class(command) if (hasattr(cmdcls, '_astropy_helpers_options') and name in cmdcls._astropy_helpers_options): return attr = name.replace('-', '_') if hasattr(cmdcls, attr): raise RuntimeError( '{0!r} already has a {1!r} class attribute, barring {2!r} from ' 'being usable as a custom option name.'.format(cmdcls, attr, name)) for idx, cmd in enumerate(cmdcls.user_options): if cmd[0] == name: log.warn('Overriding existing {0!r} option ' '{1!r}'.format(command, name)) del cmdcls.user_options[idx] if name in cmdcls.boolean_options: cmdcls.boolean_options.remove(name) break cmdcls.user_options.append((name, None, doc)) if is_bool: cmdcls.boolean_options.append(name) # Distutils' command parsing requires that a command object have an # attribute with the same name as the option (with '-' replaced with '_') # in order for that option to be recognized as valid setattr(cmdcls, attr, None) # This caches the options added through add_command_option so that if it is # run multiple times in the same interpreter repeated adds are ignored # (this way we can still raise a RuntimeError if a custom option overrides # a built-in option) if not hasattr(cmdcls, '_astropy_helpers_options'): cmdcls._astropy_helpers_options = set([name]) else: cmdcls._astropy_helpers_options.add(name) def get_distutils_display_options(): """ Returns a set of all the distutils display options in their long and short forms. These are the setup.py arguments such as --name or --version which print the project's metadata and then exit. 
Returns ------- opts : set The long and short form display option arguments, including the - or -- """ short_display_opts = set('-' + o[1] for o in Distribution.display_options if o[1]) long_display_opts = set('--' + o[0] for o in Distribution.display_options) # Include -h and --help which are not explicitly listed in # Distribution.display_options (as they are handled by optparse) short_display_opts.add('-h') long_display_opts.add('--help') # This isn't the greatest approach to hardcode these commands. # However, there doesn't seem to be a good way to determine # whether build *will be* run as part of the command at this # phase. display_commands = set([ 'clean', 'register', 'setopt', 'saveopts', 'egg_info', 'alias']) return short_display_opts.union(long_display_opts.union(display_commands)) def is_distutils_display_option(): """ Returns True if sys.argv contains any of the distutils display options such as --version or --name. """ display_options = get_distutils_display_options() return bool(set(sys.argv[1:]).intersection(display_options)) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/0000775000175000017500000000000013207623133025634 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/ext/0000775000175000017500000000000013207623133026434 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/ext/doctest.py0000664000175000017500000000364113207611674030466 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This is a set of three directives that allow us to insert metadata about doctests into the .rst files so the testing framework knows which tests to skip. This is quite different from the doctest extension in Sphinx itself, which actually does something. For astropy, all of the testing is centrally managed from py.test and Sphinx is not used for running tests. 
""" import re from docutils.nodes import literal_block from docutils.parsers.rst import Directive class DoctestSkipDirective(Directive): has_content = True def run(self): # Check if there is any valid argument, and skip it. Currently only # 'win32' is supported in astropy.tests.pytest_plugins. if re.match('win32', self.content[0]): self.content = self.content[2:] code = '\n'.join(self.content) return [literal_block(code, code)] class DoctestOmitDirective(Directive): has_content = True def run(self): # Simply do not add any content when this directive is encountered return [] class DoctestRequiresDirective(DoctestSkipDirective): # This is silly, but we really support an unbounded number of # optional arguments optional_arguments = 64 def setup(app): app.add_directive('doctest-requires', DoctestRequiresDirective) app.add_directive('doctest-skip', DoctestSkipDirective) app.add_directive('doctest-skip-all', DoctestSkipDirective) app.add_directive('doctest', DoctestSkipDirective) # Code blocks that use this directive will not appear in the generated # documentation. This is intended to hide boilerplate code that is only # useful for testing documentation using doctest, but does not actually # belong in the documentation itself. app.add_directive('testsetup', DoctestOmitDirective) return {'parallel_read_safe': True, 'parallel_write_safe': True} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/ext/__init__.py0000664000175000017500000000010213207611674030545 0ustar mseifertmseifert00000000000000from __future__ import division, absolute_import, print_function ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/ext/edit_on_github.py0000664000175000017500000001346413207611674032010 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This extension makes it easy to edit documentation on github. It adds links associated with each docstring that go to the corresponding view source page on Github. 
def import_object(modname, name):
    """
    Import the object given by *modname* and *name* and return it.
    If not found, or the import fails, returns None.

    Parameters
    ----------
    modname : str
        The name of the module expected to contain the object.
    name : str
        A dotted attribute path to the object within the module.

    Returns
    -------
    object or None
        The resolved object, or `None` when the import or any attribute
        lookup fails.
    """
    try:
        __import__(modname)
        mod = sys.modules[modname]
        obj = mod
        for part in name.split('.'):
            obj = getattr(obj, part)
        return obj
    except Exception:
        # BUGFIX: was a bare ``except:``, which also swallowed SystemExit
        # and KeyboardInterrupt.  Any ordinary failure (ImportError,
        # AttributeError, ...) still resolves to None per the docstring.
        return None
def fix_toc_entries(app, doctree):
    """Re-register toctree entries for documents that set ``:tocdepth:``."""
    env = app.builder.env
    # Get the docname; I don't know why this isn't just passed in to the
    # callback.  Reading it from temp_data seems a bit unreliable as it's
    # undocumented, but it's not "private" either.
    docname = env.temp_data['docname']
    if env.metadata[docname].get('tocdepth', 0) == 0:
        return
    # We need to reprocess any TOC nodes in the doctree and make sure all
    # the files listed in any TOCs are noted
    for toc_node in doctree.traverse(addnodes.toctree):
        env.note_toctree(docname, toc_node)
def setup(app): app.connect('doctree-read', fix_toc_entries) return {'parallel_read_safe': True, 'parallel_write_safe': True} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/ext/changelog_links.py0000664000175000017500000000554313207611674032153 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This sphinx extension makes the issue numbers in the changelog into links to GitHub issues. """ from __future__ import print_function import re from docutils.nodes import Text, reference BLOCK_PATTERN = re.compile('\[#.+\]', flags=re.DOTALL) ISSUE_PATTERN = re.compile('#[0-9]+') def process_changelog_links(app, doctree, docname): for rex in app.changelog_links_rexes: if rex.match(docname): break else: # if the doc doesn't match any of the changelog regexes, don't process return app.info('[changelog_links] Adding changelog links to "{0}"'.format(docname)) for item in doctree.traverse(): if not isinstance(item, Text): continue # We build a new list of items to replace the current item. If # a link is found, we need to use a 'reference' item. children = [] # First cycle through blocks of issues (delimited by []) then # iterate inside each one to find the individual issues. prev_block_end = 0 for block in BLOCK_PATTERN.finditer(item): block_start, block_end = block.start(), block.end() children.append(Text(item[prev_block_end:block_start])) block = item[block_start:block_end] prev_end = 0 for m in ISSUE_PATTERN.finditer(block): start, end = m.start(), m.end() children.append(Text(block[prev_end:start])) issue_number = block[start:end] refuri = app.config.github_issues_url + issue_number[1:] children.append(reference(text=issue_number, name=issue_number, refuri=refuri)) prev_end = end prev_block_end = block_end # If no issues were found, this adds the whole item, # otherwise it adds the remaining text. 
children.append(Text(block[prev_end:block_end])) # If no blocks were found, this adds the whole item, otherwise # it adds the remaining text. children.append(Text(item[prev_block_end:])) # Replace item by the new list of items we have generated, # which may contain links. item.parent.replace(item, children) def setup_patterns_rexes(app): app.changelog_links_rexes = [re.compile(pat) for pat in app.config.changelog_links_docpattern] def setup(app): app.connect('doctree-resolved', process_changelog_links) app.connect('builder-inited', setup_patterns_rexes) app.add_config_value('github_issues_url', None, True) app.add_config_value('changelog_links_docpattern', ['.*changelog.*', 'whatsnew/.*'], True) return {'parallel_read_safe': True, 'parallel_write_safe': True} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/__init__.py0000664000175000017500000000066513207611674027763 0ustar mseifertmseifert00000000000000""" This package contains utilities and extensions for the Astropy sphinx documentation. In particular, the `astropy.sphinx.conf` should be imported by the sphinx ``conf.py`` file for affiliated packages that wish to make use of the Astropy documentation format. Note that some sphinx extensions which are bundled as-is (numpydoc and sphinx-automodapi) are included in astropy_helpers.extern rather than astropy_helpers.sphinx.ext. """ ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/conf.py0000664000175000017500000002721713207611674027153 0ustar mseifertmseifert00000000000000# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst # # Astropy shared Sphinx settings. These settings are shared between # astropy itself and affiliated packages. # # Note that not all possible configuration values are present in this file. # # All configuration values have a default; values that are commented out # serve to show the default. 
import os import sys import warnings from os import path import sphinx from distutils.version import LooseVersion # -- General configuration ---------------------------------------------------- # The version check in Sphinx itself can only compare the major and # minor parts of the version number, not the micro. To do a more # specific version check, call check_sphinx_version("x.y.z.") from # your project's conf.py needs_sphinx = '1.3' on_rtd = os.environ.get('READTHEDOCS', None) == 'True' def check_sphinx_version(expected_version): sphinx_version = LooseVersion(sphinx.__version__) expected_version = LooseVersion(expected_version) if sphinx_version < expected_version: raise RuntimeError( "At least Sphinx version {0} is required to build this " "documentation. Found {1}.".format( expected_version, sphinx_version)) # Configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'python': ('https://docs.python.org/3/', (None, 'http://data.astropy.org/intersphinx/python3.inv')), 'pythonloc': ('http://docs.python.org/', path.abspath(path.join(path.dirname(__file__), 'local/python3_local_links.inv'))), 'numpy': ('https://docs.scipy.org/doc/numpy/', (None, 'http://data.astropy.org/intersphinx/numpy.inv')), 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', (None, 'http://data.astropy.org/intersphinx/scipy.inv')), 'matplotlib': ('http://matplotlib.org/', (None, 'http://data.astropy.org/intersphinx/matplotlib.inv')), 'astropy': ('http://docs.astropy.org/en/stable/', None), 'h5py': ('http://docs.h5py.org/en/latest/', None)} if sys.version_info[0] == 2: intersphinx_mapping['python'] = ( 'https://docs.python.org/2/', (None, 'http://data.astropy.org/intersphinx/python2.inv')) intersphinx_mapping['pythonloc'] = ( 'http://docs.python.org/', path.abspath(path.join(path.dirname(__file__), 'local/python2_local_links.inv'))) # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source 
files. exclude_patterns = ['_build'] # Add any paths that contain templates here, relative to this directory. # templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # The reST default role (used for this markup: `text`) to use for all # documents. Set to the "smart" one. default_role = 'obj' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # This is added to the end of RST files - a good place to put substitutions to # be used globally. rst_epilog = """ .. _Astropy: http://astropy.org """ # A list of warning types to suppress arbitrary warning messages. We mean to # override directives in astropy_helpers.sphinx.ext.autodoc_enhancements, # thus need to ignore those warning. This can be removed once the patch gets # released in upstream Sphinx (https://github.com/sphinx-doc/sphinx/pull/1843). # Suppress the warnings requires Sphinx v1.4.2 suppress_warnings = ['app.add_directive', ] # -- Project information ------------------------------------------------------ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. #pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. 
#modindex_common_prefix = [] # -- Settings for extensions and extension options ---------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.viewcode', 'astropy_helpers.extern.numpydoc', 'astropy_helpers.extern.automodapi.automodapi', 'astropy_helpers.extern.automodapi.smart_resolver', 'astropy_helpers.sphinx.ext.tocdepthfix', 'astropy_helpers.sphinx.ext.doctest', 'astropy_helpers.sphinx.ext.changelog_links'] if not on_rtd and LooseVersion(sphinx.__version__) < LooseVersion('1.4'): extensions.append('sphinx.ext.pngmath') else: extensions.append('sphinx.ext.mathjax') try: import matplotlib.sphinxext.plot_directive extensions += [matplotlib.sphinxext.plot_directive.__name__] # AttributeError is checked here in case matplotlib is installed but # Sphinx isn't. Note that this module is imported by the config file # generator, even if we're not building the docs. except (ImportError, AttributeError): warnings.warn( "matplotlib's plot_directive could not be imported. 
" + "Inline plots will not be included in the output") # Don't show summaries of the members in each class along with the # class' docstring numpydoc_show_class_members = False autosummary_generate = True automodapi_toctreedirnm = 'api' # Class documentation should contain *both* the class docstring and # the __init__ docstring autoclass_content = "both" # Render inheritance diagrams in SVG graphviz_output_format = "svg" graphviz_dot_args = [ '-Nfontsize=10', '-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif', '-Efontsize=10', '-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif', '-Gfontsize=10', '-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif' ] # -- Options for HTML output ------------------------------------------------- # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [path.abspath(path.join(path.dirname(__file__), 'themes'))] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'bootstrap-astropy' # Custom sidebar templates, maps document names to template names. html_sidebars = { '**': ['localtoc.html'], 'search': [], 'genindex': [], 'py-modindex': [], } # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # included in the bootstrap-astropy theme html_favicon = path.join(html_theme_path[0], html_theme, 'static', 'astropy_logo.ico') # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%d %b %Y' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. 
Default is the same as html_title. #html_short_title = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # -- Options for LaTeX output ------------------------------------------------ # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. latex_toplevel_sectioning = 'part' # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False latex_elements = {} # Additional stuff for the LaTeX preamble. latex_elements['preamble'] = r""" % Use a more modern-looking monospace font \usepackage{inconsolata} % The enumitem package provides unlimited nesting of lists and enums. % Sphinx may use this in the future, in which case this can be removed. 
% See https://bitbucket.org/birkenfeld/sphinx/issue/777/latex-output-too-deeply-nested \usepackage{enumitem} \setlistdepth{15} % In the parameters section, place a newline after the Parameters % header. (This is stolen directly from Numpy's conf.py, since it % affects Numpy-style docstrings). \usepackage{expdlist} \let\latexdescription=\description \def\description{\latexdescription{}{} \breaklabel} % Support the superscript Unicode numbers used by the "unicode" units % formatter \DeclareUnicodeCharacter{2070}{\ensuremath{^0}} \DeclareUnicodeCharacter{00B9}{\ensuremath{^1}} \DeclareUnicodeCharacter{00B2}{\ensuremath{^2}} \DeclareUnicodeCharacter{00B3}{\ensuremath{^3}} \DeclareUnicodeCharacter{2074}{\ensuremath{^4}} \DeclareUnicodeCharacter{2075}{\ensuremath{^5}} \DeclareUnicodeCharacter{2076}{\ensuremath{^6}} \DeclareUnicodeCharacter{2077}{\ensuremath{^7}} \DeclareUnicodeCharacter{2078}{\ensuremath{^8}} \DeclareUnicodeCharacter{2079}{\ensuremath{^9}} \DeclareUnicodeCharacter{207B}{\ensuremath{^-}} \DeclareUnicodeCharacter{00B0}{\ensuremath{^{\circ}}} \DeclareUnicodeCharacter{2032}{\ensuremath{^{\prime}}} \DeclareUnicodeCharacter{2033}{\ensuremath{^{\prime\prime}}} % Make the "warning" and "notes" sections use a sans-serif font to % make them stand out more. \renewenvironment{notice}[2]{ \def\py@noticetype{#1} \csname py@noticestart@#1\endcsname \textsf{\textbf{#2}} }{\csname py@noticeend@\py@noticetype\endcsname} """ # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # The name of an image file (relative to this directory) to place at the top of # the title page. 
#latex_logo = None # -- Options for the linkcheck builder ---------------------------------------- # A timeout value, in seconds, for the linkcheck builder linkcheck_timeout = 60 ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/0000775000175000017500000000000013207623133027121 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/0000775000175000017500000000000013207623133032635 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/0000775000175000017500000000000013207623133034124 5ustar mseifertmseifert00000000000000././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.icoccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_l0000664000175000017500000010033413207611674036073 0ustar mseifertmseifert00000000000000@@ (@F  (n@ ( P (Y(@  * Vy oK  H  I!y >K ? |LYerZJ*<3E X1r=$.C*3^p+7 (! sPf %   i# #"""" yu =!"""""""""""""""""""!!! $$$$$$"m8T%!$$$$$###""""###$$$$$$##$$#!' 
&j'(&&&&!Nim5 %&&&%%$##""""""##$$%&&&&&&&&%&3)%(*)))(%'IUq'!')((('&#("DPkpmNj!C&"%&'(((((((&'9 +*+*++)*Qo%%++++*'$"Gt}/R'&))******''U+;-.---,)Vt]z('----,$F?`)),,,,,,.,.t@//0000- :z1(0/./,+Zw#L'./////0.-I0%021110/bH)1221.8xTs+/100011/2$@3n3533318Ek,243418q103222320@ 2666655Dl @/65540t\~?e1\,X,X2]?fZ}u335555644X8 5:88877@j08888.Rx:eE1*-..-*0C3]jl168778855:O9<:::8 BM3;:;7O^@/49:::99984.6/]Mu39999;88<{;=<<<;9hc<:<<<2xOy65<=<<<<<<<<<<92 Af!S9;;;<=9;'=?@??=?k7g5>>>;Mc89?>>>>>>>>>>>>?>7:[=<>>>@>>ABBAA?B#[9AAA:U F9A@@@>:6557;?@@@@A;=e>o>@@@AB?<BDCCCADT>CCC9Ev7CCC>4DBssj0f>:BBBBC; FC@BBBDBBhUEGEEEBFOAEED@Y?FC9&a?r?BEEEE=$_4kCDDDDEB@ FJHHHEIxMDGGF OIE?H_bHEGGGEE\EEGGGIDJ4ILJJJGLtNGJJHZpB= ]]BIIIJA ` PGIIIKHJxLNLLLIN{ RILLJ\r;.h+iGKKKHNo5pJKKKKKLOPNNNLOYJNNMR5nFMNNNF9unMKMMMOLPQQQQORbJPPPH\NPPPI!gPNPPPRPRQTSSQT'mJSSSJ}ANRRRM`TPRRRTPV}UVUUSTfFOTUUQ9{_NTTTP Z~VQTTTVRVPWYWWVU0w _SWWVXS:}jPVVVTYj ]SVVVXVZ"X\YYYW^-vQYYYR;bQ9hSYYYWZbeWXXX[Xf X]\\\ZZ~z^W\[[WdOXKNVV[[[Y\e dY[[[^[f_]_^^]\*x<X]^^\_d)wT[Yt4Y]]]Zar`Z]]]`]b<`c```^]oY```][B?ZY`W"ve]___\h`\___b]mbdbbbba/i[abbaZ_;ryWoW[ab_]pYabbb]rb_aaad_Uewdgeeed`yo\bdddc_ZYZ\addda]G9_dddd\(|ebdddgdg%fjggggep3c_fgggfffffff_cGcefffe`GZeefffifihkiiihe5g'}e`dfghhgeabwj5chhhhd p&fghhhkhj0knkkkkjeNi=| qhdflv0Xddjkkkkd*khjjjjmknmpnnnnmgSm_ex|nkmmmmjmlqllmmmmpku%mrpppppoiD~ tkoonjki.#mnoooororgpuqrrrrqljsoqqps~ttnoqqqqqrn`tvuttttttqsR:ortttm%nb0rrssssusryx1vzwwwwwwvup {WTxquuuvs vpsuvvvvvyuw/x[w|yxxxxyyxwtv+ht2wswxxxxxq7vxxwwwyxw|x~{zzzzzzzzzxwux)-)|uwxzzz{{{{v Ovyyyyyz|w|H {~}}}}||||||||{{{z{{{||||||||||wPy{}||||}}z~~~~~~~~~~~~~~~y!}~~~~~}7}b/n;]+u %gL@Mxs&G#Vs]<T%qss wP70W7 &id 8iuE ( @ d W ,uJ>P&:K bs\n +!!!0J +%%%%ps(%%%%%%%%%%%$$$"5+0***>^@_))))$GyPl 1)))))'....0Xv...C{3---*$3#333Xy2228_#N22227t777O77O`K6 =;eJ6667O:<;I;;;@;;;;;;P@;::U@@@Crj@@(^C???????? 
Fs???ASADDgAtDDjM|DD7lHyDDDTNCCDIII|)fIIW QHHHx\HHHF!MMMo6sMM#e8sLL^LLLMcRRR\YQQhQQQQQQOVVV"mVVcUUUUUUS\l[[ZjZZz[ZZZ]YYWa*___r___G^*y^^^^^^[acchnccce({lccdAcccbbb_i_hhhHqhgggg mhgg zggggfkllljQ8G#llkVkkkip]qqqqN7ppp~tpppowtuuuu~ut ysttttv'y{yyyyy|@lqJ yyyy|yyyw}5~~~~~~~~~~~}}}}"P}}}}{<Ds6i %3a _$;$lw&(0 \&2$1e1i9E Bl{;O " )C!" &&k' g~g##-MTn[tGc<!''&(1+'&Ne!5XE$.*3d411&Mp@e*5187F;g&(X4\2) @Ms1^188D>;8iK2;,C G9,FE=>UB@[>O3j@^S7WO=DC5KFb@]+iJ@oKLIpQNPP W?|?3s YOPVU#n"nFvcD"mbTQ]v__~QdeUL"rhZYa:b^GOUdFR1VB!vU0 ja_q fmgwvb`hQaabjhgnHomt rdr]hqmrzqtXt xjQnytgzwu ~3^eFvqVrwx +}|{{||||'}~s, 6 >4 L;s 9qf$(  * mcmU#1 4E 16G\  'HU'+#!lKi!$0PUAC$v 3 D;->%>4$4"" R6;AJRT0aIx B UW7U-MRX_5A Tt\tOCBNYl-XNKWWG\IVdmd@Nudy_ou%umQ{}qvCU|Pvv totUH NZ: Ul*././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.svgccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_l0000664000175000017500000001103213207611674036067 0ustar mseifertmseifert00000000000000 ././@LongLink0000000000000000000000000000015100000000000011212 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutton.jsccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutto0000664000175000017500000000532113207611674036107 0ustar mseifertmseifert00000000000000$(document).ready(function() { /* Add a [>>>] button on the top-right corner of code samples to hide * the >>> and ... prompts and the output and thus make the code * copyable. 
*/ var div = $('.highlight-python .highlight,' + '.highlight-python3 .highlight,' + '.highlight-default .highlight') var pre = div.find('pre'); // get the styles from the current theme pre.parent().parent().css('position', 'relative'); var hide_text = 'Hide the prompts and output'; var show_text = 'Show the prompts and output'; var border_width = pre.css('border-top-width'); var border_style = pre.css('border-top-style'); var border_color = pre.css('border-top-color'); var button_styles = { 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0', 'border-color': border_color, 'border-style': border_style, 'border-width': border_width, 'color': border_color, 'text-size': '75%', 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em', 'border-radius': '0 3px 0 0' } // create and add the button to all the code blocks that contain >>> div.each(function(index) { var jthis = $(this); if (jthis.find('.gp').length > 0) { var button = $('>>>'); button.css(button_styles) button.attr('title', hide_text); button.data('hidden', 'false'); jthis.prepend(button); } // tracebacks (.gt) contain bare text elements that need to be // wrapped in a span to work with .nextUntil() (see later) jthis.find('pre:has(.gt)').contents().filter(function() { return ((this.nodeType == 3) && (this.data.trim().length > 0)); }).wrap(''); }); // define the behavior of the button when it's clicked $('.copybutton').click(function(e){ e.preventDefault(); var button = $(this); if (button.data('hidden') === 'false') { // hide the code output button.parent().find('.go, .gp, .gt').hide(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden'); button.css('text-decoration', 'line-through'); button.attr('title', show_text); button.data('hidden', 'true'); } else { // show the code output button.parent().find('.go, .gp, .gt').show(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible'); button.css('text-decoration', 'none'); 
button.attr('title', hide_text); button.data('hidden', 'false'); } }); }); ././@LongLink0000000000000000000000000000016100000000000011213 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.cssccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap0000664000175000017500000002744313207611674036105 0ustar mseifertmseifert00000000000000/*! * Bootstrap v1.4.0 * * Copyright 2011 Twitter, Inc * Licensed under the Apache License v2.0 * http://www.apache.org/licenses/LICENSE-2.0 * * Heavily modified by Kyle Barbary for the AstroPy Project for use with Sphinx. */ @import url("basic.css"); body { background-color: #ffffff; margin: 0; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 13px; font-weight: normal; line-height: 18px; color: #404040; } /* Hyperlinks ----------------------------------------------------------------*/ a { color: #0069d6; text-decoration: none; line-height: inherit; font-weight: inherit; } a:hover { color: #00438a; text-decoration: underline; } /* Typography ----------------------------------------------------------------*/ h1,h2,h3,h4,h5,h6 { color: #404040; margin: 0.7em 0 0 0; line-height: 1.5em; } h1 { font-size: 24px; margin: 0; } h2 { font-size: 21px; line-height: 1.2em; margin: 1em 0 0.5em 0; border-bottom: 1px solid #404040; } h3 { font-size: 18px; } h4 { font-size: 16px; } h5 { font-size: 14px; } h6 { font-size: 13px; text-transform: uppercase; } p { font-size: 13px; font-weight: normal; line-height: 18px; margin-top: 0px; margin-bottom: 9px; } ul, ol { margin-left: 0; padding: 0 0 0 25px; } ul ul, ul ol, ol ol, ol ul { margin-bottom: 0; } ul { list-style: disc; } ol { list-style: decimal; } li { line-height: 18px; color: #404040; } ul.unstyled { list-style: none; margin-left: 0; } dl { margin-bottom: 18px; } dl dt, dl dd { line-height: 18px; } dl dd { margin-left: 9px; } hr { margin: 20px 0 19px; 
border: 0; border-bottom: 1px solid #eee; } strong { font-style: inherit; font-weight: bold; } em { font-style: italic; font-weight: inherit; line-height: inherit; } .muted { color: #bfbfbf; } address { display: block; line-height: 18px; margin-bottom: 18px; } code, pre { padding: 0 3px 2px; font-family: monospace; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } tt { font-family: monospace; } code { padding: 1px 3px; } pre { display: block; padding: 8.5px; margin: 0 0 18px; line-height: 18px; border: 1px solid #ddd; border: 1px solid rgba(0, 0, 0, 0.12); -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; white-space: pre; word-wrap: break-word; } img { margin: 9px 0; } /* format inline code with a rounded box */ tt, code { margin: 0 2px; padding: 0 5px; border: 1px solid #ddd; border: 1px solid rgba(0, 0, 0, 0.12); border-radius: 3px; } code.xref, a code { margin: 0; padding: 0 1px 0 1px; background-color: none; border: none; } /* all code has same box background color, even in headers */ h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt, h1 code, h2 code, h3 code, h4 code, h5 code, h6 code, pre, code, tt { background-color: #f8f8f8; } /* override box for links & other sphinx-specifc stuff */ tt.xref, a tt, tt.descname, tt.descclassname { padding: 0 1px 0 1px; border: none; } /* override box for related bar at the top of the page */ .related tt { border: none; padding: 0 1px 0 1px; background-color: transparent; font-weight: bold; } th { background-color: #dddddd; } .viewcode-back { font-family: sans-serif; } div.viewcode-block:target { background-color: #f4debf; border-top: 1px solid #ac9; border-bottom: 1px solid #ac9; } table.docutils { border-spacing: 5px; border-collapse: separate; } /* Topbar --------------------------------------------------------------------*/ div.topbar { height: 40px; position: absolute; top: 0; left: 0; right: 0; z-index: 10000; padding: 0px 10px; background-color: #222; background-color: #222222; 
background-repeat: repeat-x; background-image: -khtml-gradient(linear, left top, left bottom, from(#333333), to(#222222)); background-image: -moz-linear-gradient(top, #333333, #222222); background-image: -ms-linear-gradient(top, #333333, #222222); background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #333333), color-stop(100%, #222222)); background-image: -webkit-linear-gradient(top, #333333, #222222); background-image: -o-linear-gradient(top, #333333, #222222); background-image: linear-gradient(top, #333333, #222222); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333', endColorstr='#222222', GradientType=0); overflow: auto; } div.topbar a.brand { font-family: 'Source Sans Pro', sans-serif; font-size: 26px; color: #ffffff; font-weight: 600; text-decoration: none; float: left; display: block; height: 32px; padding: 8px 12px 0px 45px; margin-left: -10px; background: transparent url("astropy_logo_32.png") no-repeat 10px 4px; background-image: url("astropy_logo.svg"), none; background-size: 32px 32px; } #logotext1 { } #logotext2 { font-weight:200; color: #ff5000; } #logotext3 { font-weight:200; } div.topbar .brand:hover, div.topbar ul li a.homelink:hover { background-color: #333; background-color: rgba(255, 255, 255, 0.05); } div.topbar ul { font-size: 110%; list-style: none; margin: 0; padding: 0 0 0 10px; float: right; color: #bfbfbf; text-align: center; text-decoration: none; height: 100%; } div.topbar ul li { float: left; display: inline; height: 30px; margin: 5px; padding: 0px; } div.topbar ul li a { color: #bfbfbf; text-decoration: none; padding: 5px; display: block; height: auto; text-align: center; vertical-align: middle; border-radius: 4px; } div.topbar ul li a:hover { color: #ffffff; text-decoration: none; } div.topbar ul li a.homelink { width: 112px; display: block; height: 20px; padding: 5px 0px; background: transparent url("astropy_linkout_20.png") no-repeat 10px 5px; background-image: 
url("astropy_linkout.svg"), none; background-size: 91px 20px; } div.topbar form { text-align: left; margin: 0 0 0 5px; position: relative; filter: alpha(opacity=100); -khtml-opacity: 1; -moz-opacity: 1; opacity: 1; } div.topbar input { background-color: #444; background-color: rgba(255, 255, 255, 0.3); font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: normal; font-weight: 13px; line-height: 1; padding: 4px 9px; color: #ffffff; color: rgba(255, 255, 255, 0.75); border: 1px solid #111; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25); -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25); box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25); -webkit-transition: none; -moz-transition: none; -ms-transition: none; -o-transition: none; transition: none; } div.topbar input:-moz-placeholder { color: #e6e6e6; } div.topbar input::-webkit-input-placeholder { color: #e6e6e6; } div.topbar input:hover { background-color: #bfbfbf; background-color: rgba(255, 255, 255, 0.5); color: #ffffff; } div.topbar input:focus, div.topbar input.focused { outline: 0; background-color: #ffffff; color: #404040; text-shadow: 0 1px 0 #ffffff; border: 0; padding: 5px 10px; -webkit-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); -moz-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); } /* Relation bar (breadcrumbs, prev, next) ------------------------------------*/ div.related { height: 21px; width: auto; margin: 0 10px; position: absolute; top: 42px; clear: both; left: 0; right: 0; z-index: 10000; font-size: 100%; vertical-align: middle; background-color: #fff; border-bottom: 1px solid #bbb; } div.related ul { padding: 0; margin: 0; } /* Footer --------------------------------------------------------------------*/ footer { display: block; margin: 10px 10px 0px; padding: 10px 
0 0 0; border-top: 1px solid #bbb; } .pull-right { float: right; width: 30em; text-align: right; } /* Sphinx sidebar ------------------------------------------------------------*/ div.sphinxsidebar { font-size: inherit; border-radius: 3px; background-color: #eee; border: 1px solid #bbb; word-wrap: break-word; /* overflow-wrap is the canonical name for word-wrap in the CSS3 text draft. We include it here mainly for future-proofing. */ overflow-wrap: break-word; } div.sphinxsidebarwrapper { padding: 0px 0px 0px 5px; } div.sphinxsidebar h3 { font-family: 'Trebuchet MS', sans-serif; font-size: 1.4em; font-weight: normal; margin: 5px 0px 0px 5px; padding: 0; line-height: 1.6em; } div.sphinxsidebar h4 { font-family: 'Trebuchet MS', sans-serif; font-size: 1.3em; font-weight: normal; margin: 5px 0 0 0; padding: 0; } div.sphinxsidebar p { } div.sphinxsidebar p.topless { margin: 5px 10px 10px 10px; } div.sphinxsidebar ul { margin: 0px 0px 0px 5px; padding: 0; } div.sphinxsidebar ul ul { margin-left: 15px; list-style-type: disc; } /* If showing the global TOC (toctree), color the current page differently */ div.sphinxsidebar a.current { color: #404040; } div.sphinxsidebar a.current:hover { color: #404040; } /* document, documentwrapper, body, bodywrapper ----------------------------- */ div.document { margin-top: 72px; margin-left: 10px; margin-right: 10px; } div.documentwrapper { float: left; width: 100%; } div.body { background-color: #ffffff; padding: 0 0 0px 20px; } div.bodywrapper { margin: 0 0 0 230px; max-width: 55em; } /* Header links ------------------------------------------------------------- */ a.headerlink { font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; } a.headerlink:hover { background-color: #0069d6; color: white; text-docoration: none; } /* Admonitions and warnings ------------------------------------------------- */ /* Shared by admonitions and warnings */ div.admonition, div.warning { padding: 0px; border-radius: 3px; -moz-border-radius: 
3px; -webkit-border-radius: 3px; } div.admonition p, div.warning p { margin: 0.5em 1em 0.5em 1em; padding: 0; } div.admonition pre, div.warning pre { margin: 0.4em 1em 0.4em 1em; } div.admonition p.admonition-title, div.warning p.admonition-title { margin: 0; padding: 0.1em 0 0.1em 0.5em; color: white; font-weight: bold; font-size: 1.1em; } div.admonition ul, div.admonition ol, div.warning ul, div.warning ol { margin: 0.1em 0.5em 0.5em 3em; padding: 0; } /* Admonitions only */ div.admonition { border: 1px solid #609060; background-color: #e9ffe9; } div.admonition p.admonition-title { background-color: #70A070; } /* Warnings only */ div.warning { border: 1px solid #900000; background-color: #ffe9e9; } div.warning p.admonition-title { background-color: #b04040; } /* Figures ------------------------------------------------------------------ */ .figure.align-center { clear: none; } /* This is a div for containing multiple figures side-by-side, for use with * .. container:: figures */ div.figures { border: 1px solid #CCCCCC; background-color: #F8F8F8; margin: 1em; text-align: center; } div.figures .figure { clear: none; float: none; display: inline-block; border: none; margin-left: 0.5em; margin-right: 0.5em; } .field-list th { white-space: nowrap; } table.field-list { border-spacing: 0px; margin-left: 1px; border-left: 5px solid rgb(238, 238, 238) !important; } table.field-list th.field-name { display: inline-block; padding: 1px 8px 1px 5px; white-space: nowrap; background-color: rgb(238, 238, 238); border-radius: 0 3px 3px 0; -webkit-border-radius: 0 3px 3px 0; } ././@LongLink0000000000000000000000000000015700000000000011220 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout.svgccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_l0000664000175000017500000001212113207611674036067 0ustar mseifertmseifert00000000000000 
././@LongLink0000000000000000000000000000015700000000000011220 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.pngccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_l0000664000175000017500000000353413207611674036077 0ustar mseifertmseifert00000000000000PNG  IHDR szzsBIT|d pHYstEXtSoftwarewww.inkscape.org<IDATXŗPT?cP  FUBȘ&diI# JhC2nj5tw:۪5Sƪڡ%ZF2VkJF`(Jdqa., ?3gsϏΞ\_ɕ#Hl 2pZ8ԯKLW;ACȋh4z>$?#hJ~`;&y#D b0¤u2RqKJr'7<6.´;`2ҋ@&a$`+Ɲ1WB], w.rM|rh?G6Bm"GïK0#&: WBa˰mL6p+Δxti@D1;z v7zCrׇE9,_Ghby; !,eUėAlO-^;V~;MKxUZK%:L剜"9Tr3WCWa89`p4XW;KxBjwɥׇ.WLD_e5w`DzFG;z9?@ghI^ UԳMl+ās%bZKo@`!8o)!pu4W;U00i'@V \}> u  bdǑY>rzc0iI,\1DX )ׇ__m cB3߬|f̃I.K;NAq!~*r8g)Bď߅;!*'#DrdN;Ql |( Xj[`aPy* ؗԥhbO 9el 0Hia29HRe 5*@)}˱ cU5aIr m0JnARPrj&5+ޝAL:KA\ e'_໩lg'm/!7|p7zT@50 K޹g@/fHN|ׯ@b b8Xl,yf} ڠU; )U1obS j~¦aS2!&A8/ 7hu.@0D=_oo nI/ I70Fާ&%,*}t {#$'@tbʾ?uO j&DK -T㎉E4| )p,;!7ÿ3i06XԾ8nBSjOENi 0-g<0c&T@e] K . ;z硳-TR[t:iy脷,,4EBY8{Z5FAK]?upjL,<" ^?aRe AO/YHKC}K7ټV='N h@$.:4}rsFp"jw^qo?%f$2H̀O675E)iנس\oF̄*j{YUIܹ !bQ[Ǣ&X])WHT] 텟A֭`ЇuWXq;dgڱ "20֯зka:ob3u2p!}rn,TjN$9L࿡k{rAMP*ari.i[ hШ7O$0 ˕Lg$33 G.8<IENDB`././@LongLink0000000000000000000000000000014600000000000011216 Lustar 00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.jsccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.j0000664000175000017500000001155313207611674035724 0ustar mseifertmseifert00000000000000/* * sidebar.js * ~~~~~~~~~~ * * This script makes the Sphinx sidebar collapsible. * * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds * in .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton * used to collapse and expand the sidebar. * * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden * and the width of the sidebar and the margin-left of the document * are decreased. 
When the sidebar is expanded the opposite happens. * This script saves a per-browser/per-session cookie used to * remember the position of the sidebar among the pages. * Once the browser is closed the cookie is deleted and the position * reset to the default (expanded). * * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. * */ $(function() { // global elements used by the functions. // the 'sidebarbutton' element is defined as global after its // creation, in the add_sidebar_button function var bodywrapper = $('.bodywrapper'); var sidebar = $('.sphinxsidebar'); var sidebarwrapper = $('.sphinxsidebarwrapper'); // for some reason, the document has no sidebar; do not run into errors if (!sidebar.length) return; // original margin-left of the bodywrapper and width of the sidebar // with the sidebar expanded var bw_margin_expanded = bodywrapper.css('margin-left'); var ssb_width_expanded = sidebar.width(); // margin-left of the bodywrapper and width of the sidebar // with the sidebar collapsed var bw_margin_collapsed = 12; var ssb_width_collapsed = 12; // custom colors var dark_color = '#404040'; var light_color = '#505050'; function sidebar_is_collapsed() { return sidebarwrapper.is(':not(:visible)'); } function toggle_sidebar() { if (sidebar_is_collapsed()) expand_sidebar(); else collapse_sidebar(); } function collapse_sidebar() { sidebarwrapper.hide(); sidebar.css('width', ssb_width_collapsed); bodywrapper.css('margin-left', bw_margin_collapsed); sidebarbutton.css({ 'margin-left': '-1px', 'height': bodywrapper.height(), 'border-radius': '3px' }); sidebarbutton.find('span').text('»'); sidebarbutton.attr('title', _('Expand sidebar')); document.cookie = 'sidebar=collapsed'; } function expand_sidebar() { bodywrapper.css('margin-left', bw_margin_expanded); sidebar.css('width', ssb_width_expanded); sidebarwrapper.show(); sidebarbutton.css({ 'margin-left': ssb_width_expanded - 12, 'height': bodywrapper.height(), 
'border-radius': '0px 3px 3px 0px' }); sidebarbutton.find('span').text('«'); sidebarbutton.attr('title', _('Collapse sidebar')); document.cookie = 'sidebar=expanded'; } function add_sidebar_button() { sidebarwrapper.css({ 'float': 'left', 'margin-right': '0', 'width': ssb_width_expanded - 18 }); // create the button sidebar.append('
«
'); var sidebarbutton = $('#sidebarbutton'); // find the height of the viewport to center the '<<' in the page var viewport_height; if (window.innerHeight) viewport_height = window.innerHeight; else viewport_height = $(window).height(); var sidebar_offset = sidebar.offset().top; var sidebar_height = Math.max(bodywrapper.height(), sidebar.height()); sidebarbutton.find('span').css({ 'font-family': '"Lucida Grande",Arial,sans-serif', 'display': 'block', 'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10, 'width': 12, 'position': 'fixed', 'text-align': 'center' }); sidebarbutton.click(toggle_sidebar); sidebarbutton.attr('title', _('Collapse sidebar')); sidebarbutton.css({ 'color': '#FFFFFF', 'background-color': light_color, 'border': '1px solid ' + light_color, 'border-radius': '0px 3px 3px 0px', 'font-size': '1.2em', 'cursor': 'pointer', 'height': sidebar_height, 'padding-top': '1px', 'margin': '-1px', 'margin-left': ssb_width_expanded - 12 }); sidebarbutton.hover( function () { $(this).css('background-color', dark_color); }, function () { $(this).css('background-color', light_color); } ); } function set_position_from_cookie() { if (!document.cookie) return; var items = document.cookie.split(';'); for(var k=0; k)Nt$S`6p6pTm5Hs8{@` J:v=%``/9`/i~\`b{H7KB݀"Ɠof:/hR' J\"`n*[! 
`'I} o9#g6 l}mh[lOe~tgE;nkmϳ=^KQ&~* N Nx l30L-'w~u O lOm)ީ`"ױakٚs\"5ߟ[m,fB 9{g[ؓ'(8}';aq^{N:l_q-HZ"x.5kO|[86_Y?-B6m8wDqkׅ (eY5$ʯwdz"D%iZMh1/ѪbmZ۟0] V-_پ9냲1K%)AB089l *N' M/o;GcJ=IÁe:T6ܝ}ʳP F76J}K h,aSΌV`%XU [IWE԰ {%- endif %} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/theme.conf0000664000175000017500000000030013207611674034606 0ustar mseifertmseifert00000000000000# AstroPy theme based on Twitter Bootstrap CSS [theme] inherit = basic stylesheet = bootstrap-astropy.css pygments_style = sphinx [options] logotext1 = astro logotext2 = py logotext3 = :docs ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/layout.html0000664000175000017500000000655113207611674035056 0ustar mseifertmseifert00000000000000{% extends "basic/layout.html" %} {# Collapsible sidebar script from default/layout.html in Sphinx #} {% set script_files = script_files + ['_static/sidebar.js'] %} {# Add the google webfonts needed for the logo #} {% block extrahead %} {% if not embedded %}{% endif %} {% endblock %} {% block header %}
{{ theme_logotext1 }}{{ theme_logotext2 }}{{ theme_logotext3 }}
  • Index
  • Modules
  • {% block sidebarsearch %} {% include "searchbox.html" %} {% endblock %}
{% endblock %} {% block relbar1 %} {% endblock %} {# Silence the bottom relbar. #} {% block relbar2 %}{% endblock %} {%- block footer %}

{%- if edit_on_github %} {{ edit_on_github_page_message }}   {%- endif %} {%- if show_source and has_source and sourcename %} {{ _('Page Source') }} {%- endif %}   Back to Top

{%- if show_copyright %} {%- if hasdoc('copyright') %} {% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
{%- else %} {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
{%- endif %} {%- endif %} {%- if show_sphinx %} {% trans sphinx_version=sphinx_version|e %}Created using Sphinx {{ sphinx_version }}.{% endtrans %}   {%- endif %} {%- if last_updated %} {% trans last_updated=last_updated|e %}Last built {{ last_updated }}.{% endtrans %}
{%- endif %}

{%- endblock %} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html0000664000175000017500000000004213207611674035326 0ustar mseifertmseifert00000000000000

Page Contents

{{ toc }} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html0000664000175000017500000000011113207611674035471 0ustar mseifertmseifert00000000000000

Table of Contents

{{ toctree(maxdepth=-1, titles_only=true) }} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/setup_package.py0000664000175000017500000000044413207611674031032 0ustar mseifertmseifert00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst def get_package_data(): # Install the theme files return { 'astropy_helpers.sphinx': [ 'local/*.inv', 'themes/bootstrap-astropy/*.*', 'themes/bootstrap-astropy/static/*.*']} ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/local/0000775000175000017500000000000013207623133026726 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/local/python3_local_links.txt0000664000175000017500000000540413207611674033457 0ustar mseifertmseifert00000000000000# Sphinx inventory version 2 # Project: Python # Version: 2.7 and 3.5 # The remainder of this file should be compressed using zlib. # python2 only links cPickle py:module -1 2/library/pickle.html#module-cPickle - unicode py:function -1 2/library/functions.html#unicode - bsddb py:module -1 2/library/bsddb.html#module-bsddb - dict.has_key py:method -1 2/library/stdtypes.html#dict.has_key - dict.iteritems py:method -1 2/library/stdtypes.html#dict.iteritems - dict.iterkeys py:method -1 2/library/stdtypes.html#dict.iterkeys - dict.itervalues py:method -1 2/library/stdtypes.html#dict.itervalues - urllib2.urlopen py:function -1 2/library/urllib2.html#urllib2.urlopen - # python3 print() py:function -1 3/library/functions.html#print - # python3 collections.abc collections.Container py:class -1 3/library/collections.abc.html#collections.abc.Container - collections.Hashable py:class -1 3/library/collections.abc.html#collections.abc.Hashable - collections.Sized py:class -1 3/library/collections.abc.html#collections.abc.Sized - collections.Callable py:class -1 3/library/collections.abc.html#collections.abc.Callable - collections.Iterable py:class -1 3/library/collections.abc.html#collections.abc.Iterable - 
collections.Iterator py:class -1 3/library/collections.abc.html#collections.abc.Iterator - collections.Generator py:class -1 3/library/collections.abc.html#collections.abc.Generator - collections.Sequence py:class -1 3/library/collections.abc.html#collections.abc.Sequence - collections.MutableSequence py:class -1 3/library/collections.abc.html#collections.abc.MutableSequence - collections.ByteString py:class -1 3/library/collections.abc.html#collections.abc.ByteString - collections.Set py:class -1 3/library/collections.abc.html#collections.abc.Set - collections.MutableSet py:class -1 3/library/collections.abc.html#collections.abc.MutableSet - collections.Mapping py:class -1 3/library/collections.abc.html#collections.abc.Mapping - collections.MutableMapping py:class -1 3/library/collections.abc.html#collections.abc.MutableMapping - collections.MappingView py:class -1 3/library/collections.abc.html#collections.abc.MappingView - collections.ItemsView py:class -1 3/library/collections.abc.html#collections.abc.ItemsView - collections.KeysView py:class -1 3/library/collections.abc.html#collections.abc.KeysView - collections.ValuesView py:class -1 3/library/collections.abc.html#collections.abc.ValuesView - collections.Awaitable py:class -1 3/library/collections.abc.html#collections.abc.Awaitable - collections.Coroutine py:class -1 3/library/collections.abc.html#collections.abc.Coroutine - collections.AsyncIterable py:class -1 3/library/collections.abc.html#collections.abc.AsyncIterable - collections.AsyncIterator py:class -1 3/library/collections.abc.html#collections.abc.AsyncIterator - ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/local/python2_local_links.txt0000664000175000017500000000314613207611674033457 0ustar mseifertmseifert00000000000000# Sphinx inventory version 2 # Project: Python # Version: 2.7 and 3.5 # The remainder of this file should be compressed using zlib. 
# python2 IndexError py:exception -1 2/library/exceptions.html#IndexError - IOError py:exception -1 2/library/exceptions.html#IOError - KeyError py:exception -1 2/library/exceptions.html#KeyError - ValueError py:exception -1 2/library/exceptions.html#ValueError - TypeError py:exception -1 2/library/exceptions.html#TypeError - # python3 only TimeoutError py:exception -1 3/library/exceptions.html#TimeoutError - bytes py:function -1 3/library/functions.html#bytes - urllib.request.urlopen py:function -1 3/library/urllib.request.html#urllib.request.urlopen - concurrent.futures.Future py:class -1 3/library/concurrent.futures.html#concurrent.futures.Future - concurrent.futures.ThreadPoolExecutor py:class -1 3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor - queue.Queue py:class -1 3/library/queue.html#queue.Queue - print() py:function -1 3/library/functions.html#print - # python3 only collections.abc collections.Generator py:class -1 3/library/collections.abc.html#collections.abc.Generator - collections.ByteString py:class -1 3/library/collections.abc.html#collections.abc.ByteString - collections.Awaitable py:class -1 3/library/collections.abc.html#collections.abc.Awaitable - collections.Coroutine py:class -1 3/library/collections.abc.html#collections.abc.Coroutine - collections.AsyncIterable py:class -1 3/library/collections.abc.html#collections.abc.AsyncIterable - collections.AsyncIterator py:class -1 3/library/collections.abc.html#collections.abc.AsyncIterator - ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/local/python3_local_links.inv0000664000175000017500000000122213207611674033426 0ustar mseifertmseifert00000000000000# Sphinx inventory version 2 # Project: Python # Version: 3.5 # The remainder of this file should be compressed using zlib. x0{b$.!YTUa*!Qq{h\;ٯgɁlv VA#jolGN dk~#k40Zv]'`Z*H? 
%Z_H{\aj% Gব,:j'/xU2(j%PR\7(j֥5J?,Cf/բO4FZsz ouЏO l;4`6yDMA-}Jwq!dj!#T" h2oS߈~` t8RwjnKcRxr?%+\Ob 3s˻`Vһv@>2b;!I,=Wh_'l!Q%^B#Ô }inuD#e³\:{tu;/wxy. !nX{0BzoH /LxA&UXS{⮸5ߣ\RBiJF?ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/sphinx/local/python2_local_links.inv0000664000175000017500000000106213207611674033427 0ustar mseifertmseifert00000000000000# Sphinx inventory version 2 # Project: Python # Version: 2.7 and 3.5 # The remainder of this file should be compressed using zlib. x=O0@w Z!nU bw+1rpKïIiQeI˽w8g"Wf ʬxK%lS(ϭ1 k&Qrp)ɐ.Bi۠3H]a)_ZI>dH, _M_"撠bvIzЀ8 #include int main(void) { #pragma omp parallel printf("nthreads=%d\\n", omp_get_num_threads()); return 0; } """ def add_openmp_flags_if_available(extension): """ Add OpenMP compilation flags, if available (if not a warning will be printed to the console and no flags will be added) Returns `True` if the flags were added, `False` otherwise. """ ccompiler = new_compiler() customize_compiler(ccompiler) tmp_dir = tempfile.mkdtemp() start_dir = os.path.abspath('.') if get_compiler_option() == 'msvc': compile_flag = '-openmp' link_flag = '' else: compile_flag = '-fopenmp' link_flag = '-fopenmp' try: os.chdir(tmp_dir) with open('test_openmp.c', 'w') as f: f.write(CCODE) os.mkdir('objects') # Compile, link, and run test program ccompiler.compile(['test_openmp.c'], output_dir='objects', extra_postargs=[compile_flag]) ccompiler.link_executable(glob.glob(os.path.join('objects', '*')), 'test_openmp', extra_postargs=[link_flag]) output = subprocess.check_output('./test_openmp').decode(sys.stdout.encoding or 'utf-8').splitlines() if 'nthreads=' in output[0]: nthreads = int(output[0].strip().split('=')[1]) if len(output) == nthreads: using_openmp = True else: log.warn("Unexpected number of lines from output of test OpenMP " "program (output was {0})".format(output)) using_openmp = False else: log.warn("Unexpected output from test OpenMP " "program (output was {0})".format(output)) using_openmp = False except (CompileError, 
LinkError): using_openmp = False finally: os.chdir(start_dir) if using_openmp: log.info("Compiling Cython extension with OpenMP support") extension.extra_compile_args.append(compile_flag) extension.extra_link_args.append(link_flag) else: log.warn("Cannot compile Cython extension with OpenMP, reverting to non-parallel code") return using_openmp ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/compat/0000775000175000017500000000000013207623133025606 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/compat/__init__.py0000664000175000017500000000056013207611674027727 0ustar mseifertmseifert00000000000000def _fix_user_options(options): """ This is for Python 2.x and 3.x compatibility. distutils expects Command options to all be byte strings on Python 2 and Unicode strings on Python 3. """ def to_str_or_none(x): if x is None: return None return str(x) return [tuple(to_str_or_none(x) for x in y) for y in options] ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/0000775000175000017500000000000013207623133026124 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/_test_compat.py0000664000175000017500000002671213207611674031176 0ustar mseifertmseifert00000000000000""" Old implementation of ``./setup.py test`` command. This has been moved to astropy.tests as of Astropy v1.1.0, but a copy of the implementation is kept here for backwards compatibility. """ from __future__ import absolute_import, unicode_literals import inspect import os import shutil import subprocess import sys import tempfile from setuptools import Command from ..compat import _fix_user_options PY3 = sys.version_info[0] == 3 class AstropyTest(Command, object): description = 'Run the tests for this package' user_options = [ ('package=', 'P', "The name of a specific package to test, e.g. 'io.fits' or 'utils'. 
" "If nothing is specified, all default tests are run."), ('test-path=', 't', 'Specify a test location by path. If a relative path to a .py file, ' 'it is relative to the built package, so e.g., a leading "astropy/" ' 'is necessary. If a relative path to a .rst file, it is relative to ' 'the directory *below* the --docs-path directory, so a leading ' '"docs/" is usually necessary. May also be an absolute path.'), ('verbose-results', 'V', 'Turn on verbose output from pytest.'), ('plugins=', 'p', 'Plugins to enable when running pytest.'), ('pastebin=', 'b', "Enable pytest pastebin output. Either 'all' or 'failed'."), ('args=', 'a', 'Additional arguments to be passed to pytest.'), ('remote-data', 'R', 'Run tests that download remote data.'), ('pep8', '8', 'Enable PEP8 checking and disable regular tests. ' 'Requires the pytest-pep8 plugin.'), ('pdb', 'd', 'Start the interactive Python debugger on errors.'), ('coverage', 'c', 'Create a coverage report. Requires the coverage package.'), ('open-files', 'o', 'Fail if any tests leave files open. Requires the ' 'psutil package.'), ('parallel=', 'j', 'Run the tests in parallel on the specified number of ' 'CPUs. If negative, all the cores on the machine will be ' 'used. Requires the pytest-xdist plugin.'), ('docs-path=', None, 'The path to the documentation .rst files. If not provided, and ' 'the current directory contains a directory called "docs", that ' 'will be used.'), ('skip-docs', None, "Don't test the documentation .rst files."), ('repeat=', None, 'How many times to repeat each test (can be used to check for ' 'sporadic failures).'), ('temp-root=', None, 'The root directory in which to create the temporary testing files. ' 'If unspecified the system default is used (e.g. 
/tmp) as explained ' 'in the documentation for tempfile.mkstemp.') ] user_options = _fix_user_options(user_options) package_name = '' def initialize_options(self): self.package = None self.test_path = None self.verbose_results = False self.plugins = None self.pastebin = None self.args = None self.remote_data = False self.pep8 = False self.pdb = False self.coverage = False self.open_files = False self.parallel = 0 self.docs_path = None self.skip_docs = False self.repeat = None self.temp_root = None def finalize_options(self): # Normally we would validate the options here, but that's handled in # run_tests pass # Most of the test runner arguments have the same name as attributes on # this command class, with one exception (for now) _test_runner_arg_attr_map = { 'verbose': 'verbose_results' } def generate_testing_command(self): """ Build a Python script to run the tests. """ cmd_pre = '' # Commands to run before the test function cmd_post = '' # Commands to run after the test function if self.coverage: pre, post = self._generate_coverage_commands() cmd_pre += pre cmd_post += post def get_attr(arg): attr = self._test_runner_arg_attr_map.get(arg, arg) return getattr(self, attr) test_args = filter(lambda arg: hasattr(self, arg), self._get_test_runner_args()) test_args = ', '.join('{0}={1!r}'.format(arg, get_attr(arg)) for arg in test_args) if PY3: set_flag = "import builtins; builtins._ASTROPY_TEST_ = True" else: set_flag = "import __builtin__; __builtin__._ASTROPY_TEST_ = True" cmd = ('{cmd_pre}{0}; import {1.package_name}, sys; result = ' '{1.package_name}.test({test_args}); {cmd_post}' 'sys.exit(result)') return cmd.format(set_flag, self, cmd_pre=cmd_pre, cmd_post=cmd_post, test_args=test_args) def _validate_required_deps(self): """ This method checks that any required modules are installed before running the tests. 
""" try: import astropy # noqa except ImportError: raise ImportError( "The 'test' command requires the astropy package to be " "installed and importable.") def run(self): """ Run the tests! """ # Ensure there is a doc path if self.docs_path is None: if os.path.exists('docs'): self.docs_path = os.path.abspath('docs') # Build a testing install of the package self._build_temp_install() # Ensure all required packages are installed self._validate_required_deps() # Run everything in a try: finally: so that the tmp dir gets deleted. try: # Construct this modules testing command cmd = self.generate_testing_command() # Run the tests in a subprocess--this is necessary since # new extension modules may have appeared, and this is the # easiest way to set up a new environment # On Python 3.x prior to 3.3, the creation of .pyc files # is not atomic. py.test jumps through some hoops to make # this work by parsing import statements and carefully # importing files atomically. However, it can't detect # when __import__ is used, so its carefulness still fails. # The solution here (admittedly a bit of a hack), is to # turn off the generation of .pyc files altogether by # passing the `-B` switch to `python`. This does mean # that each core will have to compile .py file to bytecode # itself, rather than getting lucky and borrowing the work # already done by another core. Compilation is an # insignificant fraction of total testing time, though, so # it's probably not worth worrying about. 
retcode = subprocess.call([sys.executable, '-B', '-c', cmd], cwd=self.testing_path, close_fds=False) finally: # Remove temporary directory shutil.rmtree(self.tmp_dir) raise SystemExit(retcode) def _build_temp_install(self): """ Build the package and copy the build to a temporary directory for the purposes of testing this avoids creating pyc and __pycache__ directories inside the build directory """ self.reinitialize_command('build', inplace=True) self.run_command('build') build_cmd = self.get_finalized_command('build') new_path = os.path.abspath(build_cmd.build_lib) # On OSX the default path for temp files is under /var, but in most # cases on OSX /var is actually a symlink to /private/var; ensure we # dereference that link, because py.test is very sensitive to relative # paths... tmp_dir = tempfile.mkdtemp(prefix=self.package_name + '-test-', dir=self.temp_root) self.tmp_dir = os.path.realpath(tmp_dir) self.testing_path = os.path.join(self.tmp_dir, os.path.basename(new_path)) shutil.copytree(new_path, self.testing_path) new_docs_path = os.path.join(self.tmp_dir, os.path.basename(self.docs_path)) shutil.copytree(self.docs_path, new_docs_path) self.docs_path = new_docs_path shutil.copy('setup.cfg', self.tmp_dir) def _generate_coverage_commands(self): """ This method creates the post and pre commands if coverage is to be generated """ if self.parallel != 0: raise ValueError( "--coverage can not be used with --parallel") try: import coverage # noqa except ImportError: raise ImportError( "--coverage requires that the coverage package is " "installed.") # Don't use get_pkg_data_filename here, because it # requires importing astropy.config and thus screwing # up coverage results for those packages. 
coveragerc = os.path.join( self.testing_path, self.package_name, 'tests', 'coveragerc') # We create a coveragerc that is specific to the version # of Python we're running, so that we can mark branches # as being specifically for Python 2 or Python 3 with open(coveragerc, 'r') as fd: coveragerc_content = fd.read() if PY3: ignore_python_version = '2' else: ignore_python_version = '3' coveragerc_content = coveragerc_content.replace( "{ignore_python_version}", ignore_python_version).replace( "{packagename}", self.package_name) tmp_coveragerc = os.path.join(self.tmp_dir, 'coveragerc') with open(tmp_coveragerc, 'wb') as tmp: tmp.write(coveragerc_content.encode('utf-8')) cmd_pre = ( 'import coverage; ' 'cov = coverage.coverage(data_file="{0}", config_file="{1}"); ' 'cov.start();'.format( os.path.abspath(".coverage"), tmp_coveragerc)) cmd_post = ( 'cov.stop(); ' 'from astropy.tests.helper import _save_coverage; ' '_save_coverage(cov, result, "{0}", "{1}");'.format( os.path.abspath('.'), self.testing_path)) return cmd_pre, cmd_post def _get_test_runner_args(self): """ A hack to determine what arguments are supported by the package's test() function. In the future there should be a more straightforward API to determine this (really it should be determined by the ``TestRunner`` class for whatever version of Astropy is in use). 
""" if PY3: import builtins builtins._ASTROPY_TEST_ = True else: import __builtin__ __builtin__._ASTROPY_TEST_ = True try: pkg = __import__(self.package_name) if not hasattr(pkg, 'test'): raise ImportError( 'package {0} does not have a {0}.test() function as ' 'required by the Astropy test runner'.format(self.package_name)) argspec = inspect.getargspec(pkg.test) return argspec.args finally: if PY3: del builtins._ASTROPY_TEST_ else: del __builtin__._ASTROPY_TEST_ ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/src/0000775000175000017500000000000013207623133026713 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/src/compiler.c0000664000175000017500000000573113207611674030706 0ustar mseifertmseifert00000000000000#include /*************************************************************************** * Macros for determining the compiler version. * * These are borrowed from boost, and majorly abridged to include only * the compilers we care about. ***************************************************************************/ #ifndef PY3K #if PY_MAJOR_VERSION >= 3 #define PY3K 1 #else #define PY3K 0 #endif #endif #define STRINGIZE(X) DO_STRINGIZE(X) #define DO_STRINGIZE(X) #X #if defined __clang__ /* Clang C++ emulates GCC, so it has to appear early. 
*/ # define COMPILER "Clang version " __clang_version__ #elif defined(__INTEL_COMPILER) || defined(__ICL) || defined(__ICC) || defined(__ECC) /* Intel */ # if defined(__INTEL_COMPILER) # define INTEL_VERSION __INTEL_COMPILER # elif defined(__ICL) # define INTEL_VERSION __ICL # elif defined(__ICC) # define INTEL_VERSION __ICC # elif defined(__ECC) # define INTEL_VERSION __ECC # endif # define COMPILER "Intel C compiler version " STRINGIZE(INTEL_VERSION) #elif defined(__GNUC__) /* gcc */ # define COMPILER "GCC version " __VERSION__ #elif defined(__SUNPRO_CC) /* Sun Workshop Compiler */ # define COMPILER "Sun compiler version " STRINGIZE(__SUNPRO_CC) #elif defined(_MSC_VER) /* Microsoft Visual C/C++ Must be last since other compilers define _MSC_VER for compatibility as well */ # if _MSC_VER < 1200 # define COMPILER_VERSION 5.0 # elif _MSC_VER < 1300 # define COMPILER_VERSION 6.0 # elif _MSC_VER == 1300 # define COMPILER_VERSION 7.0 # elif _MSC_VER == 1310 # define COMPILER_VERSION 7.1 # elif _MSC_VER == 1400 # define COMPILER_VERSION 8.0 # elif _MSC_VER == 1500 # define COMPILER_VERSION 9.0 # elif _MSC_VER == 1600 # define COMPILER_VERSION 10.0 # else # define COMPILER_VERSION _MSC_VER # endif # define COMPILER "Microsoft Visual C++ version " STRINGIZE(COMPILER_VERSION) #else /* Fallback */ # define COMPILER "Unknown compiler" #endif /*************************************************************************** * Module-level ***************************************************************************/ struct module_state { /* The Sun compiler can't handle empty structs */ #if defined(__SUNPRO_C) || defined(_MSC_VER) int _dummy; #endif }; #if PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "_compiler", NULL, sizeof(struct module_state), NULL, NULL, NULL, NULL, NULL }; #define INITERROR return NULL PyMODINIT_FUNC PyInit__compiler(void) #else #define INITERROR return PyMODINIT_FUNC init_compiler(void) #endif { PyObject* m; #if PY3K m = 
PyModule_Create(&moduledef); #else m = Py_InitModule3("_compiler", NULL, NULL); #endif if (m == NULL) INITERROR; PyModule_AddStringConstant(m, "compiler", COMPILER); #if PY3K return m; #endif } ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/test.py0000664000175000017500000000252413207611674027467 0ustar mseifertmseifert00000000000000""" Different implementations of the ``./setup.py test`` command depending on what's locally available. If Astropy v1.1.0.dev or later is available it should be possible to import AstropyTest from ``astropy.tests.command``. If ``astropy`` can be imported but not ``astropy.tests.command`` (i.e. an older version of Astropy), we can use the backwards-compat implementation of the command. If Astropy can't be imported at all then there is a skeleton implementation that allows users to at least discover the ``./setup.py test`` command and learn that they need Astropy to run it. """ # Previously these except statements caught only ImportErrors, but there are # some other obscure exceptional conditions that can occur when importing # astropy.tests (at least on older versions) that can cause these imports to # fail try: import astropy # noqa try: from astropy.tests.command import AstropyTest except Exception: from ._test_compat import AstropyTest except Exception: # No astropy at all--provide the dummy implementation from ._dummy import _DummyCommand class AstropyTest(_DummyCommand): command_name = 'test' description = 'Run the tests for this package' error_msg = ( "The 'test' command requires the astropy package to be " "installed and importable.") ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/_dummy.py0000664000175000017500000000557413207611674030012 0ustar mseifertmseifert00000000000000""" Provides a base class for a 'dummy' setup.py command that has no functionality (probably due to a missing requirement). 
class _DummyCommandMeta(type):
    """
    Causes an exception to be raised on accessing attributes of a command
    class so that if ``./setup.py command_name`` is run with additional
    command-line options we can provide a useful error message instead of
    the default that tells users the options are unrecognized.
    """

    def __init__(cls, name, bases, members):
        # The base class itself is exempt from the attribute checks below.
        if bases == (Command, object):
            # This is the _DummyCommand base class, presumably
            return

        # Concrete dummy commands must define both class attributes up
        # front, because __getattribute__ below only whitelists these two.
        if not hasattr(cls, 'description'):
            raise TypeError(
                "_DummyCommand subclass must have a 'description' "
                "attribute.")

        if not hasattr(cls, 'error_msg'):
            raise TypeError(
                "_DummyCommand subclass must have an 'error_msg' "
                "attribute.")

    def __getattribute__(cls, attr):
        # Any attribute access other than the two whitelisted names raises,
        # which is how distutils option parsing is short-circuited into a
        # helpful error regardless of what arguments the user passed.
        if attr in ('description', 'error_msg'):
            # Allow cls.description to work so that `./setup.py
            # --help-commands` still works
            return super(_DummyCommandMeta, cls).__getattribute__(attr)

        raise DistutilsArgError(cls.error_msg)


# Python 2 and Python 3 declare metaclasses with incompatible syntax, so the
# appropriate class statement is built as a string and exec'd.
if sys.version_info[0] < 3:
    exec(dedent("""
        class _DummyCommand(Command, object):
            __metaclass__ = _DummyCommandMeta
        """))
else:
    exec(dedent("""
        class _DummyCommand(Command, object, metaclass=_DummyCommandMeta):
            pass
        """))
class AstropyBuildDocs(SphinxBuildDoc):
    """
    A version of the ``build_docs`` command that uses the version of Astropy
    that is built by the setup ``build`` command, rather than whatever is
    installed on the system.  To build docs against the installed version,
    run ``make html`` in the ``astropy/docs`` directory.

    This also automatically creates the docs/_static directories--this is
    needed because GitHub won't create the _static dir because it has no
    tracked files.
    """

    description = 'Build Sphinx documentation for Astropy environment'
    user_options = SphinxBuildDoc.user_options[:]
    user_options.append(
        ('warnings-returncode', 'w',
         'Parses the sphinx output and sets the return code to 1 if there '
         'are any warnings. Note that this will cause the sphinx log to '
         'only update when it completes, rather than continuously as is '
         'normally the case.'))
    user_options.append(
        ('clean-docs', 'l',
         'Completely clean previous builds, including '
         'automodapi-generated files before building new ones'))
    user_options.append(
        ('no-intersphinx', 'n',
         'Skip intersphinx, even if conf.py says to use it'))
    user_options.append(
        ('open-docs-in-browser', 'o',
         'Open the docs in a browser (using the webbrowser module) if the '
         'build finishes successfully.'))

    boolean_options = SphinxBuildDoc.boolean_options[:]
    boolean_options.append('warnings-returncode')
    boolean_options.append('clean-docs')
    boolean_options.append('no-intersphinx')
    boolean_options.append('open-docs-in-browser')

    # Matches "self.<identifier>" so that references to the command instance
    # can be replaced with literal values in the generated subprocess code.
    _self_iden_rex = re.compile(r"self\.([^\d\W][\w]+)", re.UNICODE)

    def initialize_options(self):
        SphinxBuildDoc.initialize_options(self)
        self.clean_docs = False
        self.no_intersphinx = False
        self.open_docs_in_browser = False
        self.warnings_returncode = False

    def finalize_options(self):
        SphinxBuildDoc.finalize_options(self)

        # Clear out previous sphinx builds, if requested
        if self.clean_docs:
            dirstorm = [os.path.join(self.source_dir, 'api'),
                        os.path.join(self.source_dir, 'generated')]
            if self.build_dir is None:
                dirstorm.append('docs/_build')
            else:
                dirstorm.append(self.build_dir)

            for d in dirstorm:
                if os.path.isdir(d):
                    log.info('Cleaning directory ' + d)
                    shutil.rmtree(d)
                else:
                    log.info('Not cleaning directory ' + d + ' because '
                             'not present or not a directory')

    def run(self):
        # TODO: Break this method up into a few more subroutines and
        # document them better
        import webbrowser

        if PY3:
            from urllib.request import pathname2url
        else:
            from urllib import pathname2url

        # This is used at the very end of `run` to decide if sys.exit should
        # be called. If it's None, it won't be.
        retcode = None

        # If possible, create the _static dir
        if self.build_dir is not None:
            # the _static dir should be in the same place as the _build dir
            # for Astropy
            basedir, subdir = os.path.split(self.build_dir)
            if subdir == '':  # the path has a trailing /...
                basedir, subdir = os.path.split(basedir)
            staticdir = os.path.join(basedir, '_static')
            if os.path.isfile(staticdir):
                # FIX: the original message concatenated the path with no
                # surrounding spaces, producing e.g. "where/path/_staticis".
                raise DistutilsOptionError(
                    'Attempted to build_docs in a location where ' +
                    staticdir + ' is a file.  Must be a directory.')
            self.mkpath(staticdir)

        # Now make sure Astropy is built and determine where it was built
        build_cmd = self.reinitialize_command('build')
        build_cmd.inplace = 0
        self.run_command('build')
        build_cmd = self.get_finalized_command('build')
        build_cmd_path = os.path.abspath(build_cmd.build_lib)

        ah_importer = pkgutil.get_importer('astropy_helpers')
        ah_path = os.path.abspath(ah_importer.path)

        # Now generate the source for and spawn a new process that runs the
        # command.  This is needed to get the correct imports for the built
        # version
        runlines, runlineno = inspect.getsourcelines(SphinxBuildDoc.run)
        subproccode = textwrap.dedent("""
            from sphinx.setup_command import *

            os.chdir({srcdir!r})
            sys.path.insert(0, {build_cmd_path!r})
            sys.path.insert(0, {ah_path!r})
        """).format(build_cmd_path=build_cmd_path, ah_path=ah_path,
                    srcdir=self.source_dir)
        # runlines[1:] removes 'def run(self)' on the first line
        subproccode += textwrap.dedent(''.join(runlines[1:]))

        # All "self.foo" in the subprocess code needs to be replaced by the
        # values taken from the current self in *this* process
        subproccode = self._self_iden_rex.split(subproccode)
        for i in range(1, len(subproccode), 2):
            iden = subproccode[i]
            val = getattr(self, iden)
            if iden.endswith('_dir'):
                # Directories should be absolute, because the `chdir` call
                # in the new process moves to a different directory
                subproccode[i] = repr(os.path.abspath(val))
            else:
                subproccode[i] = repr(val)
        subproccode = ''.join(subproccode)

        # A fake `self` namespace carrying the few option attributes that
        # the spliced-in SphinxBuildDoc.run body still reads.
        optcode = textwrap.dedent("""
            class Namespace(object):
                pass
            self = Namespace()
            self.pdb = {pdb!r}
            self.verbosity = {verbosity!r}
            self.traceback = {traceback!r}
        """).format(pdb=getattr(self, 'pdb', False),
                    verbosity=getattr(self, 'verbosity', 0),
                    traceback=getattr(self, 'traceback', False))

        subproccode = optcode + subproccode

        # This is a quick gross hack, but it ensures that the code grabbed
        # from SphinxBuildDoc.run will work in Python 2 if it uses the print
        # function.
        # FIX: append an explicit newline instead of relying on the
        # following code block happening to start with one.
        if minversion(sphinx, '1.3'):
            subproccode = ('from __future__ import print_function\n' +
                           subproccode)

        if self.no_intersphinx:
            # the confoverrides variable in sphinx.setup_command.BuildDoc can
            # be used to override the conf.py ... but this could well break
            # if future versions of sphinx change the internals of BuildDoc,
            # so remain vigilant!
            subproccode = subproccode.replace(
                'confoverrides = {}',
                'confoverrides = {\'intersphinx_mapping\':{}}')

        # FIX: removed the stray ')' that the original appended to the
        # '[CODE END]' marker in this debug message.
        log.debug('Starting subprocess of {0} with python code:\n{1}\n'
                  '[CODE END]'.format(sys.executable, subproccode))

        # To return the number of warnings, we need to capture stdout. This
        # prevents a continuous updating at the terminal, but there's no
        # apparent way around this.
        if self.warnings_returncode:
            proc = subprocess.Popen([sys.executable, '-c', subproccode],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)

            # Assume failure until the sphinx success marker is seen.
            retcode = 1
            with proc.stdout:
                for line in iter(proc.stdout.readline, b''):
                    line = line.strip(b'\r\n')
                    print(line.decode('utf-8'))
                    if 'build succeeded.' == line.decode('utf-8'):
                        retcode = 0

            # Poll to set proc.retcode
            proc.wait()

            if retcode != 0:
                if os.environ.get('TRAVIS', None) == 'true':
                    # this means we are in the travis build, so customize
                    # the message appropriately.
                    msg = ('The build_docs travis build FAILED '
                           'because sphinx issued documentation '
                           'warnings (scroll up to see the warnings).')
                else:  # standard failure message
                    msg = ('build_docs returning a non-zero exit '
                           'code because sphinx issued documentation '
                           'warnings.')
                log.warn(msg)

        else:
            proc = subprocess.Popen([sys.executable], stdin=subprocess.PIPE)
            proc.communicate(subproccode.encode('utf-8'))

        if proc.returncode == 0:
            if self.open_docs_in_browser:
                if self.builder == 'html':
                    absdir = os.path.abspath(self.builder_target_dir)
                    index_path = os.path.join(absdir, 'index.html')
                    fileurl = 'file://' + pathname2url(index_path)
                    webbrowser.open(fileurl)
                else:
                    log.warn('open-docs-in-browser option was given, but '
                             'the builder is not html! Ignoring.')
        else:
            log.warn('Sphinx Documentation subprocess failed with return '
                     'code ' + str(proc.returncode))
            retcode = proc.returncode

        if retcode is not None:
            # this is potentially dangerous in that there might be something
            # after the call to `setup` in `setup.py`, and exiting here will
            # prevent that from running.  But there's no other apparent way
            # to signal what the return code should be.
            sys.exit(retcode)
def get_package_data():
    """Declare package data for ``astropy_helpers.commands``.

    Ships the ``src/compiler.c`` probe source alongside the package so the
    build_ext command can copy it into projects at build time.
    """
    compiler_probe = join('src', 'compiler.c')
    return {'astropy_helpers.commands': [compiler_probe]}
""" user_options = SetuptoolsRegister.user_options + [ ('hidden', None, 'mark this release as hidden on PyPI by default') ] boolean_options = SetuptoolsRegister.boolean_options + ['hidden'] def initialize_options(self): SetuptoolsRegister.initialize_options(self) self.hidden = False def build_post_data(self, action): data = SetuptoolsRegister.build_post_data(self, action) if action == 'submit' and self.hidden: data['_pypi_hidden'] = '1' return data def _set_config(self): # The original register command is buggy--if you use .pypirc with a # server-login section *at all* the repository you specify with the -r # option will be overwritten with either the repository in .pypirc or # with the default, # If you do not have a .pypirc using the -r option will just crash. # Way to go distutils # If we don't set self.repository back to a default value _set_config # can crash if there was a user-supplied value for this option; don't # worry, we'll get the real value back afterwards self.repository = 'pypi' SetuptoolsRegister._set_config(self) options = self.distribution.get_option_dict('register') if 'repository' in options: source, value = options['repository'] # Really anything that came from setup.cfg or the command line # should override whatever was in .pypirc self.repository = value ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/install_lib.py0000664000175000017500000000100013207611674030770 0ustar mseifertmseifert00000000000000from setuptools.command.install_lib import install_lib as SetuptoolsInstallLib from ..utils import _get_platlib_dir class AstropyInstallLib(SetuptoolsInstallLib): user_options = SetuptoolsInstallLib.user_options[:] boolean_options = SetuptoolsInstallLib.boolean_options[:] def finalize_options(self): build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) self.build_dir = platlib_dir SetuptoolsInstallLib.finalize_options(self) 
def should_build_with_cython(package, release=None):
    """Returns the previously used Cython version (or 'unknown' if not
    previously built) if Cython should be used to build extension modules
    from pyx files.

    If the ``release`` parameter is not specified an attempt is made to
    determine the release flag from `astropy.version`.
    """
    # Look for the generated <package>.cython_version module from a previous
    # build; absent module means we have never built with Cython before.
    try:
        version_module = __import__(package + '.cython_version',
                                    fromlist=['release', 'cython_version'])
    except ImportError:
        version_module = None

    if version_module is not None and release is None:
        release = getattr(version_module, 'release', None)

    # 'unknown' means the Cython-generated C sources have not been created
    # yet -- possible even for a release when building from a git tag.
    cython_version = getattr(version_module, 'cython_version', 'unknown')

    # Without Cython installed there is nothing to decide: never use it.
    try:
        import Cython  # noqa
    except ImportError:
        return False

    # Use Cython for development builds, or whenever the generated sources
    # are missing; otherwise fall back to the pre-generated C files.
    if not release or cython_version == 'unknown':
        return cython_version
    return False
The latter # case can happen even when release is True if checking out a release tag # from the repository have_cython = False try: import Cython # noqa have_cython = True except ImportError: pass if have_cython and (not release or cython_version == 'unknown'): return cython_version else: return False _compiler_versions = {} def get_compiler_version(compiler): if compiler in _compiler_versions: return _compiler_versions[compiler] # Different flags to try to get the compiler version # TODO: It might be worth making this configurable to support # arbitrary odd compilers; though all bets may be off in such # cases anyway flags = ['--version', '--Version', '-version', '-Version', '-v', '-V'] def try_get_version(flag): process = subprocess.Popen( shlex.split(compiler, posix=('win' not in sys.platform)) + [flag], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() if process.returncode != 0: return 'unknown' output = stdout.strip().decode('latin-1') # Safest bet if not output: # Some compilers return their version info on stderr output = stderr.strip().decode('latin-1') if not output: output = 'unknown' return output for flag in flags: version = try_get_version(flag) if version != 'unknown': break # Cache results to speed up future calls _compiler_versions[compiler] = version return version # TODO: I think this can be reworked without having to create the class # programmatically. def generate_build_ext_command(packagename, release): """ Creates a custom 'build_ext' command that allows for manipulating some of the C extension options at build time. We use a function to build the class since the base class for build_ext may be different depending on certain build-time parameters (for example, we may use Cython's build_ext instead of the default version in distutils). Uses the default distutils.command.build_ext by default. 
""" class build_ext(SetuptoolsBuildExt, object): package_name = packagename is_release = release _user_options = SetuptoolsBuildExt.user_options[:] _boolean_options = SetuptoolsBuildExt.boolean_options[:] _help_options = SetuptoolsBuildExt.help_options[:] force_rebuild = False _broken_compiler_mapping = [ ('i686-apple-darwin[0-9]*-llvm-gcc-4.2', 'clang') ] # Warning: Spaghetti code ahead. # During setup.py, the setup_helpers module needs the ability to add # items to a command's user_options list. At this stage we don't know # whether or not we can build with Cython, and so don't know for sure # what base class will be used for build_ext; nevertheless we want to # be able to provide a list to add options into. # # Later, once setup() has been called we should have all build # dependencies included via setup_requires available. distutils needs # to be able to access the user_options as a *class* attribute before # the class has been initialized, but we do need to be able to # enumerate the options for the correct base class at that point @classproperty def user_options(cls): from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._user_options return cls._final_class.user_options @classproperty def boolean_options(cls): # Similar to user_options above from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._boolean_options return cls._final_class.boolean_options @classproperty def help_options(cls): # Similar to user_options above from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._help_options return cls._final_class.help_options @classproperty(lazy=True) def _final_class(cls): """ Late determination of what the build_ext base class should 
be, depending on whether or not Cython is available. """ uses_cython = should_build_with_cython(cls.package_name, cls.is_release) if uses_cython: # We need to decide late on whether or not to use Cython's # build_ext (since Cython may not be available earlier in the # setup.py if it was brought in via setup_requires) try: from Cython.Distutils.old_build_ext import old_build_ext as base_cls except ImportError: from Cython.Distutils import build_ext as base_cls else: base_cls = SetuptoolsBuildExt # Create and return an instance of a new class based on this class # using one of the above possible base classes def merge_options(attr): base = getattr(base_cls, attr) ours = getattr(cls, '_' + attr) all_base = set(opt[0] for opt in base) return base + [opt for opt in ours if opt[0] not in all_base] boolean_options = (base_cls.boolean_options + [opt for opt in cls._boolean_options if opt not in base_cls.boolean_options]) members = dict(cls.__dict__) members.update({ 'user_options': merge_options('user_options'), 'help_options': merge_options('help_options'), 'boolean_options': boolean_options, 'uses_cython': uses_cython, }) # Update the base class for the original build_ext command build_ext.__bases__ = (base_cls, object) # Create a new class for the existing class, but now with the # appropriate base class depending on whether or not to use Cython. # Ensure that object is one of the bases to make a new-style class. 
return type(cls.__name__, (build_ext,), members) def __new__(cls, *args, **kwargs): # By the time the command is actually instantialized, the # Distribution instance for the build has been instantiated, which # means setup_requires has been processed--now we can determine # what base class we can use for the actual build, and return an # instance of a build_ext command that uses that base class (right # now the options being Cython.Distutils.build_ext, or the stock # setuptools build_ext) new_cls = super(build_ext, cls._final_class).__new__( cls._final_class) # Since the new cls is not a subclass of the original cls, we must # manually call its __init__ new_cls.__init__(*args, **kwargs) return new_cls def finalize_options(self): # Add a copy of the _compiler.so module as well, but only if there # are in fact C modules to compile (otherwise there's no reason to # include a record of the compiler used) # Note, self.extensions may not be set yet, but # self.distribution.ext_modules is where any extension modules # passed to setup() can be found self._adjust_compiler() extensions = self.distribution.ext_modules if extensions: build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(packagename) src_path = os.path.relpath( os.path.join(os.path.dirname(__file__), 'src')) shutil.copy(os.path.join(src_path, 'compiler.c'), os.path.join(package_dir, '_compiler.c')) ext = Extension(self.package_name + '._compiler', [os.path.join(package_dir, '_compiler.c')]) extensions.insert(0, ext) super(build_ext, self).finalize_options() # Generate if self.uses_cython: try: from Cython import __version__ as cython_version except ImportError: # This shouldn't happen if we made it this far cython_version = None if (cython_version is not None and cython_version != self.uses_cython): self.force_rebuild = True # Update the used cython version self.uses_cython = cython_version # Regardless of the value of the '--force' option, force a rebuild # if the debug flag 
changed from the last build if self.force_rebuild: self.force = True def run(self): # For extensions that require 'numpy' in their include dirs, # replace 'numpy' with the actual paths np_include = get_numpy_include_path() for extension in self.extensions: if 'numpy' in extension.include_dirs: idx = extension.include_dirs.index('numpy') extension.include_dirs.insert(idx, np_include) extension.include_dirs.remove('numpy') self._check_cython_sources(extension) super(build_ext, self).run() # Update cython_version.py if building with Cython try: cython_version = get_pkg_version_module( packagename, fromlist=['cython_version'])[0] except (AttributeError, ImportError): cython_version = 'unknown' if self.uses_cython and self.uses_cython != cython_version: build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(packagename) cython_py = os.path.join(package_dir, 'cython_version.py') with open(cython_py, 'w') as f: f.write('# Generated file; do not modify\n') f.write('cython_version = {0!r}\n'.format(self.uses_cython)) if os.path.isdir(self.build_lib): # The build/lib directory may not exist if the build_py # command was not previously run, which may sometimes be # the case self.copy_file(cython_py, os.path.join(self.build_lib, cython_py), preserve_mode=False) invalidate_caches() def _adjust_compiler(self): """ This function detects broken compilers and switches to another. If the environment variable CC is explicitly set, or a compiler is specified on the commandline, no override is performed -- the purpose here is to only override a default compiler. The specific compilers with problems are: * The default compiler in XCode-4.2, llvm-gcc-4.2, segfaults when compiling wcslib. The set of broken compilers can be updated by changing the compiler_mapping variable. It is a list of 2-tuples where the first in the pair is a regular expression matching the version of the broken compiler, and the second is the compiler to change to. 
""" if 'CC' in os.environ: # Check that CC is not set to llvm-gcc-4.2 c_compiler = os.environ['CC'] try: version = get_compiler_version(c_compiler) except OSError: msg = textwrap.dedent( """ The C compiler set by the CC environment variable: {compiler:s} cannot be found or executed. """.format(compiler=c_compiler)) log.warn(msg) sys.exit(1) for broken, fixed in self._broken_compiler_mapping: if re.match(broken, version): msg = textwrap.dedent( """Compiler specified by CC environment variable ({compiler:s}:{version:s}) will fail to compile {pkg:s}. Please set CC={fixed:s} and try again. You can do this, for example, by running: CC={fixed:s} python setup.py where is the command you ran. """.format(compiler=c_compiler, version=version, pkg=self.package_name, fixed=fixed)) log.warn(msg) sys.exit(1) # If C compiler is set via CC, and isn't broken, we are good to go. We # should definitely not try accessing the compiler specified by # ``sysconfig.get_config_var('CC')`` lower down, because this may fail # if the compiler used to compile Python is missing (and maybe this is # why the user is setting CC). For example, the official Python 2.7.3 # MacOS X binary was compiled with gcc-4.2, which is no longer available # in XCode 4. return if self.compiler is not None: # At this point, self.compiler will be set only if a compiler # was specified in the command-line or via setup.cfg, in which # case we don't do anything return compiler_type = ccompiler.get_default_compiler() if compiler_type == 'unix': # We have to get the compiler this way, as this is the one that is # used if os.environ['CC'] is not set. It is actually read in from # the Python Makefile. 
Note that this is not necessarily the same # compiler as returned by ccompiler.new_compiler() c_compiler = sysconfig.get_config_var('CC') try: version = get_compiler_version(c_compiler) except OSError: msg = textwrap.dedent( """ The C compiler used to compile Python {compiler:s}, and which is normally used to compile C extensions, is not available. You can explicitly specify which compiler to use by setting the CC environment variable, for example: CC=gcc python setup.py or if you are using MacOS X, you can try: CC=clang python setup.py """.format(compiler=c_compiler)) log.warn(msg) sys.exit(1) for broken, fixed in self._broken_compiler_mapping: if re.match(broken, version): os.environ['CC'] = fixed break def _check_cython_sources(self, extension): """ Where relevant, make sure that the .c files associated with .pyx modules are present (if building without Cython installed). """ # Determine the compiler we'll be using if self.compiler is None: compiler = get_default_compiler() else: compiler = self.compiler # Replace .pyx with C-equivalents, unless c files are missing for jdx, src in enumerate(extension.sources): base, ext = os.path.splitext(src) pyxfn = base + '.pyx' cfn = base + '.c' cppfn = base + '.cpp' if not os.path.isfile(pyxfn): continue if self.uses_cython: extension.sources[jdx] = pyxfn else: if os.path.isfile(cfn): extension.sources[jdx] = cfn elif os.path.isfile(cppfn): extension.sources[jdx] = cppfn else: msg = ( 'Could not find C/C++ file {0}.(c/cpp) for Cython ' 'file {1} when building extension {2}. Cython ' 'must be installed to build from a git ' 'checkout.'.format(base, pyxfn, extension.name)) raise IOError(errno.ENOENT, msg, cfn) # Current versions of Cython use deprecated Numpy API features # the use of which produces a few warnings when compiling. # These additional flags should squelch those warnings. 
# TODO: Feel free to remove this if/when a Cython update # removes use of the deprecated Numpy API if compiler == 'unix': extension.extra_compile_args.extend([ '-Wp,-w', '-Wno-unused-function']) return build_ext ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/build_py.py0000664000175000017500000000265613207611674030325 0ustar mseifertmseifert00000000000000from setuptools.command.build_py import build_py as SetuptoolsBuildPy from ..utils import _get_platlib_dir class AstropyBuildPy(SetuptoolsBuildPy): user_options = SetuptoolsBuildPy.user_options[:] boolean_options = SetuptoolsBuildPy.boolean_options[:] def finalize_options(self): # Update build_lib settings from the build command to always put # build files in platform-specific subdirectories of build/, even # for projects with only pure-Python source (this is desirable # specifically for support of multiple Python version). build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) build_cmd.build_purelib = platlib_dir build_cmd.build_lib = platlib_dir self.build_lib = platlib_dir SetuptoolsBuildPy.finalize_options(self) def run_2to3(self, files, doctests=False): # Filter the files to exclude things that shouldn't be 2to3'd skip_2to3 = self.distribution.skip_2to3 filtered_files = [] for filename in files: for package in skip_2to3: if filename[len(self.build_lib) + 1:].startswith(package): break else: filtered_files.append(filename) SetuptoolsBuildPy.run_2to3(self, filtered_files, doctests) def run(self): # first run the normal build_py SetuptoolsBuildPy.run(self) ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers/commands/install.py0000664000175000017500000000074613207611674030162 0ustar mseifertmseifert00000000000000from setuptools.command.install import install as SetuptoolsInstall from ..utils import _get_platlib_dir class AstropyInstall(SetuptoolsInstall): user_options = SetuptoolsInstall.user_options[:] boolean_options = SetuptoolsInstall.boolean_options[:] def 
class AstropyInstall(SetuptoolsInstall):
    """install variant that installs from the platform-specific build
    directory used by the astropy_helpers build commands."""

    user_options = SetuptoolsInstall.user_options[:]
    boolean_options = SetuptoolsInstall.boolean_options[:]

    def finalize_options(self):
        # Point build_lib at the platform-specific subdirectory of build/
        # so 'install' picks up what AstropyBuildPy produced.
        build_cmd = self.get_finalized_command('build')
        platlib_dir = _get_platlib_dir(build_cmd)
        self.build_lib = platlib_dir
        SetuptoolsInstall.finalize_options(self)
------------------------------------------------------------------------------- The files - compiler_unparse.py - comment_eater.py - traitsdoc.py have the following license: This software is OSI Certified Open Source Software. OSI Certified is a certification mark of the Open Source Initiative. Copyright (c) 2006, Enthought, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Enthought, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ------------------------------------------------------------------------------- The file - plot_directive.py originates from Matplotlib (http://matplotlib.sf.net/) which has the following license: Copyright (c) 2002-2008 John D. 
Hunter; All Rights Reserved. 1. This LICENSE AGREEMENT is between John D. Hunter (“JDH”), and the Individual or Organization (“Licensee”) accessing and otherwise using matplotlib software in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, JDH hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use matplotlib 0.98.3 alone or in any derivative version, provided, however, that JDH’s License Agreement and JDH’s notice of copyright, i.e., “Copyright (c) 2002-2008 John D. Hunter; All Rights Reserved” are retained in matplotlib 0.98.3 alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates matplotlib 0.98.3 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to matplotlib 0.98.3. 4. JDH is making matplotlib 0.98.3 available to Licensee on an “AS IS” basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 0.98.3 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB 0.98.3 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING MATPLOTLIB 0.98.3, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. 
Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between JDH and Licensee. This License Agreement does not grant permission to use JDH trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using matplotlib 0.98.3, Licensee agrees to be bound by the terms and conditions of this License Agreement. ccdproc-1.3.0.post1/astropy_helpers/licenses/LICENSE_COPYBUTTON.rst0000664000175000017500000000471113207611674026321 0ustar mseifertmseifert00000000000000Copyright 2014 Python Software Foundation License: PSF PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- . 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. . 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. . 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. . 4. PSF is making Python available to Licensee on an "AS IS" basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. . 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. . 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. . 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. . 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. ccdproc-1.3.0.post1/astropy_helpers/licenses/LICENSE_ASTROSCRAPPY.rst0000664000175000017500000000315413207611674026545 0ustar mseifertmseifert00000000000000# The OpenMP helpers include code heavily adapted from astroscrappy, released # under the following license: # # Copyright (c) 2015, Curtis McCully # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, this # list of conditions and the following disclaimer in the documentation and/or # other materials provided with the distribution. 
# * Neither the name of the Astropy Team nor the names of its contributors may be # used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers.egg-info/0000775000175000017500000000000013207623133026015 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers.egg-info/not-zip-safe0000664000175000017500000000000113207611706030246 0ustar mseifertmseifert00000000000000 ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers.egg-info/dependency_links.txt0000664000175000017500000000000113207611706032066 0ustar mseifertmseifert00000000000000 ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers.egg-info/PKG-INFO0000664000175000017500000000760113207611706027121 0ustar mseifertmseifert00000000000000Metadata-Version: 1.1 Name: astropy-helpers Version: 2.0.2 Summary: Utilities for building and installing Astropy, Astropy affiliated packages, and their respective documentation. 
Home-page: https://github.com/astropy/astropy-helpers Author: The Astropy Developers Author-email: astropy.team@gmail.com License: BSD Description-Content-Type: UNKNOWN Description: astropy-helpers =============== * Stable versions: https://pypi.org/project/astropy-helpers/ * Development version, issue tracker: https://github.com/astropy/astropy-helpers This project provides a Python package, ``astropy_helpers``, which includes many build, installation, and documentation-related tools used by the Astropy project, but packaged separately for use by other projects that wish to leverage this work. The motivation behind this package and details of its implementation are in the accepted `Astropy Proposal for Enhancement (APE) 4 `_. The ``astropy_helpers.extern`` sub-module includes modules developed elsewhere that are bundled here for convenience. At the moment, this consists of the following two sphinx extensions: * `numpydoc `_, a Sphinx extension developed as part of the Numpy project. This is used to parse docstrings in Numpy format * `sphinx-automodapi `_, a Sphinx developed as part of the Astropy project. This used to be developed directly in ``astropy-helpers`` but is now a standalone package. Issues with these sub-modules should be reported in their respective repositories, and we will regularly update the bundled versions to reflect the latest released versions. ``astropy_helpers`` includes a special "bootstrap" module called ``ah_bootstrap.py`` which is intended to be used by a project's setup.py in order to ensure that the ``astropy_helpers`` package is available for build/installation. This is similar to the ``ez_setup.py`` module that is shipped with some projects to bootstrap `setuptools `_. As described in APE4, the version numbers for ``astropy_helpers`` follow the corresponding major/minor version of the `astropy core package `_, but with an independent sequence of micro (bugfix) version numbers. 
Hence, the initial release is 0.4, in parallel with Astropy v0.4, which will be the first version of Astropy to use ``astropy-helpers``. For examples of how to implement ``astropy-helpers`` in a project, see the ``setup.py`` and ``setup.cfg`` files of the `Affiliated package template `_. .. image:: https://travis-ci.org/astropy/astropy-helpers.svg :target: https://travis-ci.org/astropy/astropy-helpers .. image:: https://coveralls.io/repos/astropy/astropy-helpers/badge.svg :target: https://coveralls.io/r/astropy/astropy-helpers Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Framework :: Setuptools Plugin Classifier: Framework :: Sphinx :: Extension Classifier: Framework :: Sphinx :: Theme Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Software Development :: Build Tools Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: System :: Archiving :: Packaging ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers.egg-info/top_level.txt0000664000175000017500000000002013207611706030542 0ustar mseifertmseifert00000000000000astropy_helpers ccdproc-1.3.0.post1/astropy_helpers/astropy_helpers.egg-info/SOURCES.txt0000664000175000017500000000642513207611706027713 0ustar mseifertmseifert00000000000000CHANGES.rst LICENSE.rst MANIFEST.in README.rst ah_bootstrap.py ez_setup.py setup.cfg setup.py astropy_helpers/__init__.py astropy_helpers/distutils_helpers.py astropy_helpers/git_helpers.py astropy_helpers/openmp_helpers.py astropy_helpers/setup_helpers.py astropy_helpers/test_helpers.py astropy_helpers/utils.py astropy_helpers/version.py astropy_helpers/version_helpers.py astropy_helpers.egg-info/PKG-INFO astropy_helpers.egg-info/SOURCES.txt astropy_helpers.egg-info/dependency_links.txt 
astropy_helpers.egg-info/not-zip-safe astropy_helpers.egg-info/top_level.txt astropy_helpers/commands/__init__.py astropy_helpers/commands/_dummy.py astropy_helpers/commands/_test_compat.py astropy_helpers/commands/build_ext.py astropy_helpers/commands/build_py.py astropy_helpers/commands/build_sphinx.py astropy_helpers/commands/install.py astropy_helpers/commands/install_lib.py astropy_helpers/commands/register.py astropy_helpers/commands/setup_package.py astropy_helpers/commands/test.py astropy_helpers/commands/src/compiler.c astropy_helpers/compat/__init__.py astropy_helpers/extern/__init__.py astropy_helpers/extern/setup_package.py astropy_helpers/extern/automodapi/__init__.py astropy_helpers/extern/automodapi/autodoc_enhancements.py astropy_helpers/extern/automodapi/automodapi.py astropy_helpers/extern/automodapi/automodsumm.py astropy_helpers/extern/automodapi/smart_resolver.py astropy_helpers/extern/automodapi/utils.py astropy_helpers/extern/automodapi/templates/autosummary_core/base.rst astropy_helpers/extern/automodapi/templates/autosummary_core/class.rst astropy_helpers/extern/automodapi/templates/autosummary_core/module.rst astropy_helpers/extern/numpydoc/__init__.py astropy_helpers/extern/numpydoc/docscrape.py astropy_helpers/extern/numpydoc/docscrape_sphinx.py astropy_helpers/extern/numpydoc/numpydoc.py astropy_helpers/extern/numpydoc/templates/numpydoc_docstring.rst astropy_helpers/sphinx/__init__.py astropy_helpers/sphinx/conf.py astropy_helpers/sphinx/setup_package.py astropy_helpers/sphinx/ext/__init__.py astropy_helpers/sphinx/ext/changelog_links.py astropy_helpers/sphinx/ext/doctest.py astropy_helpers/sphinx/ext/edit_on_github.py astropy_helpers/sphinx/ext/tocdepthfix.py astropy_helpers/sphinx/ext/tests/__init__.py astropy_helpers/sphinx/local/python2_local_links.inv astropy_helpers/sphinx/local/python3_local_links.inv astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html astropy_helpers/sphinx/themes/bootstrap-astropy/layout.html 
astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html astropy_helpers/sphinx/themes/bootstrap-astropy/searchbox.html astropy_helpers/sphinx/themes/bootstrap-astropy/theme.conf astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout.svg astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout_20.png astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.svg astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.css astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutton.js astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.js licenses/LICENSE_ASTROSCRAPPY.rst licenses/LICENSE_COPYBUTTON.rst licenses/LICENSE_NUMPYDOC.rstccdproc-1.3.0.post1/astropy_helpers/CHANGES.rst0000664000175000017500000003744613207611674022727 0ustar mseifertmseifert00000000000000astropy-helpers Changelog ************************* 2.0.2 (2017-10-13) ------------------ - Added new helper function add_openmp_flags_if_available that can add OpenMP compilation flags to a C/Cython extension if needed. [#346] - Update numpydoc to v0.7. [#343] - The function ``get_git_devstr`` now returns ``'0'`` instead of ``None`` when no git repository is present. This allows generation of development version strings that are in a format that ``setuptools`` expects (e.g. "1.1.3.dev0" instead of "1.1.3.dev"). [#330] - It is now possible to override generated timestamps to make builds reproducible by setting the ``SOURCE_DATE_EPOCH`` environment variable [#341] - Mark Sphinx extensions as parallel-safe. [#344] - Switch to using mathjax instead of imgmath for local builds. [#342] - Deprecate ``exclude`` parameter of various functions in setup_helpers since it could not work as intended. Add new function ``add_exclude_packages`` to provide intended behavior. 
[#331] - Allow custom Sphinx doctest extension to recognize and process standard doctest directives ``testsetup`` and ``doctest``. [#335] 2.0.1 (2017-07-28) ------------------ - Fix compatibility with Sphinx <1.5. [#326] 2.0 (2017-07-06) ---------------- - Add support for package that lies in a subdirectory. [#249] - Removing ``compat.subprocess``. [#298] - Python 3.3 is no longer supported. [#300] - The 'automodapi' Sphinx extension (and associated dependencies) has now been moved to a standalone package which can be found at https://github.com/astropy/sphinx-automodapi - this is now bundled in astropy-helpers under astropy_helpers.extern.automodapi for convenience. Version shipped with astropy-helpers is v0.6. [#278, #303, #309, #323] - The ``numpydoc`` Sphinx extension has now been moved to ``astropy_helpers.extern``. [#278] - Fix ``build_docs`` error catching, so it doesn't hide Sphinx errors. [#292] - Fix compatibility with Sphinx 1.6. [#318] - Updating ez_setup.py to the last version before it's removal. [#321] 1.3.1 (2017-03-18) ------------------ - Fixed the missing button to hide output in documentation code blocks. [#287] - Fixed bug when ``build_docs`` when running with the clean (-l) option. [#289] - Add alternative location for various intersphinx inventories to fall back to. [#293] 1.3 (2016-12-16) ---------------- - ``build_sphinx`` has been deprecated in favor of the ``build_docs`` command. [#246] - Force the use of Cython's old ``build_ext`` command. A new ``build_ext`` command was added in Cython 0.25, but it does not work with astropy-helpers currently. [#261] 1.2 (2016-06-18) ---------------- - Added sphinx configuration value ``automodsumm_inherited_members``. If ``True`` this will include members that are inherited from a base class in the generated API docs. Defaults to ``False`` which matches the previous behavior. [#215] - Fixed ``build_sphinx`` to recognize builds that succeeded but have output *after* the "build succeeded." statement. 
This only applies when ``--warnings-returncode`` is given (which is primarily relevant for Travis documentation builds). [#223] - Fixed ``build_sphinx`` the sphinx extensions to not output a spurious warning for sphinx versions > 1.4. [#229] - Add Python version dependent local sphinx inventories that contain otherwise missing references. [#216] - ``astropy_helpers`` now require Sphinx 1.3 or later. [#226] 1.1.2 (2016-03-9) ----------------- - The CSS for the sphinx documentation was altered to prevent some text overflow problems. [#217] 1.1.1 (2015-12-23) ------------------ - Fixed crash in build with ``AttributeError: cython_create_listing`` with older versions of setuptools. [#209, #210] 1.1 (2015-12-10) ---------------- - The original ``AstropyTest`` class in ``astropy_helpers``, which implements the ``setup.py test`` command, is deprecated in favor of moving the implementation of that command closer to the actual Astropy test runner in ``astropy.tests``. Now a dummy ``test`` command is provided solely for informing users that they need ``astropy`` installed to run the tests (however, the previous, now deprecated implementation is still provided and continues to work with older versions of Astropy). See the related issue for more details. [#184] - Added a useful new utility function to ``astropy_helpers.utils`` called ``find_data_files``. This is similar to the ``find_packages`` function in setuptools in that it can be used to search a package for data files (matching a pattern) that can be passed to the ``package_data`` argument for ``setup()``. See the docstring to ``astropy_helpers.utils.find_data_files`` for more details. [#42] - The ``astropy_helpers`` module now sets the global ``_ASTROPY_SETUP_`` flag upon import (from within a ``setup.py``) script, so it's not necessary to have this in the ``setup.py`` script explicitly. If in doubt though, there's no harm in setting it twice. 
Putting it in ``astropy_helpers`` just ensures that any other imports that occur during build will have this flag set. [#191] - It is now possible to use Cython as a ``setup_requires`` build requirement, and still build Cython extensions even if Cython wasn't available at the beginning of the build processes (that is, is automatically downloaded via setuptools' processing of ``setup_requires``). [#185] - Moves the ``adjust_compiler`` check into the ``build_ext`` command itself, so it's only used when actually building extension modules. This also deprecates the stand-alone ``adjust_compiler`` function. [#76] - When running the ``build_sphinx`` / ``build_docs`` command with the ``-w`` option, the output from Sphinx is streamed as it runs instead of silently buffering until the doc build is complete. [#197] 1.0.7 (unreleased) ------------------ - Fix missing import in ``astropy_helpers/utils.py``. [#196] 1.0.6 (2015-12-04) ------------------ - Fixed bug where running ``./setup.py build_sphinx`` could return successfully even when the build was not successful (and should have returned a non-zero error code). [#199] 1.0.5 (2015-10-02) ------------------ - Fixed a regression in the ``./setup.py test`` command that was introduced in v1.0.4. 1.0.4 (2015-10-02) ------------------ - Fixed issue with the sphinx documentation css where the line numbers for code blocks were not aligned with the code. [#179, #180] - Fixed crash that could occur when trying to build Cython extension modules when Cython isn't installed. Normally this still results in a failed build, but was supposed to provide a useful error message rather than crash outright (this was a regression introduced in v1.0.3). [#181] - Fixed a crash that could occur on Python 3 when a working C compiler isn't found. [#182] - Quieted warnings about deprecated Numpy API in Cython extensions, when building Cython extensions against Numpy >= 1.7. 
[#183, #186] - Improved support for py.test >= 2.7--running the ``./setup.py test`` command now copies all doc pages into the temporary test directory as well, so that all test files have a "common root directory". [#189, #190] 1.0.3 (2015-07-22) ------------------ - Added workaround for sphinx-doc/sphinx#1843, a bug in Sphinx which prevented descriptor classes with a custom metaclass from being documented correctly. [#158] - Added an alias for the ``./setup.py build_sphinx`` command as ``./setup.py build_docs`` which, to a new contributor, should hopefully be less cryptic. [#161] - The fonts in graphviz diagrams now match the font of the HTML content. [#169] - When the documentation is built on readthedocs.org, MathJax will be used for math rendering. When built elsewhere, the "pngmath" extension is still used for math rendering. [#170] - Fix crash when importing astropy_helpers when running with ``python -OO`` [#171] - The ``build`` and ``build_ext`` stages now correctly recognize the presence of C++ files in Cython extensions (previously only vanilla C worked). [#173] 1.0.2 (2015-04-02) ------------------ - Various fixes enabling the astropy-helpers Sphinx build command and Sphinx extensions to work with Sphinx 1.3. [#148] - More improvement to the ability to handle multiple versions of astropy-helpers being imported in the same Python interpreter session in the (somewhat rare) case of nested installs. [#147] - To better support high resolution displays, use SVG for the astropy logo and linkout image, falling back to PNGs for browsers that don't support it. [#150, #151] - Improve ``setup_helpers.get_compiler_version`` to work with more compilers, and to return more info. This will help fix builds of Astropy on less common compilers, like Sun C. [#153] 1.0.1 (2015-03-04) ------------------ - Released in concert with v0.4.8 to address the same issues.
0.4.8 (2015-03-04) ------------------ - Improved the ``ah_bootstrap`` script's ability to override existing installations of astropy-helpers with new versions in the context of installing multiple packages simultaneously within the same Python interpreter (e.g. when one package has in its ``setup_requires`` another package that uses a different version of astropy-helpers. [#144] - Added a workaround to an issue in matplotlib that can, in rare cases, lead to a crash when installing packages that import matplotlib at build time. [#144] 1.0 (2015-02-17) ---------------- - Added new pre-/post-command hook points for ``setup.py`` commands. Now any package can define code to run before and/or after any ``setup.py`` command without having to manually subclass that command by adding ``pre__hook`` and ``post__hook`` callables to the package's ``setup_package.py`` module. See the PR for more details. [#112] - The following objects in the ``astropy_helpers.setup_helpers`` module have been relocated: - ``get_dummy_distribution``, ``get_distutils_*``, ``get_compiler_option``, ``add_command_option``, ``is_distutils_display_option`` -> ``astropy_helpers.distutils_helpers`` - ``should_build_with_cython``, ``generate_build_ext_command`` -> ``astropy_helpers.commands.build_ext`` - ``AstropyBuildPy`` -> ``astropy_helpers.commands.build_py`` - ``AstropyBuildSphinx`` -> ``astropy_helpers.commands.build_sphinx`` - ``AstropyInstall`` -> ``astropy_helpers.commands.install`` - ``AstropyInstallLib`` -> ``astropy_helpers.commands.install_lib`` - ``AstropyRegister`` -> ``astropy_helpers.commands.register`` - ``get_pkg_version_module`` -> ``astropy_helpers.version_helpers`` - ``write_if_different``, ``import_file``, ``get_numpy_include_path`` -> ``astropy_helpers.utils`` All of these are "soft" deprecations in the sense that they are still importable from ``astropy_helpers.setup_helpers`` for now, and there is no (easy) way to produce deprecation warnings when importing these objects from 
``setup_helpers`` rather than directly from the modules they are defined in. But please consider updating any imports to these objects. [#110] - Use of the ``astropy.sphinx.ext.astropyautosummary`` extension is deprecated for use with Sphinx < 1.2. Instead it should suffice to remove this extension for the ``extensions`` list in your ``conf.py`` and add the stock ``sphinx.ext.autosummary`` instead. [#131] 0.4.7 (2015-02-17) ------------------ - Fixed incorrect/missing git hash being added to the generated ``version.py`` when creating a release. [#141] 0.4.6 (2015-02-16) ------------------ - Fixed problems related to the automatically generated _compiler module not being created properly. [#139] 0.4.5 (2015-02-11) ------------------ - Fixed an issue where ah_bootstrap.py could blow up when astropy_helper's version number is 1.0. - Added a workaround for documentation of properties in the rare case where the class's metaclass has a property of the same name. [#130] - Fixed an issue on Python 3 where importing a package using astropy-helper's generated version.py module would crash when the current working directory is an empty git repository. [#114, #137] - Fixed an issue where the "revision count" appended to .dev versions by the generated version.py did not accurately reflect the revision count for the package it belongs to, and could be invalid if the current working directory is an unrelated git repository. [#107, #137] - Likewise, fixed a confusing warning message that could occur in the same circumstances as the above issue. [#121, #137] 0.4.4 (2014-12-31) ------------------ - More improvements for building the documentation using Python 3.x. [#100] - Additional minor fixes to Python 3 support. [#115] - Updates to support new test features in Astropy [#92, #106] 0.4.3 (2014-10-22) ------------------ - The generated ``version.py`` file now preserves the git hash of installed copies of the package as well as when building a source distribution. 
That is, the git hash of the changeset that was installed/released is preserved. [#87] - In smart resolver add resolution for class links when they exist in the intersphinx inventory, but not the mapping of the current package (e.g. when an affiliated package uses an astropy core class of which "actual" and "documented" location differs) [#88] - Fixed a bug that could occur when running ``setup.py`` for the first time in a repository that uses astropy-helpers as a submodule: ``AttributeError: 'NoneType' object has no attribute 'mkdtemp'`` [#89] - Fixed a bug where optional arguments to the ``doctest-skip`` Sphinx directive were sometimes being left in the generated documentation output. [#90] - Improved support for building the documentation using Python 3.x. [#96] - Avoid error message if .git directory is not present. [#91] 0.4.2 (2014-08-09) ------------------ - Fixed some CSS issues in generated API docs. [#69] - Fixed the warning message that could be displayed when generating a version number with some older versions of git. [#77] - Fixed automodsumm to work with new versions of Sphinx (>= 1.2.2). [#80] 0.4.1 (2014-08-08) ------------------ - Fixed git revision count on systems with git versions older than v1.7.2. [#70] - Fixed display of warning text when running a git command fails (previously the output of stderr was not being decoded properly). [#70] - The ``--offline`` flag to ``setup.py`` understood by ``ah_bootstrap.py`` now also prevents git from going online to fetch submodule updates. [#67] - The Sphinx extension for converting issue numbers to links in the changelog now supports working on arbitrary pages via a new ``conf.py`` setting: ``changelog_links_docpattern``. By default it affects the ``changelog`` and ``whatsnew`` pages in one's Sphinx docs. [#61] - Fixed crash that could result from users with missing/misconfigured locale settings. 
[#58] - The font used for code examples in the docs is now the system-defined ``monospace`` font, rather than ``Minaco``, which is not available on all platforms. [#50] 0.4 (2014-07-15) ---------------- - Initial release of astropy-helpers. See `APE4 `_ for details of the motivation and design of this package. - The ``astropy_helpers`` package replaces the following modules in the ``astropy`` package: - ``astropy.setup_helpers`` -> ``astropy_helpers.setup_helpers`` - ``astropy.version_helpers`` -> ``astropy_helpers.version_helpers`` - ``astropy.sphinx`` - > ``astropy_helpers.sphinx`` These modules should be considered deprecated in ``astropy``, and any new, non-critical changes to those modules will be made in ``astropy_helpers`` instead. Affiliated packages wishing to make use those modules (as in the Astropy package-template) should use the versions from ``astropy_helpers`` instead, and include the ``ah_bootstrap.py`` script in their project, for bootstrapping the ``astropy_helpers`` package in their setup.py script. ccdproc-1.3.0.post1/astropy_helpers/ez_setup.py0000664000175000017500000003037113207611674023323 0ustar mseifertmseifert00000000000000#!/usr/bin/env python """ Setuptools bootstrapping installer. Maintained at https://github.com/pypa/setuptools/tree/bootstrap. Run this script to install or upgrade setuptools. This method is DEPRECATED. Check https://github.com/pypa/setuptools/issues/581 for more details. """ import os import shutil import sys import tempfile import zipfile import optparse import subprocess import platform import textwrap import contextlib from distutils import log try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen try: from site import USER_SITE except ImportError: USER_SITE = None # 33.1.1 is the last version that supports setuptools self upgrade/installation. 
DEFAULT_VERSION = "33.1.1" DEFAULT_URL = "https://pypi.io/packages/source/s/setuptools/" DEFAULT_SAVE_DIR = os.curdir DEFAULT_DEPRECATION_MESSAGE = "ez_setup.py is deprecated and when using it setuptools will be pinned to {0} since it's the last version that supports setuptools self upgrade/installation, check https://github.com/pypa/setuptools/issues/581 for more info; use pip to install setuptools" MEANINGFUL_INVALID_ZIP_ERR_MSG = 'Maybe {0} is corrupted, delete it and try again.' log.warn(DEFAULT_DEPRECATION_MESSAGE.format(DEFAULT_VERSION)) def _python_cmd(*args): """ Execute a command. Return True if the command succeeded. """ args = (sys.executable,) + args return subprocess.call(args) == 0 def _install(archive_filename, install_args=()): """Install Setuptools.""" with archive_context(archive_filename): # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 def _build_egg(egg, archive_filename, to_dir): """Build Setuptools egg.""" with archive_context(archive_filename): # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') class ContextualZipFile(zipfile.ZipFile): """Supplement ZipFile class to support context manager for Python 2.6.""" def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __new__(cls, *args, **kwargs): """Construct a ZipFile or ContextualZipFile as appropriate.""" if hasattr(zipfile.ZipFile, '__exit__'): return zipfile.ZipFile(*args, **kwargs) return super(ContextualZipFile, cls).__new__(cls) @contextlib.contextmanager def archive_context(filename): """ Unzip filename to a temporary directory, set to the cwd. 
The unzipped target is cleaned up after. """ tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) try: with ContextualZipFile(filename) as archive: archive.extractall() except zipfile.BadZipfile as err: if not err.args: err.args = ('', ) err.args = err.args + ( MEANINGFUL_INVALID_ZIP_ERR_MSG.format(filename), ) raise # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) yield finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _do_download(version, download_base, to_dir, download_delay): """Download Setuptools.""" py_desig = 'py{sys.version_info[0]}.{sys.version_info[1]}'.format(sys=sys) tp = 'setuptools-{version}-{py_desig}.egg' egg = os.path.join(to_dir, tp.format(**locals())) if not os.path.exists(egg): archive = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, archive, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). if 'pkg_resources' in sys.modules: _unload_pkg_resources() import setuptools setuptools.bootstrap_install_from = egg def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, download_delay=15): """ Ensure that a setuptools version is installed. Return None. Raise SystemExit if the requested version or later cannot be installed. """ to_dir = os.path.abspath(to_dir) # prior to importing, capture the module state for # representative modules. 
rep_modules = 'pkg_resources', 'setuptools' imported = set(sys.modules).intersection(rep_modules) try: import pkg_resources pkg_resources.require("setuptools>=" + version) # a suitable version is already installed return except ImportError: # pkg_resources not available; setuptools is not installed; download pass except pkg_resources.DistributionNotFound: # no version of setuptools was found; allow download pass except pkg_resources.VersionConflict as VC_err: if imported: _conflict_bail(VC_err, version) # otherwise, unload pkg_resources to allow the downloaded version to # take precedence. del pkg_resources _unload_pkg_resources() return _do_download(version, download_base, to_dir, download_delay) def _conflict_bail(VC_err, version): """ Setuptools was imported prior to invocation, so it is unsafe to unload it. Bail out. """ conflict_tmpl = textwrap.dedent(""" The required version of setuptools (>={version}) is not available, and can't be installed while this script is running. Please install a more recent version first, using 'easy_install -U setuptools'. (Currently using {VC_err.args[0]!r}) """) msg = conflict_tmpl.format(**locals()) sys.stderr.write(msg) sys.exit(2) def _unload_pkg_resources(): sys.meta_path = [ importer for importer in sys.meta_path if importer.__class__.__module__ != 'pkg_resources.extern' ] del_modules = [ name for name in sys.modules if name.startswith('pkg_resources') ] for mod_name in del_modules: del sys.modules[mod_name] def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell. Powershell will validate trust. Raise an exception if the command cannot complete. 
""" target = os.path.abspath(target) ps_cmd = ( "[System.Net.WebRequest]::DefaultWebProxy.Credentials = " "[System.Net.CredentialCache]::DefaultCredentials; " '(new-object System.Net.WebClient).DownloadFile("%(url)s", "%(target)s")' % locals() ) cmd = [ 'powershell', '-Command', ps_cmd, ] _clean_check(cmd, target) def has_powershell(): """Determine if Powershell is available.""" if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--location', '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """Use Python to download the file, without connection authentication.""" src = urlopen(url) try: # Read all the data in one block. data = src.read() finally: src.close() # Write all the data in one block to avoid creating a partial file. 
with open(target, "wb") as dst: dst.write(data) download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = ( download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ) viable_downloaders = (dl for dl in downloaders if dl.viable()) return next(viable_downloaders, None) def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, delay=15, downloader_factory=get_best_downloader): """ Download setuptools from a specified location and return its filename. `version` should be a valid setuptools version number that is available as an sdist for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) zip_name = "setuptools-%s.zip" % version url = download_base + zip_name saveto = os.path.join(to_dir, zip_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package. Returns list of command line arguments. 
""" return ['--user'] if options.user_install else [] def _parse_args(): """Parse the command line for options.""" parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) parser.add_option( '--version', help="Specify which version to download", default=DEFAULT_VERSION, ) parser.add_option( '--to-dir', help="Directory to save (and re-use) package", default=DEFAULT_SAVE_DIR, ) options, args = parser.parse_args() # positional arguments are ignored return options def _download_args(options): """Return args for download_setuptools function from cmdline args.""" return dict( version=options.version, download_base=options.download_base, downloader_factory=options.downloader_factory, to_dir=options.to_dir, ) def main(): """Install or upgrade setuptools and EasyInstall.""" options = _parse_args() archive = download_setuptools(**_download_args(options)) return _install(archive, _build_install_args(options)) if __name__ == '__main__': sys.exit(main()) ccdproc-1.3.0.post1/astropy_helpers/ah_bootstrap.py0000664000175000017500000010434413207611674024154 0ustar mseifertmseifert00000000000000""" This bootstrap module contains code for ensuring that the astropy_helpers package will be importable by the time the setup.py script runs. It also includes some workarounds to ensure that a recent-enough version of setuptools is being used for the installation. This module should be the first thing imported in the setup.py of distributions that make use of the utilities in astropy_helpers. 
If the distribution ships with its own copy of astropy_helpers, this module
will first attempt to import from the shipped copy.  However, it will also
check PyPI to see if there are any bug-fix releases on top of the current
version that may be useful to get past platform-specific bugs that have been
fixed.  When running setup.py, use the ``--offline`` command-line option to
disable the auto-upgrade checks.

When this module is imported or otherwise executed it automatically calls a
main function that attempts to read the project's setup.cfg file, which it
checks for a configuration section called ``[ah_bootstrap]``.  The presence of
that section, and options therein, determines the next step taken:  If it
contains an option called ``auto_use`` with a value of ``True``, it will
automatically call the main function of this module called
`use_astropy_helpers` (see that function's docstring for full details).
Otherwise no further action is taken (however,
``ah_bootstrap.use_astropy_helpers`` may be called manually from within the
setup.py script).

Additional options in the ``[ah_bootstrap]`` section of setup.cfg have the
same names as the arguments to `use_astropy_helpers`, and can be used to
configure the bootstrap script when ``auto_use = True``.

See https://github.com/astropy/astropy-helpers for more details, and for the
latest version of this module.
""" import contextlib import errno import imp import io import locale import os import re import subprocess as sp import sys try: from ConfigParser import ConfigParser, RawConfigParser except ImportError: from configparser import ConfigParser, RawConfigParser if sys.version_info[0] < 3: _str_types = (str, unicode) _text_type = unicode PY3 = False else: _str_types = (str, bytes) _text_type = str PY3 = True # What follows are several import statements meant to deal with install-time # issues with either missing or misbehaving pacakges (including making sure # setuptools itself is installed): # Some pre-setuptools checks to ensure that either distribute or setuptools >= # 0.7 is used (over pre-distribute setuptools) if it is available on the path; # otherwise the latest setuptools will be downloaded and bootstrapped with # ``ez_setup.py``. This used to be included in a separate file called # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py try: import pkg_resources _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') # This may raise a DistributionNotFound in which case no version of # setuptools or distribute is properly installed _setuptools = pkg_resources.get_distribution('setuptools') if _setuptools not in _setuptools_req: # Older version of setuptools; check if we have distribute; again if # this results in DistributionNotFound we want to give up _distribute = pkg_resources.get_distribution('distribute') if _setuptools != _distribute: # It's possible on some pathological systems to have an old version # of setuptools and distribute on sys.path simultaneously; make # sure distribute is the one that's used sys.path.insert(1, _distribute.location) _distribute.activate() imp.reload(pkg_resources) except: # There are several types of exceptions that can occur here; if all else # fails bootstrap and use the bootstrapped version from ez_setup import use_setuptools use_setuptools() # typing as a dependency for 1.6.1+ Sphinx causes issues 
when imported after # initializing submodule with ah_boostrap.py # See discussion and references in # https://github.com/astropy/astropy-helpers/issues/302 try: import typing # noqa except ImportError: pass # Note: The following import is required as a workaround to # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this # module now, it will get cleaned up after `run_setup` is called, but that will # later cause the TemporaryDirectory class defined in it to stop working when # used later on by setuptools try: import setuptools.py31compat # noqa except ImportError: pass # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass # End compatibility imports... # In case it didn't successfully import before the ez_setup checks import pkg_resources from setuptools import Distribution from setuptools.package_index import PackageIndex from setuptools.sandbox import run_setup from distutils import log from distutils.debug import DEBUG # TODO: Maybe enable checking for a specific version of astropy_helpers? 
DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' # Defaults for other options DOWNLOAD_IF_NEEDED = True INDEX_URL = 'https://pypi.python.org/simple' USE_GIT = True OFFLINE = False AUTO_UPGRADE = True # A list of all the configuration options and their required types CFG_OPTIONS = [ ('auto_use', bool), ('path', str), ('download_if_needed', bool), ('index_url', str), ('use_git', bool), ('offline', bool), ('auto_upgrade', bool) ] class _Bootstrapper(object): """ Bootstrapper implementation. See ``use_astropy_helpers`` for parameter documentation. """ def __init__(self, path=None, index_url=None, use_git=None, offline=None, download_if_needed=None, auto_upgrade=None): if path is None: path = PACKAGE_NAME if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') if PY3 and not isinstance(path, _text_type): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode self.path = path # Set other option attributes, using defaults where necessary self.index_url = index_url if index_url is not None else INDEX_URL self.offline = offline if offline is not None else OFFLINE # If offline=True, override download and auto-upgrade if self.offline: download_if_needed = False auto_upgrade = False self.download = (download_if_needed if download_if_needed is not None else DOWNLOAD_IF_NEEDED) self.auto_upgrade = (auto_upgrade if auto_upgrade is not None else AUTO_UPGRADE) # If this is a release then the .git directory will not exist so we # should not use git. 
git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) if use_git is None and not git_dir_exists: use_git = False self.use_git = use_git if use_git is not None else USE_GIT # Declared as False by default--later we check if astropy-helpers can be # upgraded from PyPI, but only if not using a source distribution (as in # the case of import from a git submodule) self.is_submodule = False @classmethod def main(cls, argv=None): if argv is None: argv = sys.argv config = cls.parse_config() config.update(cls.parse_command_line(argv)) auto_use = config.pop('auto_use', False) bootstrapper = cls(**config) if auto_use: # Run the bootstrapper, otherwise the setup.py is using the old # use_astropy_helpers() interface, in which case it will run the # bootstrapper manually after reconfiguring it. bootstrapper.run() return bootstrapper @classmethod def parse_config(cls): if not os.path.exists('setup.cfg'): return {} cfg = ConfigParser() try: cfg.read('setup.cfg') except Exception as e: if DEBUG: raise log.error( "Error reading setup.cfg: {0!r}\n{1} will not be " "automatically bootstrapped and package installation may fail." "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) return {} if not cfg.has_section('ah_bootstrap'): return {} config = {} for option, type_ in CFG_OPTIONS: if not cfg.has_option('ah_bootstrap', option): continue if type_ is bool: value = cfg.getboolean('ah_bootstrap', option) else: value = cfg.get('ah_bootstrap', option) config[option] = value return config @classmethod def parse_command_line(cls, argv=None): if argv is None: argv = sys.argv config = {} # For now we just pop recognized ah_bootstrap options out of the # arg list. This is imperfect; in the unlikely case that a setup.py # custom command or even custom Distribution class defines an argument # of the same name then we will break that. 
However there's a catch22 # here that we can't just do full argument parsing right here, because # we don't yet know *how* to parse all possible command-line arguments. if '--no-git' in argv: config['use_git'] = False argv.remove('--no-git') if '--offline' in argv: config['offline'] = True argv.remove('--offline') return config def run(self): strategies = ['local_directory', 'local_file', 'index'] dist = None # First, remove any previously imported versions of astropy_helpers; # this is necessary for nested installs where one package's installer # is installing another package via setuptools.sandbox.run_setup, as in # the case of setup_requires for key in list(sys.modules): try: if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): del sys.modules[key] except AttributeError: # Sometimes mysterious non-string things can turn up in # sys.modules continue # Check to see if the path is a submodule self.is_submodule = self._check_submodule() for strategy in strategies: method = getattr(self, 'get_{0}_dist'.format(strategy)) dist = method() if dist is not None: break else: raise _AHBootstrapSystemExit( "No source found for the {0!r} package; {0} must be " "available and importable as a prerequisite to building " "or installing this package.".format(PACKAGE_NAME)) # This is a bit hacky, but if astropy_helpers was loaded from a # directory/submodule its Distribution object gets a "precedence" of # "DEVELOP_DIST". However, in other cases it gets a precedence of # "EGG_DIST". 
However, when activing the distribution it will only be # placed early on sys.path if it is treated as an EGG_DIST, so always # do that dist = dist.clone(precedence=pkg_resources.EGG_DIST) # Otherwise we found a version of astropy-helpers, so we're done # Just active the found distribution on sys.path--if we did a # download this usually happens automatically but it doesn't hurt to # do it again # Note: Adding the dist to the global working set also activates it # (makes it importable on sys.path) by default. try: pkg_resources.working_set.add(dist, replace=True) except TypeError: # Some (much) older versions of setuptools do not have the # replace=True option here. These versions are old enough that all # bets may be off anyways, but it's easy enough to work around just # in case... if dist.key in pkg_resources.working_set.by_key: del pkg_resources.working_set.by_key[dist.key] pkg_resources.working_set.add(dist) @property def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname)) def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. 
""" if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_index_dist(self): if not self.download: log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) return None log.warn( "Downloading {0!r}; run setup.py with the --offline option to " "force offline installation.".format(DIST_NAME)) try: dist = self._do_download() except Exception as e: if DEBUG: raise log.warn( 'Failed to download and/or install {0!r} from {1!r}:\n' '{2}'.format(DIST_NAME, self.index_url, str(e))) dist = None # No need to run auto-upgrade here since we've already presumably # gotten the most up-to-date version from the package index return dist def 
_directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist def _do_download(self, version='', find_links=None): if find_links: allow_hosts = '' index_url = None else: allow_hosts = None index_url = self.index_url # Annoyingly, setuptools will not handle other arguments to # Distribution (such as options) before handling setup_requires, so it # is not straightforward to programmatically augment the arguments which # are passed to easy_install class _Distribution(Distribution): def get_option_dict(self, command_name): opts = Distribution.get_option_dict(self, command_name) if command_name == 'easy_install': if find_links is not None: opts['find_links'] = ('setup script', find_links) if index_url is not None: opts['index_url'] = ('setup script', index_url) if allow_hosts is not None: opts['allow_hosts'] = ('setup script', allow_hosts) return opts if version: req = '{0}=={1}'.format(DIST_NAME, version) else: req = DIST_NAME attrs = {'setup_requires': [req]} try: if DEBUG: _Distribution(attrs=attrs) else: with _silence(): _Distribution(attrs=attrs) # If the setup_requires succeeded it will 
have added the new dist to # the main working_set return pkg_resources.working_set.by_key.get(DIST_NAME) except Exception as e: if DEBUG: raise msg = 'Error retrieving {0} from {1}:\n{2}' if find_links: source = find_links[0] elif index_url != INDEX_URL: source = index_url else: source = 'PyPI' raise Exception(msg.format(DIST_NAME, source, repr(e))) def _do_upgrade(self, dist): # Build up a requirement for a higher bugfix release but a lower minor # release (so API compatibility is guaranteed) next_version = _next_version(dist.parsed_version) req = pkg_resources.Requirement.parse( '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) package_index = PackageIndex(index_url=self.index_url) upgrade = package_index.obtain(req) if upgrade is not None: return self._do_download(version=upgrade.version) def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git() def _check_submodule_using_git(self): """ Check if the given path is a git submodule. If so, attempt to initialize and/or update the submodule if needed. This function makes calls to the ``git`` command in subprocesses. The ``_check_submodule_no_git`` option uses pure Python to check if the given path looks like a git submodule, but it cannot perform updates. """ cmd = ['git', 'submodule', 'status', '--', self.path] try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except _CommandNotFound: # The git command simply wasn't found; this is most likely the # case on user systems that don't have git and are simply # trying to install the package from PyPI or a source # distribution. 
Silently ignore this case and simply don't try # to use submodules return False stderr = stderr.strip() if returncode != 0 and stderr: # Unfortunately the return code alone cannot be relied on, as # earlier versions of git returned 0 even if the requested submodule # does not exist # This is a warning that occurs in perl (from running git submodule) # which only occurs with a malformatted locale setting which can # happen sometimes on OSX. See again # https://github.com/astropy/astropy/issues/2749 perl_warning = ('perl: warning: Falling back to the standard locale ' '("C").') if not stderr.strip().endswith(perl_warning): # Some other unknown error condition occurred log.warn('git submodule command failed ' 'unexpectedly:\n{0}'.format(stderr)) return False # Output of `git submodule status` is as follows: # # 1: Status indicator: '-' for submodule is uninitialized, '+' if # submodule is initialized but is not at the commit currently indicated # in .gitmodules (and thus needs to be updated), or 'U' if the # submodule is in an unstable state (i.e. has merge conflicts) # # 2. SHA-1 hash of the current commit of the submodule (we don't really # need this information but it's useful for checking that the output is # correct) # # 3. The output of `git describe` for the submodule's current commit # hash (this includes for example what branches the commit is on) but # only if the submodule is initialized. 
We ignore this information for
        # now
        _git_submodule_status_re = re.compile(
            '^(?P<status>[+-U ])(?P<sha1>[0-9a-f]{40}) '
            '(?P<submodule>\S+)( .*)?$')

        # The stdout should only contain one line--the status of the
        # requested submodule
        m = _git_submodule_status_re.match(stdout)
        if m:
            # Yes, the path *is* a git submodule
            self._update_submodule(m.group('submodule'), m.group('status'))
            return True
        else:
            log.warn(
                'Unexpected output from `git submodule status`:\n{0}\n'
                'Will attempt import from {1!r} regardless.'.format(
                    stdout, self.path))
            return False

    def _check_submodule_no_git(self):
        """
        Like ``_check_submodule_using_git``, but simply parses the .gitmodules
        file to determine if the supplied path is a git submodule, and does not
        exec any subprocesses.

        This can only determine if a path is a submodule--it does not perform
        updates, etc.  This function may need to be updated if the format of
        the .gitmodules file is changed between git versions.
        """

        gitmodules_path = os.path.abspath('.gitmodules')

        if not os.path.isfile(gitmodules_path):
            return False

        # This is a minimal reader for gitconfig-style files.  It handles a few of
        # the quirks that make gitconfig files incompatible with ConfigParser-style
        # files, but does not support the full gitconfig syntax (just enough
        # needed to read a .gitmodules file).
        gitmodules_fileobj = io.StringIO()

        # Must use io.open for cross-Python-compatible behavior wrt unicode
        with io.open(gitmodules_path) as f:
            for line in f:
                # gitconfig files are more flexible with leading whitespace; just
                # go ahead and remove it
                line = line.lstrip()

                # comments can start with either # or ;
                if line and line[0] in ('#', ';'):
                    continue

                gitmodules_fileobj.write(line)

        gitmodules_fileobj.seek(0)

        cfg = RawConfigParser()

        try:
            cfg.readfp(gitmodules_fileobj)
        except Exception as exc:
            log.warn('Malformatted .gitmodules file: {0}\n'
                     '{1} cannot be assumed to be a git submodule.'.format(
                         exc, self.path))
            return False

        for section in cfg.sections():
            if not cfg.has_option(section, 'path'):
                continue

            submodule_path = cfg.get(section, 'path').rstrip(os.sep)

            if submodule_path == self.path.rstrip(os.sep):
                return True

        return False

    def _update_submodule(self, submodule, status):
        if status == ' ':
            # The submodule is up to date; no action necessary
            return
        elif status == '-':
            if self.offline:
                raise _AHBootstrapSystemExit(
                    "Cannot initialize the {0} submodule in --offline mode; "
                    "this requires being able to clone the submodule from an "
                    "online repository.".format(submodule))
            cmd = ['update', '--init']
            action = 'Initializing'
        elif status == '+':
            cmd = ['update']
            action = 'Updating'
            if self.offline:
                cmd.append('--no-fetch')
        elif status == 'U':
            raise _AHBootstrapSystemExit(
                'Error: Submodule {0} contains unresolved merge conflicts.  '
                'Please complete or abandon any changes in the submodule so that '
                'it is in a usable state, then try again.'.format(submodule))
        else:
            log.warn('Unknown status {0!r} for git submodule {1!r}.
Will ' 'attempt to use the submodule as-is, but try to ensure ' 'that the submodule is in a clean state and contains no ' 'conflicts or errors.\n{2}'.format(status, submodule, _err_help_msg)) return err_msg = None cmd = ['git', 'submodule'] + cmd + ['--', submodule] log.warn('{0} {1} submodule with: `{2}`'.format( action, submodule, ' '.join(cmd))) try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except OSError as e: err_msg = str(e) else: if returncode != 0: err_msg = stderr if err_msg is not None: log.warn('An unexpected error occurred updating the git submodule ' '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) class _CommandNotFound(OSError): """ An exception raised when a command run with run_cmd is not found on the system. """ def run_cmd(cmd): """ Run a command in a subprocess, given as a list of command-line arguments. Returns a ``(returncode, stdout, stderr)`` tuple. """ try: p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) # XXX: May block if either stdout or stderr fill their buffers; # however for the commands this is currently used for that is # unlikely (they should have very brief output) stdout, stderr = p.communicate() except OSError as e: if DEBUG: raise if e.errno == errno.ENOENT: msg = 'Command not found: `{0}`'.format(' '.join(cmd)) raise _CommandNotFound(msg, cmd) else: raise _AHBootstrapSystemExit( 'An unexpected error occurred when running the ' '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) # Can fail of the default locale is not configured properly. See # https://github.com/astropy/astropy/issues/2749. For the purposes under # consideration 'latin1' is an acceptable fallback. try: stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' except ValueError: # Due to an OSX oddity locale.getdefaultlocale() can also crash # depending on the user's locale/language settings. 
See: # http://bugs.python.org/issue18378 stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible if not isinstance(stdout, _text_type): stdout = stdout.decode(stdio_encoding, 'replace') if not isinstance(stderr, _text_type): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) def _next_version(version): """ Given a parsed version from pkg_resources.parse_version, returns a new version string with the next minor version. Examples ======== >>> _next_version(pkg_resources.parse_version('1.2.3')) '1.3.0' """ if hasattr(version, 'base_version'): # New version parsing from setuptools >= 8.0 if version.base_version: parts = version.base_version.split('.') else: parts = [] else: parts = [] for part in version: if part.startswith('*'): break parts.append(part) parts = [int(p) for p in parts] if len(parts) < 3: parts += [0] * (3 - len(parts)) major, minor, micro = parts[:3] return '{0}.{1}.{2}'.format(major, minor + 1, 0) class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x encoding = 'utf-8' def write(self, s): pass def flush(self): pass @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr _err_help_msg = """ If the problem persists consider installing astropy_helpers manually using pip (`pip install astropy_helpers`) or by manually downloading the source archive, extracting it, and installing by running `python setup.py install` from the root of the extracted source code. 
""" class _AHBootstrapSystemExit(SystemExit): def __init__(self, *args): if not args: msg = 'An unknown problem occurred bootstrapping astropy_helpers.' else: msg = args[0] msg += '\n' + _err_help_msg super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) BOOTSTRAPPER = _Bootstrapper.main() def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. 
auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run() ccdproc-1.3.0.post1/astropy_helpers/README.rst0000664000175000017500000000502113207611674022574 0ustar mseifertmseifert00000000000000astropy-helpers =============== * Stable versions: https://pypi.org/project/astropy-helpers/ * Development version, issue tracker: https://github.com/astropy/astropy-helpers This project provides a Python package, ``astropy_helpers``, which includes many build, installation, and documentation-related tools used by the Astropy project, but packaged separately for use by other projects that wish to leverage this work. The motivation behind this package and details of its implementation are in the accepted `Astropy Proposal for Enhancement (APE) 4 `_. The ``astropy_helpers.extern`` sub-module includes modules developed elsewhere that are bundled here for convenience. At the moment, this consists of the following two sphinx extensions: * `numpydoc `_, a Sphinx extension developed as part of the Numpy project. This is used to parse docstrings in Numpy format * `sphinx-automodapi `_, a Sphinx developed as part of the Astropy project. 
This used to be developed directly in ``astropy-helpers`` but is now a standalone package. Issues with these sub-modules should be reported in their respective repositories, and we will regularly update the bundled versions to reflect the latest released versions. ``astropy_helpers`` includes a special "bootstrap" module called ``ah_bootstrap.py`` which is intended to be used by a project's setup.py in order to ensure that the ``astropy_helpers`` package is available for build/installation. This is similar to the ``ez_setup.py`` module that is shipped with some projects to bootstrap `setuptools `_. As described in APE4, the version numbers for ``astropy_helpers`` follow the corresponding major/minor version of the `astropy core package `_, but with an independent sequence of micro (bugfix) version numbers. Hence, the initial release is 0.4, in parallel with Astropy v0.4, which will be the first version of Astropy to use ``astropy-helpers``. For examples of how to implement ``astropy-helpers`` in a project, see the ``setup.py`` and ``setup.cfg`` files of the `Affiliated package template `_. .. image:: https://travis-ci.org/astropy/astropy-helpers.svg :target: https://travis-ci.org/astropy/astropy-helpers .. image:: https://coveralls.io/repos/astropy/astropy-helpers/badge.svg :target: https://coveralls.io/r/astropy/astropy-helpers ccdproc-1.3.0.post1/licenses/0000775000175000017500000000000013207623133017462 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/licenses/LICENSE_STSCI_TOOLS.txt0000664000175000017500000000266713207605210023241 0ustar mseifertmseifert00000000000000Copyright (C) 2005 Association of Universities for Research in Astronomy (AURA) Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of AURA and its representatives may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ccdproc-1.3.0.post1/licenses/README.rst0000664000175000017500000000037313207605210021150 0ustar mseifertmseifert00000000000000Licenses ======== This directory holds license and credit information for works the ccdproc package is derived from or distributes, and/or datasets. The license file for the ccdproc package itself is placed in the root directory of this repository. ccdproc-1.3.0.post1/PKG-INFO0000664000175000017500000000103113207623133016745 0ustar mseifertmseifert00000000000000Metadata-Version: 1.1 Name: ccdproc Version: 1.3.0.post1 Summary: Astropy affiliated package Home-page: http://ccdproc.readthedocs.io/ Author: Steve Crawford and Matt Craig Author-email: ccdproc@gmail.com License: BSD Description-Content-Type: UNKNOWN Description: The ccdproc package is a collection of code that will be helpful in basic CCD processing. 
These steps will allow reduction of basic CCD data as either a stand-alone processing or as part of a pipeline. Platform: UNKNOWN Provides: ccdproc ccdproc-1.3.0.post1/setup.cfg0000664000175000017500000000371013207617404017503 0ustar mseifertmseifert00000000000000[build_sphinx] source-dir = docs build-dir = docs/_build all_files = 1 [upload_docs] upload-dir = docs/_build/html show-response = 1 [pytest] minversion = 2.2 norecursedirs = build docs/_build doctest_plus = enabled [ah_bootstrap] auto_use = True [metadata] package_name = ccdproc description = Astropy affiliated package long_description = This is a package for reducing optical/IR CCD data that relies on astropy author = Steve Crawford and Matt Craig author_email = ccdproc@gmail.com license = BSD url = http://ccdproc.readthedocs.io/ edit_on_github = False github_project = astropy/ccdproc [pycodestyle] # PEP8 errors/warnings: # (partially) taken from # https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes # E101 - mix of tabs and spaces # E111 - 4 spaces per indentation level # E112 - 4 spaces per indentation level # E113 - 4 spaces per indentation level # E221 - multiple spaces before operator # E222 - multiple spaces after operator # E223 - tab before operator # E224 - tab after operator # E225 - missing whitespace around operator # E241 - multiple whitespace after ',' # E242 - tab after ‘,’ # E251 - unexpected spaces around keyword / parameter equals # E271 - multiple spaces after keyword # E272 - multiple spaces before keyword # E303 - too many blank lines # E304 - blank lines found after function decorator # E502 - the backslash is redundant between brackets # E703 - statement ends with a semicolon # E901 - SyntaxError or IndentationError # E902 - IOError # W191 - indentation contains tabs # W291 - trailing whitespace # W292 - no newline at end of file # W293 - blank line contains whitespace # W391 - blank line at end of file select = 
E101,E111,E112,E113,E221,E222,E223,E224,E225,E241,E242,E251,E271,E272,E303,E304,E502,E703,E901,E902,W191,W291,W292,W293,W391 # PEP errors to ignore # ignore = ... # Excluding files that are directly copied from the package template or # generated exclude = _astropy_init.py,version.py [entry_points] ccdproc-1.3.0.post1/CHANGES.rst0000664000175000017500000002501413207617404017465 0ustar mseifertmseifert000000000000001.3.0 (2017-11-1) ----------------- New Features ^^^^^^^^^^^^ - Add representation for ImageFileCollection. [#475, #515] - Added ext parameter and property to ImageFileCollection to specify the FITS extension. [#463] - Add keywords.deleter method to ImageFileCollection. [#474] - Added ``glob_include`` and ``glob_exclude`` parameter to ``ImageFileCollection``. [#484] - Add ``bitfield_to_boolean_mask`` function to convert a ``bitfield`` to a boolean mask (following the numpy conventions). [#460] - Added ``gain_corrected`` option in ccd_process so that calibration files do not need to previously been gain corrected. [#491] - Add a new ``wcs_relax`` argument to ``CCDData.to_header()`` that is passed through to the ``WCS`` method of the same name to allow more flexible handing of headers with SIP distortion. [#501] - ``combine`` now accepts ``numpy.ndarray`` as the input ``img_list``. [#493, #503] - Added ``sum`` option in method for ``combime``. [#500, #508] - Add ``norm_value`` argument to ``flat_correct`` that allows the normalization of the flat frame to be manually specified. [#584, #577] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - removed ability to set unit of CCDData to None. [#451] - deprecated ``summary_info`` property of ``ImageFileCollection`` now raises a deprecation warning. [#486] - Logging will include the abbreviation even if the ``meta`` attribute of the processed ``CCDData`` isn't a ``fits.Header``. 
[#528] - The ``CCDData`` class and the functions ``fits_ccddata_reader`` and ``fits_ccddata_writer`` will be imported from ``astropy.nddata`` if astropy >= 2.0 is installed (instead of the one defined in ``ccdproc``). [#528] - Building the documentation requires astropy >= 2.0. [#528] - When reading a ``CCDData`` from a file the WCS-related keywords are removed from the header. [#568] - The ``info_file`` argument for ``ImageFileCollection`` is now deprecated. [#585] Bug Fixes ^^^^^^^^^ - ``ImageFileCollection`` now handles Headers with duplicated keywords (other than ``COMMENT`` and ``HISTORY``) by ignoring all but the first. [#467] - The ``ccd`` method of ``ImageFileCollection`` will raise an ``NotImplementedError`` in case the parameter ``overwrite=True`` or ``clobber=True`` is used instead of silently ignoring the parameter. [#527] - The ``sort`` method of ``ImageFileCollection`` now requires an explicitly given ``keys`` argument. [#534] - Fixed a problem with ``CCDData.read`` when the extension wasn't given and the primary HDU contained no ``data`` but another HDU did. In that case the header were not correctly combined. [#541] - Suppress errors during WCS creation in CCDData.read(). [#552] - The generator methods in ``ImageFileCollection`` now don't leave open file handles in case the iterator wasn't advanced or an exception was raised either inside the method itself or during the loop. [#553] - Allow non-string columns when filtering an ``ImageFileCollection`` with a string value. [#567] 1.2.0 (2016-12-13) ------------------ ccdproc has now the following additional dependency: - scikit-image. New Features ^^^^^^^^^^^^ - Add an optional attribute named ``filenames`` to ``ImageFileCollection``, so that users can pass a list of FITS files to the collection. [#374, #403] - Added ``block_replicate``, ``block_reduce`` and ``block_average`` functions. [#402] - Added ``median_filter`` function. 
[#420] - ``combine`` now takes an additional ``combine_uncertainty_function`` argument which is passed as ``uncertainty_func`` parameter to ``Combiner.median_combine`` or ``Combiner.average_combine``. [#416] - Added ``ccdmask`` function. [#414, #432] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ccdprocs core functions now explicitly add HIERARCH cards. [#359, #399, #413] - ``combine`` now accepts a ``dtype`` argument which is passed to ``Combiner.__init__``. [#391, #392] - Removed ``CaseInsensitiveOrderedDict`` because it is not used in the current code base. [#428] Bug Fixes ^^^^^^^^^ - The default dtype of the ``combine``-result doesn't depend on the dtype of the first CCDData anymore. This also corrects the memory consumption calculation. [#391, #392] - ``ccd_process`` now copies the meta of the input when subtracting the master bias. [#404] - Fixed ``combine`` with ``CCDData`` objects using ``StdDevUncertainty`` as uncertainty. [#416, #424] - ``ccds`` generator from ``ImageFileCollection`` now uses the full path to the file when calling ``fits_ccddata_reader``. [#421 #422] 1.1.0 (2016-08-01) ------------------ New Features ^^^^^^^^^^^^ - Add an additional combination method, ``clip_extrema``, that drops the highest and/or lowest pixels in an image stack. [#356, #358] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ``cosmicray_lacosmic`` default ``satlevel`` changed from 65536 to 65535. [#347] - Auto-identify files with extension ``fts`` as FITS files. [#355, #364] - Raise more explicit exception if unit of uncalibrated image and master do not match in ``subtract_bias`` or ``subtract_dark``. [#361, #366] - Updated the ``Combiner`` class so that it could process images with >2 dimensions. [#340, #375] Bug Fixes ^^^^^^^^^ - ``Combiner`` creates plain array uncertainties when using``average_combine`` or ``median_combine``. [#351] - ``flat_correct`` does not properly scale uncertainty in the flat. 
[#345, #363] - Error message in weights setter fixed. [#376] 1.0.1 (2016-03-15) ------------------ The 1.0.1 release was a release to fix some minor packaging issues. 1.0.0 (2016-03-15) ------------------ General ^^^^^^^ - ccdproc has now the following requirements: - Python 2.7 or 3.4 or later. - astropy 1.0 or later - numpy 1.9 or later - scipy - astroscrappy - reproject New Features ^^^^^^^^^^^^ - Add a WCS setter for ``CCDData``. [#256] - Allow user to set the function used for uncertainty calculation in ``average_combine`` and ``median_combine``. [#258] - Add a new keyword to ImageFileCollection.files_filtered to return the full path to a file [#275] - Added ccd_process for handling multiple steps. [#211] - CCDData.write now writes multi-extension-FITS files. The mask and uncertainty are saved as extensions if these attributes were set. The name of the extensions can be altered with the parameters ``hdu_mask`` (default extension name ``'MASK'``) and ``hdu_uncertainty`` (default ``'UNCERT'``). CCDData.read can read these files and has the same optional parameters. [#302] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Issue warning if there are no FITS images in an ``ImageFileCollection``. [#246] - The overscan_axis argument in subtract_overscan can now be set to None, to let subtract_overscan provide a best guess for the axis. [#263] - Add support for wildcard and reversed FITS style slicing. [#265] - When reading a FITS file with CCDData.read, if no data exists in the primary hdu, the resultant header object is a combination of the header information in the primary hdu and the first hdu with data. [#271] - Changed cosmicray_lacosmic to use astroscrappy for cleaning cosmic rays. [#272] - CCDData arithmetic with number/Quantity now preserves any existing WCS. [#278] - Update astropy_helpers to 1.1.1. [#287] - Drop support for Python 2.6. [#300] - The ``add_keyword`` parameter now has a default of ``True``, to be more explicit. 
[#310] - Return name of file instead of full path in ``ImageFileCollection`` generators. [#315] Bug Fixes ^^^^^^^^^ - Adding/Subtracting a CCDData instance with a Quantity with a different unit produced wrong results. [#291] - The uncertainty resulting when combining CCDData will be divided by the square root of the number of combined pixel [#309] - Improve documentation for read/write methods on ``CCDData`` [#320] - Add correct path separator when returning full path from ``ImageFileCollection.files_filtered``. [#325] 0.3.3 (2015-10-24) ------------------ New Features ^^^^^^^^^^^^ - add a ``sort`` method to ImageFileCollection [#274] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Opt in to new container-based builds on travis. [#227] - Update astropy_helpers to 1.0.5. [#245] Bug Fixes ^^^^^^^^^ - Ensure that creating a WCS from a header that contains list-like keywords (e.g. ``BLANK`` or ``HISTORY``) succeeds. [#229, #231] 0.3.2 (never released) ---------------------- There was no 0.3.2 release because of a packaging error. 0.3.1 (2015-05-12) ------------------ New Features ^^^^^^^^^^^^ - Add CCDData generator for ImageCollection [#405] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Add extensive tests to ensure ``ccdproc`` functions do not modify the input data. [#208] - Remove red-box warning about API stability from docs. [#210] - Support astropy 1.0.5, which made changes to ``NDData``. [#242] Bug Fixes ^^^^^^^^^ - Make ``subtract_overscan`` act on a copy of the input data. [#206] - Overscan subtraction failed on non-square images if the overscan axis was the first index, ``0``. [#240, #244] 0.3.0 (2015-03-17) ------------------ New Features ^^^^^^^^^^^^ - When reading in a FITS file, the extension to be used can be specified. If it is not and there is no data in the primary extension, the first extension with data will be used. 
- Set wcs attribute when reading from a FITS file that contains WCS keywords and write WCS keywords to header when converting to an HDU. [#195] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Updated CCDData to use the new version of NDDATA in astropy v1.0. This breaks backward compatibility with earlier versions of astropy. Bug Fixes ^^^^^^^^^ - Ensure ``dtype`` of combined images matches the ``dtype`` of the ``Combiner`` object. [#189] 0.2.2 (2014-11-05) ------------------ New Features ^^^^^^^^^^^^ - Add dtype argument to `ccdproc.Combiner` to help control memory use [#178] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Added Changes to the docs [#183] Bug Fixes ^^^^^^^^^ - Allow the unit string "adu" to be upper or lower case in a FITS header [#182] 0.2.1 (2014-09-09) ------------------ New Features ^^^^^^^^^^^^ - Add a unit directly from BUNIT if it is available in the FITS header [#169] Other Changes and Additions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Relaxed the requirements on what the metadata must be. It can be anything dict-like, e.g. an astropy.io.fits.Header, a python dict, an OrderedDict or some custom object created by the user. [#167] Bug Fixes ^^^^^^^^^ - Fixed a new-style formating issue in the logging [#170] 0.2 (2014-07-28) ---------------- - Initial release. ccdproc-1.3.0.post1/ez_setup.py0000664000175000017500000003037113207605210020065 0ustar mseifertmseifert00000000000000#!/usr/bin/env python """ Setuptools bootstrapping installer. Maintained at https://github.com/pypa/setuptools/tree/bootstrap. Run this script to install or upgrade setuptools. This method is DEPRECATED. Check https://github.com/pypa/setuptools/issues/581 for more details. 
""" import os import shutil import sys import tempfile import zipfile import optparse import subprocess import platform import textwrap import contextlib from distutils import log try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen try: from site import USER_SITE except ImportError: USER_SITE = None # 33.1.1 is the last version that supports setuptools self upgrade/installation. DEFAULT_VERSION = "33.1.1" DEFAULT_URL = "https://pypi.io/packages/source/s/setuptools/" DEFAULT_SAVE_DIR = os.curdir DEFAULT_DEPRECATION_MESSAGE = "ez_setup.py is deprecated and when using it setuptools will be pinned to {0} since it's the last version that supports setuptools self upgrade/installation, check https://github.com/pypa/setuptools/issues/581 for more info; use pip to install setuptools" MEANINGFUL_INVALID_ZIP_ERR_MSG = 'Maybe {0} is corrupted, delete it and try again.' log.warn(DEFAULT_DEPRECATION_MESSAGE.format(DEFAULT_VERSION)) def _python_cmd(*args): """ Execute a command. Return True if the command succeeded. 
""" args = (sys.executable,) + args return subprocess.call(args) == 0 def _install(archive_filename, install_args=()): """Install Setuptools.""" with archive_context(archive_filename): # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 def _build_egg(egg, archive_filename, to_dir): """Build Setuptools egg.""" with archive_context(archive_filename): # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') class ContextualZipFile(zipfile.ZipFile): """Supplement ZipFile class to support context manager for Python 2.6.""" def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __new__(cls, *args, **kwargs): """Construct a ZipFile or ContextualZipFile as appropriate.""" if hasattr(zipfile.ZipFile, '__exit__'): return zipfile.ZipFile(*args, **kwargs) return super(ContextualZipFile, cls).__new__(cls) @contextlib.contextmanager def archive_context(filename): """ Unzip filename to a temporary directory, set to the cwd. The unzipped target is cleaned up after. 
""" tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) try: with ContextualZipFile(filename) as archive: archive.extractall() except zipfile.BadZipfile as err: if not err.args: err.args = ('', ) err.args = err.args + ( MEANINGFUL_INVALID_ZIP_ERR_MSG.format(filename), ) raise # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) yield finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _do_download(version, download_base, to_dir, download_delay): """Download Setuptools.""" py_desig = 'py{sys.version_info[0]}.{sys.version_info[1]}'.format(sys=sys) tp = 'setuptools-{version}-{py_desig}.egg' egg = os.path.join(to_dir, tp.format(**locals())) if not os.path.exists(egg): archive = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, archive, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). if 'pkg_resources' in sys.modules: _unload_pkg_resources() import setuptools setuptools.bootstrap_install_from = egg def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, download_delay=15): """ Ensure that a setuptools version is installed. Return None. Raise SystemExit if the requested version or later cannot be installed. """ to_dir = os.path.abspath(to_dir) # prior to importing, capture the module state for # representative modules. 
rep_modules = 'pkg_resources', 'setuptools' imported = set(sys.modules).intersection(rep_modules) try: import pkg_resources pkg_resources.require("setuptools>=" + version) # a suitable version is already installed return except ImportError: # pkg_resources not available; setuptools is not installed; download pass except pkg_resources.DistributionNotFound: # no version of setuptools was found; allow download pass except pkg_resources.VersionConflict as VC_err: if imported: _conflict_bail(VC_err, version) # otherwise, unload pkg_resources to allow the downloaded version to # take precedence. del pkg_resources _unload_pkg_resources() return _do_download(version, download_base, to_dir, download_delay) def _conflict_bail(VC_err, version): """ Setuptools was imported prior to invocation, so it is unsafe to unload it. Bail out. """ conflict_tmpl = textwrap.dedent(""" The required version of setuptools (>={version}) is not available, and can't be installed while this script is running. Please install a more recent version first, using 'easy_install -U setuptools'. (Currently using {VC_err.args[0]!r}) """) msg = conflict_tmpl.format(**locals()) sys.stderr.write(msg) sys.exit(2) def _unload_pkg_resources(): sys.meta_path = [ importer for importer in sys.meta_path if importer.__class__.__module__ != 'pkg_resources.extern' ] del_modules = [ name for name in sys.modules if name.startswith('pkg_resources') ] for mod_name in del_modules: del sys.modules[mod_name] def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell. Powershell will validate trust. Raise an exception if the command cannot complete. 
""" target = os.path.abspath(target) ps_cmd = ( "[System.Net.WebRequest]::DefaultWebProxy.Credentials = " "[System.Net.CredentialCache]::DefaultCredentials; " '(new-object System.Net.WebClient).DownloadFile("%(url)s", "%(target)s")' % locals() ) cmd = [ 'powershell', '-Command', ps_cmd, ] _clean_check(cmd, target) def has_powershell(): """Determine if Powershell is available.""" if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--location', '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """Use Python to download the file, without connection authentication.""" src = urlopen(url) try: # Read all the data in one block. data = src.read() finally: src.close() # Write all the data in one block to avoid creating a partial file. 
with open(target, "wb") as dst: dst.write(data) download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = ( download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ) viable_downloaders = (dl for dl in downloaders if dl.viable()) return next(viable_downloaders, None) def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, delay=15, downloader_factory=get_best_downloader): """ Download setuptools from a specified location and return its filename. `version` should be a valid setuptools version number that is available as an sdist for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) zip_name = "setuptools-%s.zip" % version url = download_base + zip_name saveto = os.path.join(to_dir, zip_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package. Returns list of command line arguments. 
""" return ['--user'] if options.user_install else [] def _parse_args(): """Parse the command line for options.""" parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) parser.add_option( '--version', help="Specify which version to download", default=DEFAULT_VERSION, ) parser.add_option( '--to-dir', help="Directory to save (and re-use) package", default=DEFAULT_SAVE_DIR, ) options, args = parser.parse_args() # positional arguments are ignored return options def _download_args(options): """Return args for download_setuptools function from cmdline args.""" return dict( version=options.version, download_base=options.download_base, downloader_factory=options.downloader_factory, to_dir=options.to_dir, ) def main(): """Install or upgrade setuptools and EasyInstall.""" options = _parse_args() archive = download_setuptools(**_download_args(options)) return _install(archive, _build_install_args(options)) if __name__ == '__main__': sys.exit(main()) ccdproc-1.3.0.post1/setup.py0000775000175000017500000000751413207623051017400 0ustar mseifertmseifert00000000000000#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import glob import os import sys import ah_bootstrap from setuptools import setup # A dirty hack to get around some early import/configurations ambiguities if sys.version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = True from astropy_helpers.setup_helpers import ( register_commands, get_debug_option, get_package_info) from 
astropy_helpers.git_helpers import get_git_devstr from astropy_helpers.version_helpers import generate_version_py # Get some values from the setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read(['setup.cfg']) metadata = dict(conf.items('metadata')) PACKAGENAME = metadata.get('package_name', 'packagename') DESCRIPTION = metadata.get('description', 'Astropy affiliated package') AUTHOR = metadata.get('author', '') AUTHOR_EMAIL = metadata.get('author_email', '') LICENSE = metadata.get('license', 'unknown') URL = metadata.get('url', 'http://astropy.org') # Get the long description from the package's docstring __import__(PACKAGENAME) package = sys.modules[PACKAGENAME] LONG_DESCRIPTION = package.__doc__ # Store the package name in a built-in variable so it's easy # to get from other parts of the setup infrastructure builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME # VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) VERSION = '1.3.0.post1' # Indicates if this version is a release version RELEASE = 'dev' not in VERSION if not RELEASE: VERSION += get_git_devstr(False) # Populate the dict of setup command overrides; this should be done before # invoking any other functionality from distutils since it can potentially # modify distutils' behavior. cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE) # Freeze build information in version.py generate_version_py(PACKAGENAME, VERSION, RELEASE, get_debug_option(PACKAGENAME)) # Treat everything in scripts except README.rst as a script to be installed scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) if os.path.basename(fname) != 'README.rst'] # Get configuration information from all of the various subpackages. # See the docstring for setup_helpers.update_package_files for more # details. 
package_info = get_package_info() # Add the project-global data package_info['package_data'].setdefault(PACKAGENAME, []) package_info['package_data'][PACKAGENAME].append('data/*') # Define entry points for command-line scripts entry_points = {'console_scripts': []} entry_point_list = conf.items('entry_points') for entry_point in entry_point_list: entry_points['console_scripts'].append('{0} = {1}'.format(entry_point[0], entry_point[1])) # Include all .c files, recursively, including those generated by # Cython, since we can not do this in MANIFEST.in with a "dynamic" # directory name. c_files = [] for root, dirs, files in os.walk(PACKAGENAME): for filename in files: if filename.endswith('.c'): c_files.append( os.path.join( os.path.relpath(root, PACKAGENAME), filename)) package_info['package_data'][PACKAGENAME].extend(c_files) setup(name=PACKAGENAME, version=VERSION, description=DESCRIPTION, scripts=scripts, install_requires=['astropy>=1.0', 'numpy', 'scipy', 'astroscrappy', 'reproject', 'scikit-image'], provides=[PACKAGENAME], author=AUTHOR, author_email=AUTHOR_EMAIL, license=LICENSE, url=URL, long_description=LONG_DESCRIPTION, cmdclass=cmdclassd, zip_safe=False, use_2to3=False, entry_points=entry_points, **package_info ) ccdproc-1.3.0.post1/ah_bootstrap.py0000664000175000017500000010434413207605210020716 0ustar mseifertmseifert00000000000000""" This bootstrap module contains code for ensuring that the astropy_helpers package will be importable by the time the setup.py script runs. It also includes some workarounds to ensure that a recent-enough version of setuptools is being used for the installation. This module should be the first thing imported in the setup.py of distributions that make use of the utilities in astropy_helpers. If the distribution ships with its own copy of astropy_helpers, this module will first attempt to import from the shipped copy. 
However, it will also check PyPI to see if there are any bug-fix releases on top of the current version that may be useful to get past platform-specific bugs that have been fixed. When running setup.py, use the ``--offline`` command-line option to disable the auto-upgrade checks. When this module is imported or otherwise executed it automatically calls a main function that attempts to read the project's setup.cfg file, which it checks for a configuration section called ``[ah_bootstrap]``. The presence of that section, and options therein, determines the next step taken: If it contains an option called ``auto_use`` with a value of ``True``, it will automatically call the main function of this module called `use_astropy_helpers` (see that function's docstring for full details). Otherwise no further action is taken (however, ``ah_bootstrap.use_astropy_helpers`` may be called manually from within the setup.py script). Additional options in the ``[ah_bootstrap]`` section of setup.cfg have the same names as the arguments to `use_astropy_helpers`, and can be used to configure the bootstrap script when ``auto_use = True``. See https://github.com/astropy/astropy-helpers for more details, and for the latest version of this module. 
""" import contextlib import errno import imp import io import locale import os import re import subprocess as sp import sys try: from ConfigParser import ConfigParser, RawConfigParser except ImportError: from configparser import ConfigParser, RawConfigParser if sys.version_info[0] < 3: _str_types = (str, unicode) _text_type = unicode PY3 = False else: _str_types = (str, bytes) _text_type = str PY3 = True # What follows are several import statements meant to deal with install-time # issues with either missing or misbehaving pacakges (including making sure # setuptools itself is installed): # Some pre-setuptools checks to ensure that either distribute or setuptools >= # 0.7 is used (over pre-distribute setuptools) if it is available on the path; # otherwise the latest setuptools will be downloaded and bootstrapped with # ``ez_setup.py``. This used to be included in a separate file called # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py try: import pkg_resources _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') # This may raise a DistributionNotFound in which case no version of # setuptools or distribute is properly installed _setuptools = pkg_resources.get_distribution('setuptools') if _setuptools not in _setuptools_req: # Older version of setuptools; check if we have distribute; again if # this results in DistributionNotFound we want to give up _distribute = pkg_resources.get_distribution('distribute') if _setuptools != _distribute: # It's possible on some pathological systems to have an old version # of setuptools and distribute on sys.path simultaneously; make # sure distribute is the one that's used sys.path.insert(1, _distribute.location) _distribute.activate() imp.reload(pkg_resources) except: # There are several types of exceptions that can occur here; if all else # fails bootstrap and use the bootstrapped version from ez_setup import use_setuptools use_setuptools() # typing as a dependency for 1.6.1+ Sphinx causes issues 
when imported after # initializing submodule with ah_boostrap.py # See discussion and references in # https://github.com/astropy/astropy-helpers/issues/302 try: import typing # noqa except ImportError: pass # Note: The following import is required as a workaround to # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this # module now, it will get cleaned up after `run_setup` is called, but that will # later cause the TemporaryDirectory class defined in it to stop working when # used later on by setuptools try: import setuptools.py31compat # noqa except ImportError: pass # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass # End compatibility imports... # In case it didn't successfully import before the ez_setup checks import pkg_resources from setuptools import Distribution from setuptools.package_index import PackageIndex from setuptools.sandbox import run_setup from distutils import log from distutils.debug import DEBUG # TODO: Maybe enable checking for a specific version of astropy_helpers? 
DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' # Defaults for other options DOWNLOAD_IF_NEEDED = True INDEX_URL = 'https://pypi.python.org/simple' USE_GIT = True OFFLINE = False AUTO_UPGRADE = True # A list of all the configuration options and their required types CFG_OPTIONS = [ ('auto_use', bool), ('path', str), ('download_if_needed', bool), ('index_url', str), ('use_git', bool), ('offline', bool), ('auto_upgrade', bool) ] class _Bootstrapper(object): """ Bootstrapper implementation. See ``use_astropy_helpers`` for parameter documentation. """ def __init__(self, path=None, index_url=None, use_git=None, offline=None, download_if_needed=None, auto_upgrade=None): if path is None: path = PACKAGE_NAME if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') if PY3 and not isinstance(path, _text_type): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode self.path = path # Set other option attributes, using defaults where necessary self.index_url = index_url if index_url is not None else INDEX_URL self.offline = offline if offline is not None else OFFLINE # If offline=True, override download and auto-upgrade if self.offline: download_if_needed = False auto_upgrade = False self.download = (download_if_needed if download_if_needed is not None else DOWNLOAD_IF_NEEDED) self.auto_upgrade = (auto_upgrade if auto_upgrade is not None else AUTO_UPGRADE) # If this is a release then the .git directory will not exist so we # should not use git. 
git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) if use_git is None and not git_dir_exists: use_git = False self.use_git = use_git if use_git is not None else USE_GIT # Declared as False by default--later we check if astropy-helpers can be # upgraded from PyPI, but only if not using a source distribution (as in # the case of import from a git submodule) self.is_submodule = False @classmethod def main(cls, argv=None): if argv is None: argv = sys.argv config = cls.parse_config() config.update(cls.parse_command_line(argv)) auto_use = config.pop('auto_use', False) bootstrapper = cls(**config) if auto_use: # Run the bootstrapper, otherwise the setup.py is using the old # use_astropy_helpers() interface, in which case it will run the # bootstrapper manually after reconfiguring it. bootstrapper.run() return bootstrapper @classmethod def parse_config(cls): if not os.path.exists('setup.cfg'): return {} cfg = ConfigParser() try: cfg.read('setup.cfg') except Exception as e: if DEBUG: raise log.error( "Error reading setup.cfg: {0!r}\n{1} will not be " "automatically bootstrapped and package installation may fail." "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) return {} if not cfg.has_section('ah_bootstrap'): return {} config = {} for option, type_ in CFG_OPTIONS: if not cfg.has_option('ah_bootstrap', option): continue if type_ is bool: value = cfg.getboolean('ah_bootstrap', option) else: value = cfg.get('ah_bootstrap', option) config[option] = value return config @classmethod def parse_command_line(cls, argv=None): if argv is None: argv = sys.argv config = {} # For now we just pop recognized ah_bootstrap options out of the # arg list. This is imperfect; in the unlikely case that a setup.py # custom command or even custom Distribution class defines an argument # of the same name then we will break that. 
However there's a catch22 # here that we can't just do full argument parsing right here, because # we don't yet know *how* to parse all possible command-line arguments. if '--no-git' in argv: config['use_git'] = False argv.remove('--no-git') if '--offline' in argv: config['offline'] = True argv.remove('--offline') return config def run(self): strategies = ['local_directory', 'local_file', 'index'] dist = None # First, remove any previously imported versions of astropy_helpers; # this is necessary for nested installs where one package's installer # is installing another package via setuptools.sandbox.run_setup, as in # the case of setup_requires for key in list(sys.modules): try: if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): del sys.modules[key] except AttributeError: # Sometimes mysterious non-string things can turn up in # sys.modules continue # Check to see if the path is a submodule self.is_submodule = self._check_submodule() for strategy in strategies: method = getattr(self, 'get_{0}_dist'.format(strategy)) dist = method() if dist is not None: break else: raise _AHBootstrapSystemExit( "No source found for the {0!r} package; {0} must be " "available and importable as a prerequisite to building " "or installing this package.".format(PACKAGE_NAME)) # This is a bit hacky, but if astropy_helpers was loaded from a # directory/submodule its Distribution object gets a "precedence" of # "DEVELOP_DIST". However, in other cases it gets a precedence of # "EGG_DIST". 
However, when activing the distribution it will only be # placed early on sys.path if it is treated as an EGG_DIST, so always # do that dist = dist.clone(precedence=pkg_resources.EGG_DIST) # Otherwise we found a version of astropy-helpers, so we're done # Just active the found distribution on sys.path--if we did a # download this usually happens automatically but it doesn't hurt to # do it again # Note: Adding the dist to the global working set also activates it # (makes it importable on sys.path) by default. try: pkg_resources.working_set.add(dist, replace=True) except TypeError: # Some (much) older versions of setuptools do not have the # replace=True option here. These versions are old enough that all # bets may be off anyways, but it's easy enough to work around just # in case... if dist.key in pkg_resources.working_set.by_key: del pkg_resources.working_set.by_key[dist.key] pkg_resources.working_set.add(dist) @property def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname)) def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. 
""" if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_index_dist(self): if not self.download: log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) return None log.warn( "Downloading {0!r}; run setup.py with the --offline option to " "force offline installation.".format(DIST_NAME)) try: dist = self._do_download() except Exception as e: if DEBUG: raise log.warn( 'Failed to download and/or install {0!r} from {1!r}:\n' '{2}'.format(DIST_NAME, self.index_url, str(e))) dist = None # No need to run auto-upgrade here since we've already presumably # gotten the most up-to-date version from the package index return dist def 
_directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist def _do_download(self, version='', find_links=None): if find_links: allow_hosts = '' index_url = None else: allow_hosts = None index_url = self.index_url # Annoyingly, setuptools will not handle other arguments to # Distribution (such as options) before handling setup_requires, so it # is not straightforward to programmatically augment the arguments which # are passed to easy_install class _Distribution(Distribution): def get_option_dict(self, command_name): opts = Distribution.get_option_dict(self, command_name) if command_name == 'easy_install': if find_links is not None: opts['find_links'] = ('setup script', find_links) if index_url is not None: opts['index_url'] = ('setup script', index_url) if allow_hosts is not None: opts['allow_hosts'] = ('setup script', allow_hosts) return opts if version: req = '{0}=={1}'.format(DIST_NAME, version) else: req = DIST_NAME attrs = {'setup_requires': [req]} try: if DEBUG: _Distribution(attrs=attrs) else: with _silence(): _Distribution(attrs=attrs) # If the setup_requires succeeded it will 
have added the new dist to # the main working_set return pkg_resources.working_set.by_key.get(DIST_NAME) except Exception as e: if DEBUG: raise msg = 'Error retrieving {0} from {1}:\n{2}' if find_links: source = find_links[0] elif index_url != INDEX_URL: source = index_url else: source = 'PyPI' raise Exception(msg.format(DIST_NAME, source, repr(e))) def _do_upgrade(self, dist): # Build up a requirement for a higher bugfix release but a lower minor # release (so API compatibility is guaranteed) next_version = _next_version(dist.parsed_version) req = pkg_resources.Requirement.parse( '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) package_index = PackageIndex(index_url=self.index_url) upgrade = package_index.obtain(req) if upgrade is not None: return self._do_download(version=upgrade.version) def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git() def _check_submodule_using_git(self): """ Check if the given path is a git submodule. If so, attempt to initialize and/or update the submodule if needed. This function makes calls to the ``git`` command in subprocesses. The ``_check_submodule_no_git`` option uses pure Python to check if the given path looks like a git submodule, but it cannot perform updates. """ cmd = ['git', 'submodule', 'status', '--', self.path] try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except _CommandNotFound: # The git command simply wasn't found; this is most likely the # case on user systems that don't have git and are simply # trying to install the package from PyPI or a source # distribution. 
Silently ignore this case and simply don't try # to use submodules return False stderr = stderr.strip() if returncode != 0 and stderr: # Unfortunately the return code alone cannot be relied on, as # earlier versions of git returned 0 even if the requested submodule # does not exist # This is a warning that occurs in perl (from running git submodule) # which only occurs with a malformatted locale setting which can # happen sometimes on OSX. See again # https://github.com/astropy/astropy/issues/2749 perl_warning = ('perl: warning: Falling back to the standard locale ' '("C").') if not stderr.strip().endswith(perl_warning): # Some other unknown error condition occurred log.warn('git submodule command failed ' 'unexpectedly:\n{0}'.format(stderr)) return False # Output of `git submodule status` is as follows: # # 1: Status indicator: '-' for submodule is uninitialized, '+' if # submodule is initialized but is not at the commit currently indicated # in .gitmodules (and thus needs to be updated), or 'U' if the # submodule is in an unstable state (i.e. has merge conflicts) # # 2. SHA-1 hash of the current commit of the submodule (we don't really # need this information but it's useful for checking that the output is # correct) # # 3. The output of `git describe` for the submodule's current commit # hash (this includes for example what branches the commit is on) but # only if the submodule is initialized. 
We ignore this information for # now _git_submodule_status_re = re.compile( '^(?P<status>[+-U ])(?P<sha1>[0-9a-f]{40}) ' '(?P<submodule>\S+)( .*)?$') # The stdout should only contain one line--the status of the # requested submodule m = _git_submodule_status_re.match(stdout) if m: # Yes, the path *is* a git submodule self._update_submodule(m.group('submodule'), m.group('status')) return True else: log.warn( 'Unexpected output from `git submodule status`:\n{0}\n' 'Will attempt import from {1!r} regardless.'.format( stdout, self.path)) return False def _check_submodule_no_git(self): """ Like ``_check_submodule_using_git``, but simply parses the .gitmodules file to determine if the supplied path is a git submodule, and does not exec any subprocesses. This can only determine if a path is a submodule--it does not perform updates, etc. This function may need to be updated if the format of the .gitmodules file is changed between git versions. 
gitmodules_fileobj = io.StringIO() # Must use io.open for cross-Python-compatible behavior wrt unicode with io.open(gitmodules_path) as f: for line in f: # gitconfig files are more flexible with leading whitespace; just # go ahead and remove it line = line.lstrip() # comments can start with either # or ; if line and line[0] in ('#', ';'): continue gitmodules_fileobj.write(line) gitmodules_fileobj.seek(0) cfg = RawConfigParser() try: cfg.readfp(gitmodules_fileobj) except Exception as exc: log.warn('Malformatted .gitmodules file: {0}\n' '{1} cannot be assumed to be a git submodule.'.format( exc, self.path)) return False for section in cfg.sections(): if not cfg.has_option(section, 'path'): continue submodule_path = cfg.get(section, 'path').rstrip(os.sep) if submodule_path == self.path.rstrip(os.sep): return True return False def _update_submodule(self, submodule, status): if status == ' ': # The submodule is up to date; no action necessary return elif status == '-': if self.offline: raise _AHBootstrapSystemExit( "Cannot initialize the {0} submodule in --offline mode; " "this requires being able to clone the submodule from an " "online repository.".format(submodule)) cmd = ['update', '--init'] action = 'Initializing' elif status == '+': cmd = ['update'] action = 'Updating' if self.offline: cmd.append('--no-fetch') elif status == 'U': raise _AHBootstrapSystemExit( 'Error: Submodule {0} contains unresolved merge conflicts. ' 'Please complete or abandon any changes in the submodule so that ' 'it is in a usable state, then try again.'.format(submodule)) else: log.warn('Unknown status {0!r} for git submodule {1!r}. 
Will ' 'attempt to use the submodule as-is, but try to ensure ' 'that the submodule is in a clean state and contains no ' 'conflicts or errors.\n{2}'.format(status, submodule, _err_help_msg)) return err_msg = None cmd = ['git', 'submodule'] + cmd + ['--', submodule] log.warn('{0} {1} submodule with: `{2}`'.format( action, submodule, ' '.join(cmd))) try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except OSError as e: err_msg = str(e) else: if returncode != 0: err_msg = stderr if err_msg is not None: log.warn('An unexpected error occurred updating the git submodule ' '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) class _CommandNotFound(OSError): """ An exception raised when a command run with run_cmd is not found on the system. """ def run_cmd(cmd): """ Run a command in a subprocess, given as a list of command-line arguments. Returns a ``(returncode, stdout, stderr)`` tuple. """ try: p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) # XXX: May block if either stdout or stderr fill their buffers; # however for the commands this is currently used for that is # unlikely (they should have very brief output) stdout, stderr = p.communicate() except OSError as e: if DEBUG: raise if e.errno == errno.ENOENT: msg = 'Command not found: `{0}`'.format(' '.join(cmd)) raise _CommandNotFound(msg, cmd) else: raise _AHBootstrapSystemExit( 'An unexpected error occurred when running the ' '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) # Can fail of the default locale is not configured properly. See # https://github.com/astropy/astropy/issues/2749. For the purposes under # consideration 'latin1' is an acceptable fallback. try: stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' except ValueError: # Due to an OSX oddity locale.getdefaultlocale() can also crash # depending on the user's locale/language settings. 
See: # http://bugs.python.org/issue18378 stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible if not isinstance(stdout, _text_type): stdout = stdout.decode(stdio_encoding, 'replace') if not isinstance(stderr, _text_type): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) def _next_version(version): """ Given a parsed version from pkg_resources.parse_version, returns a new version string with the next minor version. Examples ======== >>> _next_version(pkg_resources.parse_version('1.2.3')) '1.3.0' """ if hasattr(version, 'base_version'): # New version parsing from setuptools >= 8.0 if version.base_version: parts = version.base_version.split('.') else: parts = [] else: parts = [] for part in version: if part.startswith('*'): break parts.append(part) parts = [int(p) for p in parts] if len(parts) < 3: parts += [0] * (3 - len(parts)) major, minor, micro = parts[:3] return '{0}.{1}.{2}'.format(major, minor + 1, 0) class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x encoding = 'utf-8' def write(self, s): pass def flush(self): pass @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr _err_help_msg = """ If the problem persists consider installing astropy_helpers manually using pip (`pip install astropy_helpers`) or by manually downloading the source archive, extracting it, and installing by running `python setup.py install` from the root of the extracted source code. 
""" class _AHBootstrapSystemExit(SystemExit): def __init__(self, *args): if not args: msg = 'An unknown problem occurred bootstrapping astropy_helpers.' else: msg = args[0] msg += '\n' + _err_help_msg super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) BOOTSTRAPPER = _Bootstrapper.main() def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. 
auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run() ccdproc-1.3.0.post1/docs/0000775000175000017500000000000013207623133016605 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/docs/license.rst0000664000175000017500000000024013207605210020751 0ustar mseifertmseifert00000000000000.. _license: ******** Licenses ******** Ccdproc License =============== Ccdproc is licensed under a 3-clause BSD style license: .. include:: ../LICENSE.rst ccdproc-1.3.0.post1/docs/changelog.rst0000664000175000017500000000013213207605210021256 0ustar mseifertmseifert00000000000000.. _changelog: ************** Full Changelog ************** .. include:: ../CHANGES.rst ccdproc-1.3.0.post1/docs/ccdproc/0000775000175000017500000000000013207623133020222 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/docs/ccdproc/image_management.rst0000664000175000017500000001477613207605210024245 0ustar mseifertmseifert00000000000000.. _image_management: Image Management ================ .. _image_collection: Working with a directory of images ---------------------------------- For the sake of argument all of the examples below assume you are working in a directory that contains FITS images. 
The class :class:`~ccdproc.image_collection.ImageFileCollection` is meant to make working with a directory of FITS images easier by allowing you select the files you act on based on the values of FITS keywords in their headers or based on Unix shell-style filename matching. It is initialized with the name of a directory containing FITS images and a list of FITS keywords you want the :class:`~ccdproc.image_collection.ImageFileCollection` to be aware of. An example initialization looks like:: >>> from ccdproc import ImageFileCollection >>> keys = ['imagetyp', 'object', 'filter', 'exposure'] >>> ic1 = ImageFileCollection('.', keywords=keys) # only keep track of keys You can use the wildcard ``*`` in place of a list to indicate you want the collection to use all keywords in the headers:: >>> ic_all = ImageFileCollection('.', keywords='*') You can indicate filename patterns to include or exclude using Unix shell-style expressions. For example, to include all filenames that begin with ``1d_`` but not ones that include the word ``bad``, you could do:: >>> ic_all = ImageFileCollection('.', glob_include='1d_*', ... glob_exclude='*bad*') Most of the useful interaction with the image collection is via its ``.summary`` property, a :class:`~astropy.table.Table` of the value of each keyword for each file in the collection:: >>> ic1.summary.colnames ['file', 'imagetyp', 'object', 'filter', 'exposure'] >>> ic_all.summary.colnames # doctest: +SKIP # long list of keyword names omitted Note that the name of the file is automatically added to the table as a column named ``file``. Selecting files --------------- Selecting the files that match a set of criteria, for example all images in the I band with exposure time less than 60 seconds you could do:: >>> matches = (ic1.summary['filter'] == 'I') & (ic1.summary['exposure'] < 60) # doctest: +SKIP >>> my_files = ic1.summary['file'][matches] # doctest: +SKIP The column ``file`` is added automatically when the image collection is created. 
For more simple selection, when you just want files whose keywords exactly match particular values, say all I band images with exposure time of 30 seconds, there is a convenience method ``.files_filtered``:: >>> my_files = ic1.files_filtered(filter='I', exposure=30) # doctest: +SKIP The optional arguments to ``files_filtered`` are used to filter the list of files. Sorting files ------------- Sometimes it is useful to bring the files into a specific order, e.g. if you make a plot for each object you probably want all images of the same object next to each other. To do this, the images in a collection can be sorted with the ``sort`` method using the fits header keys in the same way you would sort a :class:`~astropy.table.Table`:: >>> ic1.sort(['object', 'filter']) # doctest: +SKIP Iterating over hdus, headers, data, or ccds ------------------------------------------- Four methods are provided for iterating over the images in the collection, optionally filtered by keyword values. For example, to iterate over all of the I band images with exposure of 30 seconds, performing some basic operation on the data (very contrived example):: >>> for hdu in ic1.hdus(imagetyp='LiGhT', filter='I', exposure=30): # doctest: +SKIP ... hdu.header['exposure'] ... new_data = hdu.data - hdu.data.mean() Note that the names of the arguments to ``hdus`` here are the names of FITS keywords in the collection and the values are the values of those keywords you want to select. Note also that string comparisons are not case sensitive. The other iterators are ``headers``, ``data``, and ``ccds``. All of them have the option to also provide the file name in addition to the hdu (or header or data):: >>> for hdu, fname in ic1.hdus(return_fname=True, ... imagetyp='LiGhT', filter='I', exposure=30): # doctest: +SKIP ... hdu.header['meansub'] = True ... hdu.data = hdu.data - hdu.data.mean() ... 
as an example, if one of the files iterated over was ``input001.fit`` then a new file, in the same directory, called ``input001_new.fit`` would be created.
imagetyp='LiGhT', filter='I', exposure=30): # doctest: +SKIP ... hdu.header['meansub'] = True ... hdu.data = hdu.data - hdu.data.mean() .. note:: This functionality is not currently available on Windows. ccdproc-1.3.0.post1/docs/ccdproc/install.rst0000664000175000017500000000364513207605210022426 0ustar mseifertmseifert00000000000000************ Installation ************ Requirements ============ Ccdproc has the following requirements: - `Astropy`_ v1.0 or later - `NumPy `_ - `SciPy `_ - `scikit-image `_ - `astroscrappy `_ - `reproject `_ One easy way to get these dependencies is to install a python distribution like `anaconda `_. Installing ccdproc ================== Using pip ------------- To install ccdproc with `pip `_, simply run:: pip install --no-deps ccdproc .. note:: The ``--no-deps`` flag is optional, but highly recommended if you already have Numpy installed, since otherwise pip will sometimes try to "help" you by upgrading your Numpy installation, which may not always be desired. Using conda ------------- To install ccdproc with `anaconda `_, simply run:: conda install -c astropy ccdproc Building from source ==================== Obtaining the source packages ----------------------------- Source packages ^^^^^^^^^^^^^^^ The latest stable source package for ccdproc can be `downloaded here `_. 
recording the steps performed in image metadata. If you want to do
There are several ways to provide the gain, among them as an `astropy.units.Quantity`, as in the example above, or as a `ccdproc.Keyword`. See the documentation for `~ccdproc.gain_correct` for details.
The algorithm is an implementation of the code described in van Dokkum (2001) [1]_ as implemented in `astroscrappy <https://github.com/astropy/astroscrappy>`_ [2]_.
One aspect of python indexing may be particularly surprising to newcomers:
We are assuming it came from a FITS file in which ``NAXIS1=232`` and ``NAXIS2=100``, in which the last 32 columns along ``NAXIS1`` are overscan. Trim it using `~ccdproc.trim_image`, shown below in both python-style and FITS-style indexing:
fits_section='[1:200, 1:100]') >>> # python-style: >>> trimmed = ccdproc.trim_image(oscan_subtracted[:, :200]) Note again that in python the order of indices is opposite that assumed in FITS format, that the last value in an index means "up to, but not including", and that a missing value implies either first or last value. Those familiar with python may wonder what the point of `~ccdproc.trim_image` is; it looks like simply indexing ``oscan_subtracted`` would accomplish the same thing. The only additional thing `~ccdproc.trim_image` does is to make a copy of the image before trimming it. .. note:: By default, python automatically reduces array indices that extend beyond the actual length of the array to the actual length. In practice, this means you can supply an invalid shape for, e.g. trimming, and an error will not be raised. To make this concrete, ``ccdproc.trim_image(oscan_subtracted[:, :200000000])`` will be treated as if you had put in the correct upper bound, ``200``. Subtract bias and dark ---------------------- Both of the functions below propagate the uncertainties in the science and calibration images if either or both is defined. Assume in this section that you have created a master bias image called ``master_bias`` and a master dark image called ``master_dark`` that *has been bias-subtracted* so that it can be scaled by exposure time if necessary. Subtract the bias with `~ccdproc.subtract_bias`: >>> fake_bias_data = np.random.normal(size=trimmed.shape) # just for illustration >>> master_bias = ccdproc.CCDData(fake_bias_data, ... unit=u.electron, ... mask=np.zeros(trimmed.shape)) >>> bias_subtracted = ccdproc.subtract_bias(trimmed, master_bias) There are several ways you can specify the exposure times of the dark and science images; see `~ccdproc.subtract_dark` for a full description. 
In the example below we assume there is a keyword ``exposure`` in the metadata of the trimmed image and the master dark and that the units of the exposure are seconds (note that you can instead explicitly provide these times). To perform the dark subtraction use `~ccdproc.subtract_dark`: >>> master_dark = master_bias.multiply(0.1) # just for illustration >>> master_dark.header['exposure'] = 15.0 >>> dark_subtracted = ccdproc.subtract_dark(bias_subtracted, master_dark, ... exposure_time='exposure', ... exposure_unit=u.second, ... scale=True) Note that scaling of the dark is not done by default; use ``scale=True`` to scale. Correct flat ------------ Given a flat frame called ``master_flat``, use `~ccdproc.flat_correct` to perform this calibration: >>> fake_flat_data = np.random.normal(loc=1.0, scale=0.05, size=trimmed.shape) >>> master_flat = ccdproc.CCDData(fake_flat_data, unit=u.electron) >>> reduced_image = ccdproc.flat_correct(dark_subtracted, master_flat) As with the additive calibrations, uncertainty is propagated in the division. The flat is scaled by the mean of ``master_flat`` before dividing. If desired, you can specify a minimum value the flat can have (e.g. to prevent division by zero). Any pixels in the flat whose value is less than ``min_value`` are replaced with ``min_value``): >>> reduced_image = ccdproc.flat_correct(dark_subtracted, master_flat, ... min_value=0.9) Basic Processing ---------------- All of the basic processing steps can be accomplished in a single step using `~ccdproc.ccd_process`. This step will call overscan correct, trim, gain correct, add a bad pixel mask, create an uncertainty frame, subtract the master bias, and flat-field the image. The unit of the master calibration frames must match that of the image *after* the gain, if any, is applied. In the example below, ``img`` has unit ``adu``, but the master frames have unit ``electron``. 
The underlying functionality is provided by the `reproject project`_.
- ``scikit-image.filters``, offers several filters which can also handle masks but are mostly limited to special data types (mostly unsigned integers). For convenience one of these is also accessible through the ``ccdproc`` package namespace which accepts `~astropy.nddata.CCDData` objects and then also returns one: - `~ccdproc.median_filter` Median Filter +++++++++++++ The median filter is especially useful if the data contains sharp noise peaks which should be removed rather than propagated: .. plot:: :include-source: import ccdproc import numpy as np import matplotlib.pyplot as plt from astropy.modeling.functional_models import Gaussian2D from astropy.utils.misc import NumpyRNGContext from scipy.ndimage import uniform_filter # Create some source signal source = Gaussian2D(60, 70, 70, 20, 25) data = source(*np.mgrid[0:250, 0:250]) # and another one source = Gaussian2D(70, 150, 180, 15, 15) data += source(*np.mgrid[0:250, 0:250]) # create some random signals with NumpyRNGContext(1234): noise = np.random.exponential(40, (250, 250)) # remove low signal noise[noise < 100] = 0 data += noise # create a CCD object based on the data ccd = ccdproc.CCDData(data, unit='adu') # Create some plots fig, (ax1, ax2, ax3) = plt.subplots(1, 3) ax1.set_title('Unprocessed') ax1.imshow(ccd, origin='lower', interpolation='none', cmap=plt.cm.gray) ax2.set_title('Mean filtered') ax2.imshow(uniform_filter(ccd.data, 5), origin='lower', interpolation='none', cmap=plt.cm.gray) ax3.set_title('Median filtered') ax3.imshow(ccdproc.median_filter(ccd, 5), origin='lower', interpolation='none', cmap=plt.cm.gray) plt.tight_layout() plt.show() .. [1] van Dokkum, P; 2001, "Cosmic-Ray Rejection by Laplacian Edge Detection". The Publications of the Astronomical Society of the Pacific, Volume 113, Issue 789, pp. 1420-1427. doi: 10.1086/323894 .. [2] McCully, C., 2014, "Astro-SCRAPPY", https://github.com/astropy/astroscrappy .. 
_reproject project: http://reproject.readthedocs.io/ ccdproc-1.3.0.post1/docs/ccdproc/ccddata.rst0000664000175000017500000001650613207617404022353 0ustar mseifertmseifert00000000000000.. _ccddata: CCDData class ============= Getting started --------------- Getting data in +++++++++++++++ The tools in `ccdproc` accept only `~astropy.nddata.CCDData` objects, a subclass of `~astropy.nddata.NDData`. Creating a `~astropy.nddata.CCDData` object from any array-like data is easy: >>> import numpy as np >>> import ccdproc >>> ccd = ccdproc.CCDData(np.arange(10), unit="adu") Note that behind the scenes, `~astropy.nddata.NDData` creates references to (not copies of) your data when possible, so modifying the data in ``ccd`` will modify the underlying data. You are **required** to provide a unit for your data. The most frequently used units for these objects are likely to be ``adu``, ``photon`` and ``electron``, which can be set either by providing the string name of the unit (as in the example above) or from unit objects: >>> from astropy import units as u >>> ccd_photon = ccdproc.CCDData([1, 2, 3], unit=u.photon) >>> ccd_electron = ccdproc.CCDData([1, 2, 3], unit="electron") If you prefer *not* to use the unit functionality then use the special unit ``u.dimensionless_unscaled`` when you create your `~astropy.nddata.CCDData` images: >>> ccd_unitless = ccdproc.CCDData(np.zeros((10, 10)), ... unit=u.dimensionless_unscaled) A `~astropy.nddata.CCDData` object can also be initialized from a FITS file: >>> ccd = ccdproc.CCDData.read('my_file.fits', unit="adu") # doctest: +SKIP If there is a unit in the FITS file (in the ``BUNIT`` keyword), that will be used, but a unit explicitly provided in ``read`` will override any unit in the FITS file. There is no restriction at all on what the unit can be -- any unit in `astropy.units` or that you create yourself will work. 
and the underlying data (ignoring any mask) is accessed through the ``data`` attribute:
It converts the metadata to a FITS header: >>> hdulist = ccd_masked.to_hdu() You can also write directly to a FITS file: >>> ccd_masked.write('my_image.fits') Masks and flags +++++++++++++++ Although not required when a `~astropy.nddata.CCDData` image is created you can also specify a mask and/or flags. A mask is a boolean array the same size as the data in which a value of ``True`` indicates that a particular pixel should be masked, *i.e.* not be included in arithmetic operations or aggregation. Flags are one or more additional arrays (of any type) whose shape matches the shape of the data. For more details on setting flags see `astropy.nddata.NDData`. WCS +++ The ``wcs`` attribute of `~astropy.nddata.CCDData` object can be set two ways. + If the `~astropy.nddata.CCDData` object is created from a FITS file that has WCS keywords in the header, the ``wcs`` attribute is set to a `astropy.wcs.WCS` object using the information in the FITS header. + The WCS can also be provided when the `~astropy.nddata.CCDData` object is constructed with the ``wcs`` argument. Either way, the ``wcs`` attribute is kept up to date if the `~astropy.nddata.CCDData` image is trimmed. Uncertainty ----------- Pixel-by-pixel uncertainty can be calculated for you: >>> data = np.random.normal(size=(10, 10), loc=1.0, scale=0.1) >>> ccd = ccdproc.CCDData(data, unit="electron") >>> ccd_new = ccdproc.create_deviation(ccd, readnoise=5 * u.electron) See :ref:`create_deviation` for more details. 
You can also set the uncertainty directly, either by creating a `~astropy.nddata.StdDevUncertainty` object first: >>> from astropy.nddata.nduncertainty import StdDevUncertainty >>> uncertainty = 0.1 * ccd.data # can be any array whose shape matches the data >>> my_uncertainty = StdDevUncertainty(uncertainty) >>> ccd.uncertainty = my_uncertainty or by providing a `~numpy.ndarray` with the same shape as the data: >>> ccd.uncertainty = 0.1 * ccd.data # doctest: +ELLIPSIS INFO: array provided for uncertainty; assuming it is a StdDevUncertainty. [...] In this case the uncertainty is assumed to be `~astropy.nddata.StdDevUncertainty`. Using `~astropy.nddata.StdDevUncertainty` is required to enable error propagation in `~astropy.nddata.CCDData` If you want access to the underlying uncertainty use its ``.array`` attribute: >>> ccd.uncertainty.array # doctest: +ELLIPSIS array(...) Arithmetic with images ---------------------- Methods are provided to perform arithmetic operations with a `~astropy.nddata.CCDData` image and a number, an astropy `~astropy.units.Quantity` (a number with units) or another `~astropy.nddata.CCDData` image. Using these methods propagates errors correctly (if the errors are uncorrelated), take care of any necessary unit conversions, and apply masks appropriately. Note that the metadata of the result is *not* set if the operation is between two `~astropy.nddata.CCDData` objects. >>> result = ccd.multiply(0.2 * u.adu) >>> uncertainty_ratio = result.uncertainty.array[0, 0]/ccd.uncertainty.array[0, 0] >>> round(uncertainty_ratio, 5) # doctest: +FLOAT_CMP 0.2 >>> result.unit Unit("adu electron") .. note:: In most cases you should use the functions described in :ref:`reduction_toolbox` to perform common operations like scaling by gain or doing dark or sky subtraction. Those functions try to construct a sensible header for the result and provide a mechanism for logging the action of the function in the header. 
When a ``CCDData`` object is initialized from a FITS file its metadata is a FITS header.
since the implemented gain correction simply multiplies by a constant.
The function `~ccdproc.wcs_project` allows you to reproject an image onto a different WCS. To make applying the same operations to a set of files in a directory easier, use an `~ccdproc.image_collection.ImageFileCollection`. It constructs, given a directory, a `~astropy.table.Table` containing the values of user-selected keywords in the directory. It also provides methods for iterating over the files. The example below was used to find an image in which the sky background was high for use in a talk: >>> from __future__ import division, print_function >>> from ccdproc import ImageFileCollection >>> import numpy as np >>> from glob import glob >>> dirs = glob('/Users/mcraig/Documents/Data/feder-images/fixed_headers/20*-??-??') >>> for d in dirs: ... print(d) ... ic = ImageFileCollection(d, keywords='*') ... for data, fname in ic.data(imagetyp='LIGHT', return_fname=True): ... if data.mean() > 4000.: ... print(fname) Using `ccdproc` --------------- .. toctree:: :maxdepth: 1 ccddata.rst image_combination.rst reduction_toolbox.rst image_management.rst reduction_examples.rst .. automodapi:: ccdproc :skip: CCDData :skip: fits_ccddata_writer :skip: fits_ccddata_reader .. automodapi:: ccdproc.utils.slices .. _GitHub repo: https://github.com/astropy/ccdproc ccdproc-1.3.0.post1/docs/ccdproc/image_combination.rst0000664000175000017500000001414313207605210024417 0ustar mseifertmseifert00000000000000.. _image_combination: Combining images and generating masks from clipping =================================================== .. note:: No attempt has been made yet to optimize memory usage in `~ccdproc.Combiner`. A copy is made, and a mask array constructed, for each input image. The first step in combining a set of images is creating a `~ccdproc.Combiner` instance: >>> from astropy import units as u >>> from ccdproc import CCDData, Combiner >>> import numpy as np >>> ccd1 = CCDData(np.random.normal(size=(10,10)), ... 
unit=u.adu) >>> ccd2 = ccd1.copy() >>> ccd3 = ccd1.copy() >>> combiner = Combiner([ccd1, ccd2, ccd3]) The combiner task really combines two things: generation of masks for individual images via several clipping techniques and combination of images. .. _clipping: Image masks/clipping -------------------- There are currently three methods of clipping. None affect the data directly; instead each constructs a mask that is applied when images are combined. Masking done by clipping operations is combined with the image mask provided when the `~ccdproc.Combiner` is created. Min/max clipping ++++++++++++++++ `~ccdproc.Combiner.minmax_clipping` masks all pixels above or below user-specified levels. For example, to mask all values above the value ``0.1`` and below the value ``-0.3``: >>> combiner.minmax_clipping(min_clip=-0.3, max_clip=0.1) Either ``min_clip`` or ``max_clip`` can be omitted. Sigma clipping ++++++++++++++ For each pixel of an image in the combiner, `~ccdproc.combiner.Combiner.sigma_clipping` masks the pixel if is more than a user-specified number of deviations from the central value of that pixel in the list of images. The `~ccdproc.combiner.Combiner.sigma_clipping` method is very flexible: you can specify both the function for calculating the central value and the function for calculating the deviation. The default is to use the mean (ignoring any masked pixels) for the central value and the standard deviation (again ignoring any masked values) for the deviation. You can mask pixels more than 5 standard deviations above or 2 standard deviations below the median with >>> combiner.sigma_clipping(low_thresh=2, high_thresh=5, func=np.ma.median) .. note:: Numpy masked median can be very slow in exactly the situation typically encountered in reducing ccd data: a cube of data in which one dimension (in the case the number of frames in the combiner) is much smaller than the number of pixels. 
Extrema clipping ++++++++++++++++ For each pixel position in the input arrays, the algorithm will mask the highest ``nhigh`` and lowest ``nlow`` pixel values. The resulting image will be a combination of ``Nimages-nlow-nhigh`` pixel values instead of the combination of ``Nimages`` worth of pixel values. You can mask the lowest pixel value and the highest two pixel values with: >>> combiner.clip_extrema(nlow=1, nhigh=2) Iterative clipping ++++++++++++++++++ To clip iteratively, continuing the clipping process until no more pixels are rejected, loop in the code calling the clipping method: >>> old_n_masked = 0 # dummy value to make loop execute at least once >>> new_n_masked = combiner.data_arr.mask.sum() >>> while (new_n_masked > old_n_masked): ... combiner.sigma_clipping(func=np.ma.median) ... old_n_masked = new_n_masked ... new_n_masked = combiner.data_arr.mask.sum() Note that the default values for the high and low thresholds for rejection are 3 standard deviations. Image combination ----------------- Image combination is straightforward; to combine by taking the average, excluding any pixels mapped by clipping: >>> combined_average = combiner.average_combine() Performing a median combination is also straightforward, >>> combined_median = combiner.median_combine() # can be slow, see below With image scaling ------------------ In some circumstances it may be convenient to scale all images to some value before combining them. Do so by setting `~ccdproc.Combiner.scaling`: >>> scaling_func = lambda arr: 1/np.ma.average(arr) >>> combiner.scaling = scaling_func >>> combined_average_scaled = combiner.average_combine() This will normalize each image by its mean before combining (note that the underlying images are *not* scaled; scaling is only done as part of combining using `~ccdproc.Combiner.average_combine` or `~ccdproc.Combiner.median_combine`). .. _reprojection: With image transformation ------------------------- .. 
note:: **Flux conservation** Whether flux is conserved in performing the reprojection depends on the method you use for reprojecting and the extent to which pixel area varies across the image. `~ccdproc.wcs_project` rescales counts by the ratio of pixel area *of the pixel indicated by the keywords* ``CRPIX`` of the input and output images. The reprojection methods available are described in detail in the documentation for the `reproject project`_; consult those documents for details. You should carefully check whether flux conservation provided in CCDPROC is adequate for your needs. Suggestions for improvement are welcome! Align and then combine images based on World Coordinate System (WCS) information in the image headers in two steps. First, reproject each image onto the same footprint using `~ccdproc.wcs_project`. The example below assumes you have an image with WCS information and another image (or WCS) onto which you want to project your images: .. doctest-skip:: >>> from ccdproc import wcs_project >>> reprojected_image = wcs_project(input_image, target_wcs) Repeat this for each of the images you want to combine, building up a list of reprojected images: .. doctest-skip:: >>> reprojected = [] >>> for img in my_list_of_images: ... new_image = wcs_project(img, target_wcs) ... reprojected.append(new_image) Then, combine the images as described above for any set of images: .. doctest-skip:: >>> combiner = Combiner(reprojected) >>> stacked_image = combiner.average_combine() .. _reproject project: http://reproject.readthedocs.io/ ccdproc-1.3.0.post1/docs/ccdproc/reduction_examples.rst0000664000175000017500000000104313207605210024640 0ustar mseifertmseifert00000000000000Reduction examples ================== Here are some examples and different repositories using `ccdproc`. * `ipython notebook`_ * `WHT basic reductions`_ * `pyhrs`_ * `reduceccd`_ * `astrolib`_ .. _ipython notebook: http://nbviewer.ipython.org/gist/mwcraig/06060d789cc298bbb08e .. 
_WHT basic reductions: https://github.com/crawfordsm/wht_reduction_scripts/blob/master/wht_basic_reductions.py .. _pyhrs: https://github.com/saltastro/pyhrs .. _reduceccd: https://github.com/rgbIAA/reduceccd .. _astrolib: https://github.com/yucelkilic/astrolib ccdproc-1.3.0.post1/docs/_static/0000775000175000017500000000000013207623133020233 5ustar mseifertmseifert00000000000000ccdproc-1.3.0.post1/docs/_static/ccdproc.css0000664000175000017500000000032013207605210022351 0ustar mseifertmseifert00000000000000 @import url("bootstrap-astropy.css"); div.topbar a.brand { background: transparent url("ccd_proc.png") no-repeat 10px 4px; background-image: url("ccdproc.svg"), none; background-size: 32px 32px; } ccdproc-1.3.0.post1/docs/_static/ccd_proc.png0000775000175000017500000001532313207605210022520 0ustar mseifertmseifert00000000000000PNG  IHDRߊsBIT|d pHYs 7˭tEXtSoftwarewww.inkscape.org<PIDATx\/්ݎ0&F\{"y@\)FU Y)ѬD*d3jEM"1X vر.܅rw)]2O˩_qV1GѦClA)9^:^UU gi3b;v/sEUŞMf`PcymǦB{:k $2Xag|iRtZջ3afӱٱٲ)q磴{.WwΦ?HT<||xx);@z?]fv>;;-;w;$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$ 'o2]v~nuir[]OߔAOgggĝn`:G{VxUU%ψz|rZnCHUUiZ_m:ǍUUnZw7F|vcՉn#Z"nwe)9KPYe)9rFO/5eͳ4wקq $ʝS[D^=}8V 3;ΤG;ms=OFđӅnӱٙxxLB:;wPB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ P@ꋕ/>d έ-잹g_#ƧosVXYlx=Yn񺡝;<"~omsMD2Ca);|T0:XYo|V83W9;al2NbӱٙxQ֌W,cu;$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$ Vۭ""ZpkюKGшNk AcƭUTE;ڃhǰ\.t|vJyD|48@I:v>J"beL01?eeSZ1ͳVDt)e| (tH@@_=Mf`PcڱiĮZT VmRRtZąΨƞ)ePk|h7<"F,e|i_B;?~l?W@z>{,^xx_C HT履 7vP);;$^xv|n7<"/oszޟ#kc~&zOD|s  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  tT8,|4v~n4W9 j7ePmV#@{bȁ߼үxmgYPݰuvUYxDm:e)9KPYe9r:qǼ;rjJ9Rr,%g J9"rvQ('g J9Rr,%g J9"r r_A<~,vǛ76m}['HT@|\ݹipOy:<}rD3#{5w"wۜz#5,ӋGuع|:%_v>;m~g3 5D?bs?dr>V'ơ֩{cx݅~~m3Q{&6z;wOΧ~n@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ hu*"b!h󌜫꾫?ZZn,׸{dꏣ>r{m@Eg|/4g+GgFī$xD|9'IT"bجΧSunYע 7jNV{z/j: 튈gߌk:Ͼ{?Ҹ"v:);y޹[@'"t-8rjJ9Rr,%g J9"rv"[M؂!筦,%g J9Rr,"[衸}RY6uWȦՇ; $bHݺD[z~|:vq:@zXf<.8Qc ;tq&y("~Q3);1CթBmjyrQѪ}~뭎|:v~:3W9;1S89g3uvsv޲)߹[B:$  
(tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PBZnhEXm85hǥ\xD0bXqkщEy=.yΧ3i;b6kXjlV#b5"V UoWD,^y]#fNDο;]IUDϯAcP@θ?ܲZWLsҹZ;v% (tH@@_=ËMf`jrkǦB5]ZꭴbU;sQ1e類3&Rty@B{gT}FLjv>;5ўj,Ը gw|gK?Ccuo=_8z?GJyDy&zGij5oM*}ϝ)j Q8A=M׼eSXs  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  tqbaY6sW\{irk/Mz^Qndq|P#@{bȁ s;I:1hmq s;n`:G,A)gYJr,A)gYDN N\1NRβ%(,KYRβ(6Br#YRβ%(,KYRβn@A<̲x+ߨ{띈6ǹ'#H"⿶9"b#v>ڝ?ٟb{ñꌏ#@I߫J ĩҁxMyh;k_^o,ԸOpxLB~6Ǚ=cnә'nZ9@zo?Vo~a/t;B;`哹<:vc>;wPB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ P@V =AyFu]xD݈xPo^y>o#d#SDZ??B{bD ę@D|k;Jsw+n,Ըw'^6+wn,Mf\woGe糵<6b |vŽ tnr:ǦClVSYe)9KPYj:9o5e)9KPYe9rF-}q)&l]냏Խۈms=$",ط~`S?z=.i N_>T,߶cì?x@z/~HwP%<"F ę䡈Χ~B q©B݄~C\[ssu|fәg<;amgqks;ﯝXvnYs  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  nъ_6aڝ?W^FJ#튈+<"$w{\^Ov;hWBxbTcRvީ0g݉AkvUD0u"bv%J"bb,ƵyYv>[inAr=C܈yn@ P=k̲Վ A5Jg!r6.?5//pv>wÈ8@I#b_|%AÐ~LΗ.Eg3Gi] 5ľMGO}??4ޱ,^p[O9D^3;N;~&zODkzhLgkޚΧ3:Ξj Qn@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ \}1hľMf=w-.UsKzmgYtJyD,z';u<4a~`r0Χ3:)ֵn`:G,A)gYJr,A)gYDN N\1NRβ%(,KYRβ(6Br#YRβ%(,KYRβn@&lpviwy76q $s?o QWp/GD8|="7>씼s)q\[g~PNܽvdr{ڥ?q{uS?Snӱ)y>ۧSr:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$  (tH@@ PB:$йbea!=XY68kvWHqmN3B^vguC;N;>wvUYxDm:e)9KPYe9r:ǦClVSYe)9KPYj:9o5e)9KPYe9rFO/]ϽRY6讻׏;v>[w>:*"VItv ȹy֊;q#v>;ٲٱ/-wHu!L]IENDB`ccdproc-1.3.0.post1/docs/_static/ccdproc.svg0000775000175000017500000014447113207605210022403 0ustar mseifertmseifert00000000000000 image/svg+xml ccdproc-1.3.0.post1/docs/_static/ccd_proc.ico0000664000175000017500000132262613207605210022513 0ustar mseifertmseifert00000000000000 hf 00 %v@@ (B; (F} ( n(  @ddd/!!"!(!("dddC)/!("66y&,!(""!(!(") ."/ +5Dy&3"/ +dddA19)2 +) . .),1!6ccc/,1!6ccc/ccc/dddCdddCccc/,11,96LydddCddd/96Ly6Ly99==9ddd/!A=9E>TdddSdddCE>T>TE6Ly>P1MEdddC)L!HEL1VF^wdddAL>XFYw K5ZyT`{c>XLdddA1Q&NLSU!Vccc/S6ZydddCccc/S6Zy6ZySccc/dddC6ZyS`a!addd/`aa``6by6by`ddd/dddC6by`e!e#e!f6by>b>b6by6by-e&eedddC1f&eej i ij5myTg{cTg{c5my5my&j"h fdddA)i ijtrrtt6my6myttr!qccc/ccc/!qrt~~~~~!|ddd/ddd/!|~!!!!1y6x!!#!  
11 ( @ !!0000!!dddWddd1PP00!!!!0000!!!!0000!!$$$$ddd1GG$$$$$$$$$$02$&;=$&$&;=$&02ddd{ddd]VVy$&$&;=$&0266**@@$&$&;=$&0202$&;=$&$&;=$&02$.$.ddd1GK$.ddd1GK$.$.$.$.$.ddd1GK$.ddd1GK$.$.$.0>$6;F$6$6NSFKOR6E*=@K$6$6NSFKORdddyddd]``iFKFKVXyFKOR0>$6;F$6$6;F$60>, $8 GPddd1, $8 GPddd1ddd1ddd]ddd]ddd1, $8 $8 ,!6,0@,,PVddd1dddW!6,0@,,PVddd1dddWdddWddd1ddd{ddd1ddd1ddd{ddd1dddW!6,0@,,0@,!6!A97Lddd1ddd1ddd{ddd1dddW!A97Lddd1ddd17L9!A!A90I990I9!AdddWddd1PY990I9!A90*Fddd]ddd190*F*F0990$B00$B09ddd1GU00$B090K$EASddd]ddd]dddddd]ddd{0K$EASddd]ddd]AS$E0K6L*FAR*F*F=P$E0Kddd{ddd]V]y$E$E;P$E0KF@*Oddd]ddd1F@*O*O@Fddd1ddd]*O@Fddd1GY@@$M@FF@*Oddd]ddd1F@*O*O@Fddd1ddd]*O@Fddd1GY@@$M@F0W$T>/Hccc{dddIcccIccc{dddycccIdddIddd{0I>>/Hddd{dddIcccIddd{/H>>0I0I>>/H0H>>0H/H>>0Iddd{dddIdddIdddy0H>>0H/H>>0I>00>cccGcccGdddEdddI>00>dddGdddG>00>>00>>00>>00>dddIcccE>00>>00>>//>dddGcccGdddEcccI>//>dddGdddG>//>>0/>>//>>/0>cccIcccE>//>>/0>/H>>.HdddwcccEdddEdddwcccucccEcccEcccw/H>>.HdddwcccEdddEdddw.H>>/H/H>>.H/H>>/H.H>>/HdddwdddEcccEdddu/H>>/H.H>>/H/QII/PdddydddGdddGdddydddwdddGdddGdddy/QII/PdddydddGdddGdddy/PII/QdddydddGdddGdddwdddydddGdddGdddy/PII/QdddydddGdddGdddw/QII/Q/PII/QJ@@IcccGcccGdddEcccIJ@@IdddGdddGI@@JcccIdddEcccGdddGI@@JcccIdddEI@@II@@JJ@@IcccGdddGdddEcccIJ@@IdddGdddGI@@JcccIdddEdddGcccGI@@JcccIdddEI@@II@@J/QII.PcccwdddEcccEdddwcccucccEcccEdddy/QII.PdddwdddEdddEdddw.PII/QdddydddEcccEddducccwcccEcccEcccw.PII/QdddydddEcccEdddu/QII/Q.PII/Q/YUU.Y/YUU/YddducccEcccEdddw/YUU.YdddwdddEdddEdddwdddudddEcccEdddw/YUU.YdddwcccEdddEdddw.YUU/YdddwcccEdddEdddudddwcccEdddEdddw.YUU/YUOPUUPPUdddEdddIUPPUdddGdddGdddEdddIUPOUdddGdddGUOPUcccIcccEdddGdddGUOPUUPPUUPPUdddEcccIUPPUdddGdddGdddEdddIUPPUdddGdddGUPPUdddIdddEdddGdddGUPPU0YUU/Y/YUU/YcccwdddIdddIddd{0YUU/YdddycccIdddIdddycccwdddIdddIccc{0YUU/YcccydddIdddIcccy/YUU0Yddd{dddIdddIdddwcccydddIdddIcccy/YUU0Y0aaa/a0aaa0adddycccIdddIccc{0aaa/a0aaa0a/aaa0a0aaa/addd{dddIdddIddd{/aaa0accc{cccIdddIdddyddd{dddIdddIddd{/aaa0aa__aa__adddEdddIa__aa__aa__aa__adddGdddGa__adddIcccEdddGdd
dGa__aa``aa``adddEdddIa``aa``aa``aa``adddGdddGa``acccIcccEdddGdddGa``a/aaa.a/aaa/acccudddEcccEdddw/aaa.a/aaa/a.aaa/a/aaa.adddwcccEcccEcccw.aaa/adddwdddEdddEcccudddwcccEcccEcccw.aaa/a/fff/f/fff/f/fff/fcccydddGcccGcccwcccycccGcccGcccycccwcccGdddGdddycccycccGcccGcccw/fff/f/fff/fcccycccGcccGcccw/fff/f/fff/ffggffhhffhhfdddIcccEdddGdddGcccEdddIcccIdddEfggffghfcccIdddEfggffghffghffghffhhfdddIcccEdddGdddGcccEdddIcccIcccEfggffghfcccIcccEfggffghf/fff.f/fff/f.fff/fdddycccEdddEcccudddwdddEcccEdddwcccudddEcccEdddycccydddEdddEdddu/fff/f.fff/fcccydddEdddEdddu/fff/f.fff/f/nrr.n/nrr/n.nrr/n/nrr.ncccwcccEcccEcccw.nrr/n/nrr.n/nrr/ncccucccEcccEcccwcccwcccEcccEcccu/nrr/n.nrr/nrwxrrxwrrwxrrxxrdddGdddGrxxrrxwrrwwrcccEcccIcccIcccErwwrrwxrrxxrrxxrrxxrrxxrdddGdddGrxxrrxxrrxxrcccEdddIdddIcccErxxrrxxr0nrr/n/nrr/n/nrr0n0nrr/ndddydddIdddIdddy/nrr0n0nrr/n/nrr/ndddwdddIdddIddd{ddd{dddIdddIdddw/nrr/n/nrr0n0v}}/v0v}}0v/v}}0v0v}}/v0v}}0v/v}}0v0v}}/v0v}}0vdddydddIcccIddd{ddd{cccIdddIdddy0v}}0v/v}}0v}~~~~}}~~~~}}~~~dddEdddIdddIcccE~~~}}~~~~}}~~~~}}~~~dddEdddIdddIdddE~~~}/v~~.w/w~~/w.w~~/v/v~~.w/w~~/w.w~~/v/v~~.w/w~~/wcccudddEcccEdddwdddwdddEdddEcccu/w~~/w.w~~/v//////////////cccydddGdddGcccy////////cccGdddGdddGdddG/.//.//.//.//.cccwdddEcccEcccw.//.//.//.//.//.//.//.//.//.//./0////00////00////00////0(@ B<<%%%%%%((QQ%%%%%%%%QQ((%%%%%%< v documentation". html_title = '{0} v{1}'.format(project, release) # Output file base name for HTML help builder. htmlhelp_basename = project + 'doc' # Static files to copy after template files html_static_path = ['_static'] html_style = 'ccdproc.css' # -- Options for LaTeX output -------------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [('index', project + '.tex', project + u' Documentation', author, 'manual')] # -- Options for manual page output -------------------------------------------- # One entry per manual page. 
List of tuples # (source start file, name, description, authors, manual section). man_pages = [('index', project.lower(), project + u' Documentation', [author], 1)] ## -- Options for the edit_on_github extension ---------------------------------------- if eval(setup_cfg.get('edit_on_github')): extensions += ['astropy_helpers.sphinx.ext.edit_on_github'] versionmod = __import__(setup_cfg['package_name'] + '.version') edit_on_github_project = setup_cfg['github_project'] if versionmod.version.release: edit_on_github_branch = "v" + versionmod.version.version else: edit_on_github_branch = "master" edit_on_github_source_root = "" edit_on_github_doc_root = "docs" github_issues_url = 'https://github.com/astropy/ccdproc/issues/' nitpicky = True nitpick_ignore = [] for line in open('nitpick-exceptions'): if line.strip() == "" or line.startswith("#"): continue dtype, target = line.split(None, 1) target = target.strip() nitpick_ignore.append((dtype, six.u(target))) ccdproc-1.3.0.post1/docs/Makefile0000664000175000017500000001116413207605210020244 0ustar mseifertmseifert00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest #This is needed with git because git doesn't create a dir if it's empty $(shell [ -d "_static" ] || mkdir -p _static) help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR) -rm -rf api html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." 
htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." 
linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." ccdproc-1.3.0.post1/docs/rtd-pip-requirements0000664000175000017500000000006413207605210022624 0ustar mseifertmseifert00000000000000numpy>=1.9 scipy scikit-image numpydoc astropy>=1.0 ccdproc-1.3.0.post1/docs/nitpick-exceptions0000664000175000017500000000032713207605210022346 0ustar mseifertmseifert00000000000000# Temporary exception of inherited astropy classes py:class astropy.nddata.mixins.ndio.NDIOMixin py:class astropy.nddata.mixins.ndslicing.NDSlicingMixin py:class astropy.nddata.mixins.ndarithmetic.NDArithmeticMixin ccdproc-1.3.0.post1/docs/make.bat0000664000175000017500000001064113207605210020210 0ustar mseifertmseifert00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. 
changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end ccdproc-1.3.0.post1/docs/authors_for_sphinx.rst0000664000175000017500000000006713207605210023262 0ustar mseifertmseifert00000000000000Contributors ************ .. include:: ../AUTHORS.rst ccdproc-1.3.0.post1/docs/index.rst0000664000175000017500000000116613207605210020446 0ustar mseifertmseifert00000000000000 ================ CCDPROC ================ Welcome to the ccdproc documentation! Ccdproc is is an affiliated package for the AstroPy package for basic data reductions of CCD images. 
The ccdproc package provides many of the necessary tools for processing of ccd images built on a framework to provide error propagation and bad pixel tracking throughout the reduction process. Documentation ============= The documentation for this package is here: .. toctree:: :maxdepth: 2 ccdproc/install.rst .. toctree:: :maxdepth: 3 ccdproc/index.rst .. toctree:: :maxdepth: 1 authors_for_sphinx changelog license ccdproc-1.3.0.post1/README.rst0000664000175000017500000000633413207605210017346 0ustar mseifertmseifert00000000000000ccdproc ======= .. image:: https://travis-ci.org/astropy/ccdproc.svg?branch=master :target: https://travis-ci.org/astropy/ccdproc .. image:: https://coveralls.io/repos/astropy/ccdproc/badge.svg :target: https://coveralls.io/r/astropy/ccdproc .. image:: https://zenodo.org/badge/12153/astropy/ccdproc.svg :target: https://zenodo.org/badge/latestdoi/12153/astropy/ccdproc Ccdproc is is an affiliated package for the AstroPy package for basic data reductions of CCD images. The ccdproc package provides many of the necessary tools for processing of ccd images built on a framework to provide error propagation and bad pixel tracking throughout the reduction process. Ccdproc can currently be installed via pip or from the source code. For installation instructions, see the `online documentation`_ or docs/install.rst in this source distribution. Documentation is at `ccdproc.readthedocs.io `_ Contributing ------------ We have had the first stable release, but there is still plenty to do! Please open a new issue or new pull request for bugs, feedback, or new features you would like to see. If there is an issue you would like to work on, please leave a comment and we will be happy to assist. New contributions and contributors are very welcome! New to github or open source projects? If you are unsure about where to start or haven't used github before, please feel free to email `@crawfordsm`_ or `@mwcraig`_ . 
We will more than happily help you make your first contribution. Feedback and feature requests? Is there something missing you would like to see? Please open an issue or send an email to `@mwcraig`_ or `@crawfordsm`_. Ccdproc follows the `Astropy Code of Conduct`_ and strives to provide a welcoming community to all of our users and contributors. Want more information about how to make a contribution? Take a look at the astropy `contributing`_ and `developer`_ documentation. Acknowledgements ---------------- If you have found ccdproc useful to your research, please considering adding a citation to `ccdproc contributors; Craig, M. W.; Crawford, S. M.; Deil, Christoph; Gasdia, Forrest; Gomez, Carlos; Günther, Hans Moritz; Heidt, Nathan; Horton, Anthony; Karr, Jennifer; Nelson, Stefan; Ninan, Joe Phillip; Pattnaik, Punyaslok; Rol, Evert; Schoenell, William; Seifert, Michael; Singh, Sourav; Sipocz, Brigitta; Stotts, Connor; Streicher, Ole; Tollerud, Erik; and Walker, Nathan, 2015, Astrophysics Source Code Library, 1510.007, DOI: 10.5281/zenodo.47652 `_ Thanks to Kyle Barbary (`@kbarbary`_) for designing the `ccdproc` logo. .. _Astropy: http://www.astropy.org/ .. _git: http://git-scm.com/ .. _github: http://github.com .. _Cython: http://cython.org/ .. _online documentation: http://ccdproc.readthedocs.io/en/latest/ccdproc/install.html .. _@kbarbary: https://github.com/kbarbary .. _@crawfordsm: https://github.com/crawfordsm .. _@mwcraig: https://github.com/mwcraig .. _Astropy Code of Conduct: http://www.astropy.org/about.html#codeofconduct .. _contributing: http://docs.astropy.org/en/stable/index.html#contributing .. _developer: http://docs.astropy.org/en/stable/index.html#developer-documentation