astropy-helpers-1.1.1/0000755001134200020070000000000012637041310015554 5ustar embrayscience00000000000000astropy-helpers-1.1.1/setup.py0000755001134200020070000000402312637041237017300 0ustar embrayscience00000000000000#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import ah_bootstrap import pkg_resources from setuptools import setup from astropy_helpers.setup_helpers import register_commands, get_package_info from astropy_helpers.version_helpers import generate_version_py NAME = 'astropy_helpers' VERSION = '1.1.1' RELEASE = 'dev' not in VERSION DOWNLOAD_BASE_URL = 'http://pypi.python.org/packages/source/a/astropy-helpers' generate_version_py(NAME, VERSION, RELEASE, False, uses_git=not RELEASE) # Use the updated version including the git rev count from astropy_helpers.version import version as VERSION cmdclass = register_commands(NAME, VERSION, RELEASE) # This package actually doesn't use the Astropy test command del cmdclass['test'] setup( name=pkg_resources.safe_name(NAME), # astropy_helpers -> astropy-helpers version=VERSION, description='Utilities for building and installing Astropy, Astropy ' 'affiliated packages, and their respective documentation.', author='The Astropy Developers', author_email='astropy.team@gmail.com', license='BSD', url='http://astropy.org', long_description=open('README.rst').read(), download_url='{0}/astropy-helpers-{1}.tar.gz'.format(DOWNLOAD_BASE_URL, VERSION), classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Framework :: Setuptools Plugin', 'Framework :: Sphinx :: Extension', 'Framework :: Sphinx :: Theme', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Archiving :: Packaging' ], cmdclass=cmdclass, zip_safe=False, **get_package_info(exclude=['astropy_helpers.tests']) ) astropy-helpers-1.1.1/README.rst0000644001134200020070000000323112602613465017252 0ustar embrayscience00000000000000astropy-helpers =============== This project provides a Python package, ``astropy_helpers``, which includes many build, installation, and documentation-related tools used by the Astropy project, but packaged separately for use by other projects that wish to leverage this work. The motivation behind this package and details of its implementation are in the accepted `Astropy Proposal for Enhancement (APE) 4 `_. ``astropy_helpers`` includes a special "bootstrap" module called ``ah_bootstrap.py`` which is intended to be used by a project's setup.py in order to ensure that the ``astropy_helpers`` package is available for build/installation. This is similar to the ``ez_setup.py`` module that is shipped with some projects to bootstrap `setuptools `_. As described in APE4, the version numbers for ``astropy_helpers`` follow the corresponding major/minor version of the `astropy core package `_, but with an independent sequence of micro (bugfix) version numbers. Hence, the initial release is 0.4, in parallel with Astropy v0.4, which will be the first version of Astropy to use ``astropy-helpers``. For examples of how to implement ``astropy-helpers`` in a project, see the ``setup.py`` and ``setup.cfg`` files of the `Affiliated package template `_. .. image:: https://travis-ci.org/astropy/astropy-helpers.png :target: https://travis-ci.org/astropy/astropy-helpers .. 
image:: https://coveralls.io/repos/astropy/astropy-helpers/badge.png :target: https://coveralls.io/r/astropy/astropy-helpers astropy-helpers-1.1.1/LICENSE.rst0000644001134200020070000000272312602613464017403 0ustar embrayscience00000000000000Copyright (c) 2014, Astropy Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. astropy-helpers-1.1.1/PKG-INFO0000644001134200020070000000563212637041310016657 0ustar embrayscience00000000000000Metadata-Version: 1.1 Name: astropy-helpers Version: 1.1.1 Summary: Utilities for building and installing Astropy, Astropy affiliated packages, and their respective documentation. Home-page: http://astropy.org Author: The Astropy Developers Author-email: astropy.team@gmail.com License: BSD Download-URL: http://pypi.python.org/packages/source/a/astropy-helpers/astropy-helpers-1.1.1.tar.gz Description: astropy-helpers =============== This project provides a Python package, ``astropy_helpers``, which includes many build, installation, and documentation-related tools used by the Astropy project, but packaged separately for use by other projects that wish to leverage this work. The motivation behind this package and details of its implementation are in the accepted `Astropy Proposal for Enhancement (APE) 4 `_. ``astropy_helpers`` includes a special "bootstrap" module called ``ah_bootstrap.py`` which is intended to be used by a project's setup.py in order to ensure that the ``astropy_helpers`` package is available for build/installation. This is similar to the ``ez_setup.py`` module that is shipped with some projects to bootstrap `setuptools `_. As described in APE4, the version numbers for ``astropy_helpers`` follow the corresponding major/minor version of the `astropy core package `_, but with an independent sequence of micro (bugfix) version numbers. Hence, the initial release is 0.4, in parallel with Astropy v0.4, which will be the first version of Astropy to use ``astropy-helpers``. For examples of how to implement ``astropy-helpers`` in a project, see the ``setup.py`` and ``setup.cfg`` files of the `Affiliated package template `_. .. 
image:: https://travis-ci.org/astropy/astropy-helpers.png :target: https://travis-ci.org/astropy/astropy-helpers .. image:: https://coveralls.io/repos/astropy/astropy-helpers/badge.png :target: https://coveralls.io/r/astropy/astropy-helpers Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Framework :: Setuptools Plugin Classifier: Framework :: Sphinx :: Extension Classifier: Framework :: Sphinx :: Theme Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Software Development :: Build Tools Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: System :: Archiving :: Packaging astropy-helpers-1.1.1/ah_bootstrap.py0000644001134200020070000010650212602613466020630 0ustar embrayscience00000000000000""" This bootstrap module contains code for ensuring that the astropy_helpers package will be importable by the time the setup.py script runs. It also includes some workarounds to ensure that a recent-enough version of setuptools is being used for the installation. This module should be the first thing imported in the setup.py of distributions that make use of the utilities in astropy_helpers. If the distribution ships with its own copy of astropy_helpers, this module will first attempt to import from the shipped copy. However, it will also check PyPI to see if there are any bug-fix releases on top of the current version that may be useful to get past platform-specific bugs that have been fixed. When running setup.py, use the ``--offline`` command-line option to disable the auto-upgrade checks. When this module is imported or otherwise executed it automatically calls a main function that attempts to read the project's setup.cfg file, which it checks for a configuration section called ``[ah_bootstrap]`` the presences of that section, and options therein, determine the next step taken: If it contains an option called ``auto_use`` with a value of ``True``, it will automatically call the main function of this module called `use_astropy_helpers` (see that function's docstring for full details). Otherwise no further action is taken (however, ``ah_bootstrap.use_astropy_helpers`` may be called manually from within the setup.py script). Additional options in the ``[ah_boostrap]`` section of setup.cfg have the same names as the arguments to `use_astropy_helpers`, and can be used to configure the bootstrap script when ``auto_use = True``. See https://github.com/astropy/astropy-helpers for more details, and for the latest version of this module. 
""" import contextlib import errno import imp import io import locale import os import re import subprocess as sp import sys try: from ConfigParser import ConfigParser, RawConfigParser except ImportError: from configparser import ConfigParser, RawConfigParser if sys.version_info[0] < 3: _str_types = (str, unicode) _text_type = unicode PY3 = False else: _str_types = (str, bytes) _text_type = str PY3 = True # What follows are several import statements meant to deal with install-time # issues with either missing or misbehaving pacakges (including making sure # setuptools itself is installed): # Some pre-setuptools checks to ensure that either distribute or setuptools >= # 0.7 is used (over pre-distribute setuptools) if it is available on the path; # otherwise the latest setuptools will be downloaded and bootstrapped with # ``ez_setup.py``. This used to be included in a separate file called # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py try: import pkg_resources _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') # This may raise a DistributionNotFound in which case no version of # setuptools or distribute is properly installed _setuptools = pkg_resources.get_distribution('setuptools') if _setuptools not in _setuptools_req: # Older version of setuptools; check if we have distribute; again if # this results in DistributionNotFound we want to give up _distribute = pkg_resources.get_distribution('distribute') if _setuptools != _distribute: # It's possible on some pathological systems to have an old version # of setuptools and distribute on sys.path simultaneously; make # sure distribute is the one that's used sys.path.insert(1, _distribute.location) _distribute.activate() imp.reload(pkg_resources) except: # There are several types of exceptions that can occur here; if all else # fails bootstrap and use the bootstrapped version from ez_setup import use_setuptools use_setuptools() # Note: The following import is required as a workaround to # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this # module now, it will get cleaned up after `run_setup` is called, but that will # later cause the TemporaryDirectory class defined in it to stop working when # used later on by setuptools try: import setuptools.py31compat except ImportError: pass # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass # End compatibility imports... # In case it didn't successfully import before the ez_setup checks import pkg_resources from setuptools import Distribution from setuptools.package_index import PackageIndex from setuptools.sandbox import run_setup from distutils import log from distutils.debug import DEBUG # TODO: Maybe enable checking for a specific version of astropy_helpers? 
DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' # Defaults for other options DOWNLOAD_IF_NEEDED = True INDEX_URL = 'https://pypi.python.org/simple' USE_GIT = True OFFLINE = False AUTO_UPGRADE = True # A list of all the configuration options and their required types CFG_OPTIONS = [ ('auto_use', bool), ('path', str), ('download_if_needed', bool), ('index_url', str), ('use_git', bool), ('offline', bool), ('auto_upgrade', bool) ] class _Bootstrapper(object): """ Bootstrapper implementation. See ``use_astropy_helpers`` for parameter documentation. """ def __init__(self, path=None, index_url=None, use_git=None, offline=None, download_if_needed=None, auto_upgrade=None): if path is None: path = PACKAGE_NAME if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') if PY3 and not isinstance(path, _text_type): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode self.path = path # Set other option attributes, using defaults where necessary self.index_url = index_url if index_url is not None else INDEX_URL self.offline = offline if offline is not None else OFFLINE # If offline=True, override download and auto-upgrade if self.offline: download_if_needed = False auto_upgrade = False self.download = (download_if_needed if download_if_needed is not None else DOWNLOAD_IF_NEEDED) self.auto_upgrade = (auto_upgrade if auto_upgrade is not None else AUTO_UPGRADE) # If this is a release then the .git directory will not exist so we # should not use git. git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) if use_git is None and not git_dir_exists: use_git = False self.use_git = use_git if use_git is not None else USE_GIT # Declared as False by default--later we check if astropy-helpers can be # upgraded from PyPI, but only if not using a source distribution (as in # the case of import from a git submodule) self.is_submodule = False @classmethod def main(cls, argv=None): if argv is None: argv = sys.argv config = cls.parse_config() config.update(cls.parse_command_line(argv)) auto_use = config.pop('auto_use', False) bootstrapper = cls(**config) if auto_use: # Run the bootstrapper, otherwise the setup.py is using the old # use_astropy_helpers() interface, in which case it will run the # bootstrapper manually after reconfiguring it. bootstrapper.run() return bootstrapper @classmethod def parse_config(cls): if not os.path.exists('setup.cfg'): return {} cfg = ConfigParser() try: cfg.read('setup.cfg') except Exception as e: if DEBUG: raise log.error( "Error reading setup.cfg: {0!r}\n{1} will not be " "automatically bootstrapped and package installation may fail." "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) return {} if not cfg.has_section('ah_bootstrap'): return {} config = {} for option, type_ in CFG_OPTIONS: if not cfg.has_option('ah_bootstrap', option): continue if type_ is bool: value = cfg.getboolean('ah_bootstrap', option) else: value = cfg.get('ah_bootstrap', option) config[option] = value return config @classmethod def parse_command_line(cls, argv=None): if argv is None: argv = sys.argv config = {} # For now we just pop recognized ah_bootstrap options out of the # arg list. This is imperfect; in the unlikely case that a setup.py # custom command or even custom Distribution class defines an argument # of the same name then we will break that. 
However there's a catch22 # here that we can't just do full argument parsing right here, because # we don't yet know *how* to parse all possible command-line arguments. if '--no-git' in argv: config['use_git'] = False argv.remove('--no-git') if '--offline' in argv: config['offline'] = True argv.remove('--offline') return config def run(self): strategies = ['local_directory', 'local_file', 'index'] dist = None # First, remove any previously imported versions of astropy_helpers; # this is necessary for nested installs where one package's installer # is installing another package via setuptools.sandbox.run_setup, as in # the case of setup_requires for key in list(sys.modules): try: if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): del sys.modules[key] except AttributeError: # Sometimes mysterious non-string things can turn up in # sys.modules continue # Check to see if the path is a submodule self.is_submodule = self._check_submodule() for strategy in strategies: method = getattr(self, 'get_{0}_dist'.format(strategy)) dist = method() if dist is not None: break else: raise _AHBootstrapSystemExit( "No source found for the {0!r} package; {0} must be " "available and importable as a prerequisite to building " "or installing this package.".format(PACKAGE_NAME)) # This is a bit hacky, but if astropy_helpers was loaded from a # directory/submodule its Distribution object gets a "precedence" of # "DEVELOP_DIST". However, in other cases it gets a precedence of # "EGG_DIST". However, when activing the distribution it will only be # placed early on sys.path if it is treated as an EGG_DIST, so always # do that dist = dist.clone(precedence=pkg_resources.EGG_DIST) # Otherwise we found a version of astropy-helpers, so we're done # Just active the found distribution on sys.path--if we did a # download this usually happens automatically but it doesn't hurt to # do it again # Note: Adding the dist to the global working set also activates it # (makes it importable on sys.path) by default. try: pkg_resources.working_set.add(dist, replace=True) except TypeError: # Some (much) older versions of setuptools do not have the # replace=True option here. These versions are old enough that all # bets may be off anyways, but it's easy enough to work around just # in case... if dist.key in pkg_resources.working_set.by_key: del pkg_resources.working_set.by_key[dist.key] pkg_resources.working_set.add(dist) @property def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname)) def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. 
""" if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_index_dist(self): if not self.download: log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) return None log.warn( "Downloading {0!r}; run setup.py with the --offline option to " "force offline installation.".format(DIST_NAME)) try: dist = self._do_download() except Exception as e: if DEBUG: raise log.warn( 'Failed to download and/or install {0!r} from {1!r}:\n' '{2}'.format(DIST_NAME, self.index_url, str(e))) dist = None # No need to run auto-upgrade here since we've already presumably # gotten the most up-to-date version from the package index return dist def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... 
return dist return dist def _do_download(self, version='', find_links=None): if find_links: allow_hosts = '' index_url = None else: allow_hosts = None index_url = self.index_url # Annoyingly, setuptools will not handle other arguments to # Distribution (such as options) before handling setup_requires, so it # is not straightforward to programmatically augment the arguments which # are passed to easy_install class _Distribution(Distribution): def get_option_dict(self, command_name): opts = Distribution.get_option_dict(self, command_name) if command_name == 'easy_install': if find_links is not None: opts['find_links'] = ('setup script', find_links) if index_url is not None: opts['index_url'] = ('setup script', index_url) if allow_hosts is not None: opts['allow_hosts'] = ('setup script', allow_hosts) return opts if version: req = '{0}=={1}'.format(DIST_NAME, version) else: req = DIST_NAME attrs = {'setup_requires': [req]} try: if DEBUG: _Distribution(attrs=attrs) else: with _silence(): _Distribution(attrs=attrs) # If the setup_requires succeeded it will have added the new dist to # the main working_set return pkg_resources.working_set.by_key.get(DIST_NAME) except Exception as e: if DEBUG: raise msg = 'Error retrieving {0} from {1}:\n{2}' if find_links: source = find_links[0] elif index_url != INDEX_URL: source = index_url else: source = 'PyPI' raise Exception(msg.format(DIST_NAME, source, repr(e))) def _do_upgrade(self, dist): # Build up a requirement for a higher bugfix release but a lower minor # release (so API compatibility is guaranteed) next_version = _next_version(dist.parsed_version) req = pkg_resources.Requirement.parse( '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) package_index = PackageIndex(index_url=self.index_url) upgrade = package_index.obtain(req) if upgrade is not None: return self._do_download(version=upgrade.version) def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git() def _check_submodule_using_git(self): """ Check if the given path is a git submodule. If so, attempt to initialize and/or update the submodule if needed. This function makes calls to the ``git`` command in subprocesses. The ``_check_submodule_no_git`` option uses pure Python to check if the given path looks like a git submodule, but it cannot perform updates. """ cmd = ['git', 'submodule', 'status', '--', self.path] try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except _CommandNotFound: # The git command simply wasn't found; this is most likely the # case on user systems that don't have git and are simply # trying to install the package from PyPI or a source # distribution. Silently ignore this case and simply don't try # to use submodules return False stderr = stderr.strip() if returncode != 0 and stderr: # Unfortunately the return code alone cannot be relied on, as # earlier versions of git returned 0 even if the requested submodule # does not exist # This is a warning that occurs in perl (from running git submodule) # which only occurs with a malformatted locale setting which can # happen sometimes on OSX. 
See again # https://github.com/astropy/astropy/issues/2749 perl_warning = ('perl: warning: Falling back to the standard locale ' '("C").') if not stderr.strip().endswith(perl_warning): # Some other unknown error condition occurred log.warn('git submodule command failed ' 'unexpectedly:\n{0}'.format(stderr)) return False # Output of `git submodule status` is as follows: # # 1: Status indicator: '-' for submodule is uninitialized, '+' if # submodule is initialized but is not at the commit currently indicated # in .gitmodules (and thus needs to be updated), or 'U' if the # submodule is in an unstable state (i.e. has merge conflicts) # # 2. SHA-1 hash of the current commit of the submodule (we don't really # need this information but it's useful for checking that the output is # correct) # # 3. The output of `git describe` for the submodule's current commit # hash (this includes for example what branches the commit is on) but # only if the submodule is initialized. We ignore this information for # now _git_submodule_status_re = re.compile( '^(?P[+-U ])(?P[0-9a-f]{40}) ' '(?P\S+)( .*)?$') # The stdout should only contain one line--the status of the # requested submodule m = _git_submodule_status_re.match(stdout) if m: # Yes, the path *is* a git submodule self._update_submodule(m.group('submodule'), m.group('status')) return True else: log.warn( 'Unexpected output from `git submodule status`:\n{0}\n' 'Will attempt import from {1!r} regardless.'.format( stdout, self.path)) return False def _check_submodule_no_git(self): """ Like ``_check_submodule_using_git``, but simply parses the .gitmodules file to determine if the supplied path is a git submodule, and does not exec any subprocesses. This can only determine if a path is a submodule--it does not perform updates, etc. This function may need to be updated if the format of the .gitmodules file is changed between git versions. """ gitmodules_path = os.path.abspath('.gitmodules') if not os.path.isfile(gitmodules_path): return False # This is a minimal reader for gitconfig-style files. It handles a few of # the quirks that make gitconfig files incompatible with ConfigParser-style # files, but does not support the full gitconfig syntax (just enough # needed to read a .gitmodules file). 
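        # For illustration only (hypothetical contents, not read from this
        # repository), a .gitmodules file that this reader has to cope with
        # usually looks something like:
        #
        #     [submodule "astropy_helpers"]
        #         path = astropy_helpers
        #         url = https://github.com/astropy/astropy-helpers.git
        #
        # The leading whitespace on the option lines and any '#'/';' comment
        # lines are what the normalization below strips out before the
        # cleaned-up text is handed to RawConfigParser.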
gitmodules_fileobj = io.StringIO() # Must use io.open for cross-Python-compatible behavior wrt unicode with io.open(gitmodules_path) as f: for line in f: # gitconfig files are more flexible with leading whitespace; just # go ahead and remove it line = line.lstrip() # comments can start with either # or ; if line and line[0] in (':', ';'): continue gitmodules_fileobj.write(line) gitmodules_fileobj.seek(0) cfg = RawConfigParser() try: cfg.readfp(gitmodules_fileobj) except Exception as exc: log.warn('Malformatted .gitmodules file: {0}\n' '{1} cannot be assumed to be a git submodule.'.format( exc, self.path)) return False for section in cfg.sections(): if not cfg.has_option(section, 'path'): continue submodule_path = cfg.get(section, 'path').rstrip(os.sep) if submodule_path == self.path.rstrip(os.sep): return True return False def _update_submodule(self, submodule, status): if status == ' ': # The submodule is up to date; no action necessary return elif status == '-': if self.offline: raise _AHBootstrapSystemExit( "Cannot initialize the {0} submodule in --offline mode; " "this requires being able to clone the submodule from an " "online repository.".format(submodule)) cmd = ['update', '--init'] action = 'Initializing' elif status == '+': cmd = ['update'] action = 'Updating' if self.offline: cmd.append('--no-fetch') elif status == 'U': raise _AHBoostrapSystemExit( 'Error: Submodule {0} contains unresolved merge conflicts. ' 'Please complete or abandon any changes in the submodule so that ' 'it is in a usable state, then try again.'.format(submodule)) else: log.warn('Unknown status {0!r} for git submodule {1!r}. Will ' 'attempt to use the submodule as-is, but try to ensure ' 'that the submodule is in a clean state and contains no ' 'conflicts or errors.\n{2}'.format(status, submodule, _err_help_msg)) return err_msg = None cmd = ['git', 'submodule'] + cmd + ['--', submodule] log.warn('{0} {1} submodule with: `{2}`'.format( action, submodule, ' '.join(cmd))) try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except OSError as e: err_msg = str(e) else: if returncode != 0: err_msg = stderr if err_msg is not None: log.warn('An unexpected error occurred updating the git submodule ' '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) class _CommandNotFound(OSError): """ An exception raised when a command run with run_cmd is not found on the system. """ def run_cmd(cmd): """ Run a command in a subprocess, given as a list of command-line arguments. Returns a ``(returncode, stdout, stderr)`` tuple. """ try: p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) # XXX: May block if either stdout or stderr fill their buffers; # however for the commands this is currently used for that is # unlikely (they should have very brief output) stdout, stderr = p.communicate() except OSError as e: if DEBUG: raise if e.errno == errno.ENOENT: msg = 'Command not found: `{0}`'.format(' '.join(cmd)) raise _CommandNotFound(msg, cmd) else: raise _AHBoostrapSystemExit( 'An unexpected error occurred when running the ' '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) # Can fail of the default locale is not configured properly. See # https://github.com/astropy/astropy/issues/2749. For the purposes under # consideration 'latin1' is an acceptable fallback. 
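    # A small hedged illustration (hypothetical bytes, not produced by this
    # function) of why 'latin1' plus errors='replace' is a safe last resort:
    #
    #     b'warning: \xe2\x80\x98locale\xe2\x80\x99'.decode('latin1', 'replace')
    #
    # always yields a str (possibly with a few garbled characters) rather than
    # raising, which is sufficient for the log messages this output feeds.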
try: stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' except ValueError: # Due to an OSX oddity locale.getdefaultlocale() can also crash # depending on the user's locale/language settings. See: # http://bugs.python.org/issue18378 stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible if not isinstance(stdout, _text_type): stdout = stdout.decode(stdio_encoding, 'replace') if not isinstance(stderr, _text_type): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) def _next_version(version): """ Given a parsed version from pkg_resources.parse_version, returns a new version string with the next minor version. Examples ======== >>> _next_version(pkg_resources.parse_version('1.2.3')) '1.3.0' """ if hasattr(version, 'base_version'): # New version parsing from setuptools >= 8.0 if version.base_version: parts = version.base_version.split('.') else: parts = [] else: parts = [] for part in version: if part.startswith('*'): break parts.append(part) parts = [int(p) for p in parts] if len(parts) < 3: parts += [0] * (3 - len(parts)) major, minor, micro = parts[:3] return '{0}.{1}.{2}'.format(major, minor + 1, 0) class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x encoding = 'utf-8' def write(self, s): pass def flush(self): pass @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr _err_help_msg = """ If the problem persists consider installing astropy_helpers manually using pip (`pip install astropy_helpers`) or by manually downloading the source archive, extracting it, and installing by running `python setup.py install` from the root of the extracted source code. """ class _AHBootstrapSystemExit(SystemExit): def __init__(self, *args): if not args: msg = 'An unknown problem occurred bootstrapping astropy_helpers.' else: msg = args[0] msg += '\n' + _err_help_msg super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) if sys.version_info[:2] < (2, 7): # In Python 2.6 the distutils log does not log warnings, errors, etc. 
to # stderr so we have to wrap it to ensure consistency at least in this # module import distutils class log(object): def __getattr__(self, attr): return getattr(distutils.log, attr) def warn(self, msg, *args): self._log_to_stderr(distutils.log.WARN, msg, *args) def error(self, msg): self._log_to_stderr(distutils.log.ERROR, msg, *args) def fatal(self, msg): self._log_to_stderr(distutils.log.FATAL, msg, *args) def log(self, level, msg, *args): if level in (distutils.log.WARN, distutils.log.ERROR, distutils.log.FATAL): self._log_to_stderr(level, msg, *args) else: distutils.log.log(level, msg, *args) def _log_to_stderr(self, level, msg, *args): # This is the only truly 'public' way to get the current threshold # of the log current_threshold = distutils.log.set_threshold(distutils.log.WARN) distutils.log.set_threshold(current_threshold) if level >= current_threshold: if args: msg = msg % args sys.stderr.write('%s\n' % msg) sys.stderr.flush() log = log() BOOTSTRAPPER = _Bootstrapper.main() def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. 
""" global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run() astropy-helpers-1.1.1/CHANGES.rst0000644001134200020070000003126512637041237017375 0ustar embrayscience00000000000000astropy-helpers Changelog ========================= 1.1.1 (2015-12-23) ------------------ - Fixed crash in build with ``AttributeError: cython_create_listing`` with older versions of setuptools. [#209] 1.1 (2015-12-10) ---------------- - The original ``AstropyTest`` class in ``astropy_helpers``, which implements the ``setup.py test`` command, is deprecated in favor of moving the implementation of that command closer to the actual Astropy test runner in ``astropy.tests``. Now a dummy ``test`` command is provided solely for informing users that they need ``astropy`` installed to run the tests (however, the previous, now deprecated implementation is still provided and continues to work with older versions of Astropy). See the related issue for more details. [#184] - Added a useful new utility function to ``astropy_helpers.utils`` called ``find_data_files``. This is similar to the ``find_packages`` function in setuptools in that it can be used to search a package for data files (matching a pattern) that can be passed to the ``package_data`` argument for ``setup()``. See the docstring to ``astropy_helpers.utils.find_data_files`` for more details. [#42] - The ``astropy_helpers`` module now sets the global ``_ASTROPY_SETUP_`` flag upon import (from within a ``setup.py``) script, so it's not necessary to have this in the ``setup.py`` script explicitly. If in doubt though, there's no harm in setting it twice. Putting it in ``astropy_helpers`` just ensures that any other imports that occur during build will have this flag set. [#191] - It is now possible to use Cython as a ``setup_requires`` build requirement, and still build Cython extensions even if Cython wasn't available at the beginning of the build processes (that is, is automatically downloaded via setuptools' processing of ``setup_requires``). [#185] - Moves the ``adjust_compiler`` check into the ``build_ext`` command itself, so it's only used when actually building extension modules. This also deprecates the stand-alone ``adjust_compiler`` function. [#76] - When running the ``build_sphinx`` / ``build_docs`` command with the ``-w`` option, the output from Sphinx is streamed as it runs instead of silently buffering until the doc build is complete. [#197] 1.0.6 (2015-12-04) ------------------ - Fixed bug where running ``./setup.py build_sphinx`` could return successfully even when the build was not successful (and should have returned a non-zero error code). [#199] 1.0.5 (2015-10-02) ------------------ - Fixed a regression in the ``./setup.py test`` command that was introduced in v1.0.4. 1.0.4 (2015-10-02) ------------------ - Fixed issue with the sphinx documentation css where the line numbers for code blocks were not aligned with the code. [#179] - Fixed crash that could occur when trying to build Cython extension modules when Cython isn't installed. Normally this still results in a failed build, but was supposed to provide a useful error message rather than crash outright (this was a regression introduced in v1.0.3). [#181] - Fixed a crash that could occur on Python 3 when a working C compiler isn't found. 
[#182] - Quieted warnings about deprecated Numpy API in Cython extensions, when building Cython extensions against Numpy >= 1.7. [#183] - Improved support for py.test >= 2.7--running the ``./setup.py test`` command now copies all doc pages into the temporary test directory as well, so that all test files have a "common root directory". [#189] 1.0.3 (2015-07-22) ------------------ - Added workaround for sphinx-doc/sphinx#1843, a but in Sphinx which prevented descriptor classes with a custom metaclass from being documented correctly. [#158] - Added an alias for the ``./setup.py build_sphinx`` command as ``./setup.py build_docs`` which, to a new contributor, should hopefully be less cryptic. [#161] - The fonts in graphviz diagrams now match the font of the HTML content. [#169] - When the documentation is built on readthedocs.org, MathJax will be used for math rendering. When built elsewhere, the "pngmath" extension is still used for math rendering. [#170] - Fix crash when importing astropy_helpers when running with ``python -OO`` [#171] - The ``build`` and ``build_ext`` stages now correctly recognize the presence of C++ files in Cython extensions (previously only vanilla C worked). [#173] 1.0.2 (2015-04-02) ------------------ - Various fixes enabling the astropy-helpers Sphinx build command and Sphinx extensions to work with Sphinx 1.3. [#148] - More improvement to the ability to handle multiple versions of astropy-helpers being imported in the same Python interpreter session in the (somewhat rare) case of nested installs. [#147] - To better support high resolution displays, use SVG for the astropy logo and linkout image, falling back to PNGs for browsers that support it. [#150, #151] - Improve ``setup_helpers.get_compiler_version`` to work with more compilers, and to return more info. This will help fix builds of Astropy on less common compilers, like Sun C. [#153] 1.0.1 (2015-03-04) ------------------ - Released in concert with v0.4.8 to address the same issues. - Improved the ``ah_bootstrap`` script's ability to override existing installations of astropy-helpers with new versions in the context of installing multiple packages simultaneously within the same Python interpreter (e.g. when one package has in its ``setup_requires`` another package that uses a different version of astropy-helpers. [#144] - Added a workaround to an issue in matplotlib that can, in rare cases, lead to a crash when installing packages that import matplotlib at build time. [#144] 0.4.8 (2015-03-04) ------------------ - Improved the ``ah_bootstrap`` script's ability to override existing installations of astropy-helpers with new versions in the context of installing multiple packages simultaneously within the same Python interpreter (e.g. when one package has in its ``setup_requires`` another package that uses a different version of astropy-helpers. [#144] - Added a workaround to an issue in matplotlib that can, in rare cases, lead to a crash when installing packages that import matplotlib at build time. [#144] 1.0 (2015-02-17) ---------------- - Added new pre-/post-command hook points for ``setup.py`` commands. Now any package can define code to run before and/or after any ``setup.py`` command without having to manually subclass that command by adding ``pre__hook`` and ``post__hook`` callables to the package's ``setup_package.py`` module. See the PR for more details. 
[#112] - The following objects in the ``astropy_helpers.setup_helpers`` module have been relocated: - ``get_dummy_distribution``, ``get_distutils_*``, ``get_compiler_option``, ``add_command_option``, ``is_distutils_display_option`` -> ``astropy_helpers.distutils_helpers`` - ``should_build_with_cython``, ``generate_build_ext_command`` -> ``astropy_helpers.commands.build_ext`` - ``AstropyBuildPy`` -> ``astropy_helpers.commands.build_py`` - ``AstropyBuildSphinx`` -> ``astropy_helpers.commands.build_sphinx`` - ``AstropyInstall`` -> ``astropy_helpers.commands.install`` - ``AstropyInstallLib`` -> ``astropy_helpers.commands.install_lib`` - ``AstropyRegister`` -> ``astropy_helpers.commands.register`` - ``get_pkg_version_module`` -> ``astropy_helpers.version_helpers`` - ``write_if_different``, ``import_file``, ``get_numpy_include_path`` -> ``astropy_helpers.utils`` All of these are "soft" deprecations in the sense that they are still importable from ``astropy_helpers.setup_helpers`` for now, and there is no (easy) way to produce deprecation warnings when importing these objects from ``setup_helpers`` rather than directly from the modules they are defined in. But please consider updating any imports to these objects. [#110] - Use of the ``astropy.sphinx.ext.astropyautosummary`` extension is deprecated for use with Sphinx < 1.2. Instead it should suffice to remove this extension for the ``extensions`` list in your ``conf.py`` and add the stock ``sphinx.ext.autosummary`` instead. [#131] 0.4.7 (2015-02-17) ------------------ - Fixed incorrect/missing git hash being added to the generated ``version.py`` when creating a release. [#141] 0.4.6 (2015-02-16) ------------------ - Fixed problems related to the automatically generated _compiler module not being created properly. [#139] 0.4.5 (2015-02-11) ------------------ - Fixed an issue where ah_bootstrap.py could blow up when astropy_helper's version number is 1.0. - Added a workaround for documentation of properties in the rare case where the class's metaclass has a property of the same name. [#130] - Fixed an issue on Python 3 where importing a package using astropy-helper's generated version.py module would crash when the current working directory is an empty git repository. [#114] - Fixed an issue where the "revision count" appended to .dev versions by the generated version.py did not accurately reflect the revision count for the package it belongs to, and could be invalid if the current working directory is an unrelated git repository. [#107] - Likewise, fixed a confusing warning message that could occur in the same circumstances as the above issue. [#121] 0.4.4 (2014-12-31) ------------------ - More improvements for building the documentation using Python 3.x. [#100] - Additional minor fixes to Python 3 support. [#115] - Updates to support new test features in Astropy [#92, #106] 0.4.3 (2014-10-22) ------------------ - The generated ``version.py`` file now preserves the git hash of installed copies of the package as well as when building a source distribution. That is, the git hash of the changeset that was installed/released is preserved. [#87] - In smart resolver add resolution for class links when they exist in the intersphinx inventory, but not the mapping of the current package (e.g. 
when an affiliated package uses an astropy core class of which "actual" and "documented" location differs) [#88] - Fixed a bug that could occur when running ``setup.py`` for the first time in a repository that uses astropy-helpers as a submodule: ``AttributeError: 'NoneType' object has no attribute 'mkdtemp'`` [#89] - Fixed a bug where optional arguments to the ``doctest-skip`` Sphinx directive were sometimes being left in the generated documentation output. [#90] - Improved support for building the documentation using Python 3.x. [#96] - Avoid error message if .git directory is not present. [#91] 0.4.2 (2014-08-09) ------------------ - Fixed some CSS issues in generated API docs. [#69] - Fixed the warning message that could be displayed when generating a version number with some older versions of git. [#77] - Fixed automodsumm to work with new versions of Sphinx (>= 1.2.2). [#80] 0.4.1 (2014-08-08) ------------------ - Fixed git revision count on systems with git versions older than v1.7.2. [#70] - Fixed display of warning text when running a git command fails (previously the output of stderr was not being decoded properly). [#70] - The ``--offline`` flag to ``setup.py`` understood by ``ah_bootstrap.py`` now also prevents git from going online to fetch submodule updates. [#67] - The Sphinx extension for converting issue numbers to links in the changelog now supports working on arbitrary pages via a new ``conf.py`` setting: ``changelog_links_docpattern``. By default it affects the ``changelog`` and ``whatsnew`` pages in one's Sphinx docs. [#61] - Fixed crash that could result from users with missing/misconfigured locale settings. [#58] - The font used for code examples in the docs is now the system-defined ``monospace`` font, rather than ``Minaco``, which is not available on all platforms. [#50] 0.4 (2014-07-15) ---------------- - Initial release of astropy-helpers. See `APE4 `_ for details of the motivation and design of this package. - The ``astropy_helpers`` package replaces the following modules in the ``astropy`` package: - ``astropy.setup_helpers`` -> ``astropy_helpers.setup_helpers`` - ``astropy.version_helpers`` -> ``astropy_helpers.version_helpers`` - ``astropy.sphinx`` - > ``astropy_helpers.sphinx`` These modules should be considered deprecated in ``astropy``, and any new, non-critical changes to those modules will be made in ``astropy_helpers`` instead. Affiliated packages wishing to make use those modules (as in the Astropy package-template) should use the versions from ``astropy_helpers`` instead, and include the ``ah_bootstrap.py`` script in their project, for bootstrapping the ``astropy_helpers`` package in their setup.py script. astropy-helpers-1.1.1/ez_setup.py0000644001134200020070000002757312602613464020011 0ustar embrayscience00000000000000#!python """Bootstrap setuptools installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from ez_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. 
""" import os import shutil import sys import tempfile import tarfile import optparse import subprocess import platform from distutils import log try: from site import USER_SITE except ImportError: USER_SITE = None DEFAULT_VERSION = "1.4.2" DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" def _python_cmd(*args): args = (sys.executable,) + args return subprocess.call(args) == 0 def _check_call_py24(cmd, *args, **kwargs): res = subprocess.call(cmd, *args, **kwargs) class CalledProcessError(Exception): pass if not res == 0: msg = "Command '%s' return non-zero exit status %d" % (cmd, res) raise CalledProcessError(msg) vars(subprocess).setdefault('check_call', _check_call_py24) def _install(tarball, install_args=()): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _build_egg(egg, tarball, to_dir): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) finally: os.chdir(old_wd) shutil.rmtree(tmpdir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') def _do_download(version, download_base, to_dir, download_delay): egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' % (version, sys.version_info[0], sys.version_info[1])) if not os.path.exists(egg): tarball = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, tarball, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). if 'pkg_resources' in sys.modules: del sys.modules['pkg_resources'] import setuptools setuptools.bootstrap_install_from = egg def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15): # making sure we use the absolute path to_dir = os.path.abspath(to_dir) was_imported = 'pkg_resources' in sys.modules or \ 'setuptools' in sys.modules try: import pkg_resources except ImportError: return _do_download(version, download_base, to_dir, download_delay) try: pkg_resources.require("setuptools>=" + version) return except pkg_resources.VersionConflict: e = sys.exc_info()[1] if was_imported: sys.stderr.write( "The required version of setuptools (>=%s) is not available,\n" "and can't be installed while this script is running. Please\n" "install a more recent version first, using\n" "'easy_install -U setuptools'." 
"\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell (which will validate trust). Raise an exception if the command cannot complete. """ target = os.path.abspath(target) cmd = [ 'powershell', '-Command', "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), ] _clean_check(cmd, target) def has_powershell(): if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """ Use Python to download the file, even though it cannot authenticate the connection. """ try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen src = dst = None try: src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = src.read() dst = open(target, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = [ download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ] for dl in downloaders: if dl.viable(): return dl def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. 
""" # making sure we use the absolute path to_dir = os.path.abspath(to_dir) tgz_name = "setuptools-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). """ import copy import operator from tarfile import ExtractError directories = [] if members is None: members = self for tarinfo in members: if tarinfo.isdir(): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) tarinfo.mode = 448 # decimal for oct 0700 self.extract(tarinfo, path) # Reverse sort directories. if sys.version_info < (2, 4): def sorter(dir1, dir2): return cmp(dir1.name, dir2.name) directories.sort(sorter) directories.reverse() else: directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: dirpath = os.path.join(path, tarinfo.name) try: self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError: e = sys.exc_info()[1] if self.errorlevel > 1: raise else: self._dbg(1, "tarfile: %s" % e) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package """ install_args = [] if options.user_install: if sys.version_info < (2, 6): log.warn("--user requires Python 2.6 or later") raise SystemExit(1) install_args.append('--user') return install_args def _parse_args(): """ Parse the command line for options """ parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package (requires Python 2.6 or later)') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) options, args = parser.parse_args() # positional arguments are ignored return options def main(version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" options = _parse_args() tarball = download_setuptools(download_base=options.download_base, downloader_factory=options.downloader_factory) return _install(tarball, _build_install_args(options)) if __name__ == '__main__': sys.exit(main()) astropy-helpers-1.1.1/setup.cfg0000644001134200020070000000014612602613466017407 0ustar embrayscience00000000000000[pytest] norecursedirs = .tox astropy_helpers/tests/package_template python_functions = test_ astropy-helpers-1.1.1/astropy_helpers/0000755001134200020070000000000012637041310020777 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/setup_helpers.py0000644001134200020070000006310612630421426024244 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This module contains a number of utilities for use during setup/build/packaging that are 
useful to astropy as a whole. """ from __future__ import absolute_import, print_function import collections import os import re import shutil import subprocess import sys import textwrap import traceback import warnings from distutils import log from distutils.dist import Distribution from distutils.errors import DistutilsOptionError, DistutilsModuleError from distutils.core import Extension from distutils.core import Command from distutils.command.sdist import sdist as DistutilsSdist from setuptools import find_packages as _find_packages from .distutils_helpers import * from .version_helpers import get_pkg_version_module from .utils import (silence, walk_skip_hidden, import_file, extends_doc, resolve_name, AstropyDeprecationWarning) from .commands.build_ext import generate_build_ext_command from .commands.build_py import AstropyBuildPy from .commands.install import AstropyInstall from .commands.install_lib import AstropyInstallLib from .commands.register import AstropyRegister from .commands.test import AstropyTest # These imports are not used in this module, but are included for backwards # compat with older versions of this module from .utils import get_numpy_include_path, write_if_different from .commands.build_ext import should_build_with_cython, get_compiler_version _module_state = { 'registered_commands': None, 'have_sphinx': False, 'package_cache': None, } try: import sphinx _module_state['have_sphinx'] = True except ValueError as e: # This can occur deep in the bowels of Sphinx's imports by way of docutils # and an occurrence of this bug: http://bugs.python.org/issue18378 # In this case sphinx is effectively unusable if 'unknown locale' in e.args[0]: log.warn( "Possible misconfiguration of one of the environment variables " "LC_ALL, LC_CTYPES, LANG, or LANGUAGE. For an example of how to " "configure your system's language environment on OSX see " "http://blog.remibergsma.com/2012/07/10/" "setting-locales-correctly-on-mac-osx-terminal-application/") except ImportError: pass except SyntaxError: # occurs if markupsafe is recent version, which doesn't support Python 3.2 pass PY3 = sys.version_info[0] >= 3 # This adds a new keyword to the setup() function Distribution.skip_2to3 = [] def adjust_compiler(package): """ This function detects broken compilers and switches to another. If the environment variable CC is explicitly set, or a compiler is specified on the commandline, no override is performed -- the purpose here is to only override a default compiler. The specific compilers with problems are: * The default compiler in XCode-4.2, llvm-gcc-4.2, segfaults when compiling wcslib. The set of broken compilers can be updated by changing the compiler_mapping variable. It is a list of 2-tuples where the first in the pair is a regular expression matching the version of the broken compiler, and the second is the compiler to change to. """ warnings.warn( 'Direct use of the adjust_compiler function in setup.py is ' 'deprecated and can be removed from your setup.py. This ' 'functionality is now incorporated directly into the build_ext ' 'command.', AstropyDeprecationWarning) def get_debug_option(packagename): """ Determines if the build is in debug mode. Returns ------- debug : bool True if the current build was started with the debug option, False otherwise. """ try: current_debug = get_pkg_version_module(packagename, fromlist=['debug'])[0] except (ImportError, AttributeError): current_debug = None # Only modify the debug flag if one of the build commands was explicitly # run (i.e. 
not as a sub-command of something else) dist = get_dummy_distribution() if any(cmd in dist.commands for cmd in ['build', 'build_ext']): debug = bool(get_distutils_build_option('debug')) else: debug = bool(current_debug) if current_debug is not None and current_debug != debug: build_ext_cmd = dist.get_command_class('build_ext') build_ext_cmd.force_rebuild = True return debug def register_commands(package, version, release, srcdir='.'): if _module_state['registered_commands'] is not None: return _module_state['registered_commands'] if _module_state['have_sphinx']: from .commands.build_sphinx import AstropyBuildSphinx, AstropyBuildDocs else: AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx _module_state['registered_commands'] = registered_commands = { 'test': generate_test_command(package), # Use distutils' sdist because it respects package_data. # setuptools/distributes sdist requires duplication of information in # MANIFEST.in 'sdist': DistutilsSdist, # The exact form of the build_ext command depends on whether or not # we're building a release version 'build_ext': generate_build_ext_command(package, release), # We have a custom build_py to generate the default configuration file 'build_py': AstropyBuildPy, # Since install can (in some circumstances) be run without # first building, we also need to override install and # install_lib. See #2223 'install': AstropyInstall, 'install_lib': AstropyInstallLib, 'register': AstropyRegister, 'build_sphinx': AstropyBuildSphinx, 'build_docs': AstropyBuildDocs } # Need to override the __name__ here so that the commandline options are # presented as being related to the "build" command, for example; normally # this wouldn't be necessary since commands also have a command_name # attribute, but there is a bug in distutils' help display code that it # uses __name__ instead of command_name. Yay distutils! for name, cls in registered_commands.items(): cls.__name__ = name # Add a few custom options; more of these can be added by specific packages # later for option in [ ('use-system-libraries', "Use system libraries whenever possible", True)]: add_command_option('build', *option) add_command_option('install', *option) add_command_hooks(registered_commands, srcdir=srcdir) return registered_commands def add_command_hooks(commands, srcdir='.'): """ Look through setup_package.py modules for functions with names like ``pre__hook`` and ``post__hook`` where ```` is the name of a ``setup.py`` command (e.g. build_ext). If either hook is present this adds a wrapped version of that command to the passed in ``commands`` `dict`. ``commands`` may be pre-populated with other custom distutils command classes that should be wrapped if there are hooks for them (e.g. `AstropyBuildPy`). 
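    For illustration, a hypothetical ``setup_package.py`` might provide a
    hook like the following (the hook receives the finalized command
    object, as invoked by ``run_command_hooks``)::

        def pre_build_ext_hook(cmd):
            # ``cmd`` is the finalized build_ext command object; adjust it
            # before the original build_ext.run() executes.
            cmd.force = True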
""" hook_re = re.compile(r'^(pre|post)_(.+)_hook$') # Distutils commands have a method of the same name, but it is not a # *classmethod* (which probably didn't exist when distutils was first # written) def get_command_name(cmdcls): if hasattr(cmdcls, 'command_name'): return cmdcls.command_name else: return cmdcls.__name__ packages = filter_packages(find_packages(srcdir)) dist = get_dummy_distribution() hooks = collections.defaultdict(dict) for setuppkg in iter_setup_packages(srcdir, packages): for name, obj in vars(setuppkg).items(): match = hook_re.match(name) if not match: continue hook_type = match.group(1) cmd_name = match.group(2) cmd_cls = dist.get_command_class(cmd_name) if hook_type not in hooks[cmd_name]: hooks[cmd_name][hook_type] = [] hooks[cmd_name][hook_type].append((setuppkg.__name__, obj)) for cmd_name, cmd_hooks in hooks.items(): commands[cmd_name] = generate_hooked_command( cmd_name, dist.get_command_class(cmd_name), cmd_hooks) def generate_hooked_command(cmd_name, cmd_cls, hooks): """ Returns a generated subclass of ``cmd_cls`` that runs the pre- and post-command hooks for that command before and after the ``cmd_cls.run`` method. """ def run(self, orig_run=cmd_cls.run): self.run_command_hooks('pre_hooks') orig_run(self) self.run_command_hooks('post_hooks') return type(cmd_name, (cmd_cls, object), {'run': run, 'run_command_hooks': run_command_hooks, 'pre_hooks': hooks.get('pre', []), 'post_hooks': hooks.get('post', [])}) def run_command_hooks(cmd_obj, hook_kind): """Run hooks registered for that command and phase. *cmd_obj* is a finalized command object; *hook_kind* is either 'pre_hook' or 'post_hook'. """ hooks = getattr(cmd_obj, hook_kind, None) if not hooks: return for modname, hook in hooks: if isinstance(hook, str): try: hook_obj = resolve_name(hook) except ImportError as exc: raise DistutilsModuleError( 'cannot find hook {0}: {1}'.format(hook, err)) else: hook_obj = hook if not callable(hook_obj): raise DistutilsOptionError('hook {0!r} is not callable' % hook) log.info('running {0} from {1} for {2} command'.format( hook_kind.rstrip('s'), modname, cmd_obj.get_command_name())) try : hook_obj(cmd_obj) except Exception as exc: log.error('{0} command hook {1} raised an exception: %s\n'.format( hook_obj.__name__, cmd_obj.get_command_name())) log.error(traceback.format_exc()) sys.exit(1) def generate_test_command(package_name): """ Creates a custom 'test' command for the given package which sets the command's ``package_name`` class attribute to the name of the package being tested. """ return type(package_name.title() + 'Test', (AstropyTest,), {'package_name': package_name}) def update_package_files(srcdir, extensions, package_data, packagenames, package_dirs): """ This function is deprecated and maintained for backward compatibility with affiliated packages. Affiliated packages should update their setup.py to use `get_package_info` instead. """ info = get_package_info(srcdir) extensions.extend(info['ext_modules']) package_data.update(info['package_data']) packagenames = list(set(packagenames + info['packages'])) package_dirs.update(info['package_dir']) def get_package_info(srcdir='.', exclude=()): """ Collates all of the information for building all subpackages subpackages and returns a dictionary of keyword arguments that can be passed directly to `distutils.setup`. 
The purpose of this function is to allow subpackages to update the arguments to the package's ``setup()`` function in its setup.py script, rather than having to specify all extensions/package data directly in the ``setup.py``. See Astropy's own ``setup.py`` for example usage and the Astropy development docs for more details. This function obtains that information by iterating through all packages in ``srcdir`` and locating a ``setup_package.py`` module. This module can contain the following functions: ``get_extensions()``, ``get_package_data()``, ``get_build_options()``, ``get_external_libraries()``, and ``requires_2to3()``. Each of those functions take no arguments. - ``get_extensions`` returns a list of `distutils.extension.Extension` objects. - ``get_package_data()`` returns a dict formatted as required by the ``package_data`` argument to ``setup()``. - ``get_build_options()`` returns a list of tuples describing the extra build options to add. - ``get_external_libraries()`` returns a list of libraries that can optionally be built using external dependencies. - ``get_entry_points()`` returns a dict formatted as required by the ``entry_points`` argument to ``setup()``. - ``requires_2to3()`` should return `True` when the source code requires `2to3` processing to run on Python 3.x. If ``requires_2to3()`` is missing, it is assumed to return `True`. """ ext_modules = [] packages = [] package_data = {} package_dir = {} skip_2to3 = [] # Use the find_packages tool to locate all packages and modules packages = filter_packages(find_packages(srcdir, exclude=exclude)) # For each of the setup_package.py modules, extract any # information that is needed to install them. The build options # are extracted first, so that their values will be available in # subsequent calls to `get_extensions`, etc. for setuppkg in iter_setup_packages(srcdir, packages): if hasattr(setuppkg, 'get_build_options'): options = setuppkg.get_build_options() for option in options: add_command_option('build', *option) if hasattr(setuppkg, 'get_external_libraries'): libraries = setuppkg.get_external_libraries() for library in libraries: add_external_library(library) if hasattr(setuppkg, 'requires_2to3'): requires_2to3 = setuppkg.requires_2to3() else: requires_2to3 = True if not requires_2to3: skip_2to3.append( os.path.dirname(setuppkg.__file__)) for setuppkg in iter_setup_packages(srcdir, packages): # get_extensions must include any Cython extensions by their .pyx # filename. if hasattr(setuppkg, 'get_extensions'): ext_modules.extend(setuppkg.get_extensions()) if hasattr(setuppkg, 'get_package_data'): package_data.update(setuppkg.get_package_data()) # Locate any .pyx files not already specified, and add their extensions in. # The default include dirs include numpy to facilitate numerical work. ext_modules.extend(get_cython_extensions(srcdir, packages, ext_modules, ['numpy'])) # Now remove extensions that have the special name 'skip_cython', as they # exist Only to indicate that the cython extensions shouldn't be built for i, ext in reversed(list(enumerate(ext_modules))): if ext.name == 'skip_cython': del ext_modules[i] # On Microsoft compilers, we need to pass the '/MANIFEST' # commandline argument. This was the default on MSVC 9.0, but is # now required on MSVC 10.0, but it doesn't seem to hurt to add # it unconditionally. 
if get_compiler_option() == 'msvc': for ext in ext_modules: ext.extra_link_args.append('/MANIFEST') return { 'ext_modules': ext_modules, 'packages': packages, 'package_dir': package_dir, 'package_data': package_data, 'skip_2to3': skip_2to3 } def iter_setup_packages(srcdir, packages): """ A generator that finds and imports all of the ``setup_package.py`` modules in the source packages. Returns ------- modgen : generator A generator that yields (modname, mod), where `mod` is the module and `modname` is the module name for the ``setup_package.py`` modules. """ for packagename in packages: package_parts = packagename.split('.') package_path = os.path.join(srcdir, *package_parts) setup_package = os.path.relpath( os.path.join(package_path, 'setup_package.py')) if os.path.isfile(setup_package): module = import_file(setup_package, name=packagename + '.setup_package') yield module def iter_pyx_files(package_dir, package_name): """ A generator that yields Cython source files (ending in '.pyx') in the source packages. Returns ------- pyxgen : generator A generator that yields (extmod, fullfn) where `extmod` is the full name of the module that the .pyx file would live in based on the source directory structure, and `fullfn` is the path to the .pyx file. """ for dirpath, dirnames, filenames in walk_skip_hidden(package_dir): for fn in filenames: if fn.endswith('.pyx'): fullfn = os.path.relpath(os.path.join(dirpath, fn)) # Package must match file name extmod = '.'.join([package_name, fn[:-4]]) yield (extmod, fullfn) break # Don't recurse into subdirectories def get_cython_extensions(srcdir, packages, prevextensions=tuple(), extincludedirs=None): """ Looks for Cython files and generates Extensions if needed. Parameters ---------- srcdir : str Path to the root of the source directory to search. prevextensions : list of `~distutils.core.Extension` objects The extensions that are already defined. Any .pyx files already here will be ignored. extincludedirs : list of str or None Directories to include as the `include_dirs` argument to the generated `~distutils.core.Extension` objects. Returns ------- exts : list of `~distutils.core.Extension` objects The new extensions that are needed to compile all .pyx files (does not include any already in `prevextensions`). """ # Vanilla setuptools and old versions of distribute include Cython files # as .c files in the sources, not .pyx, so we cannot simply look for # existing .pyx sources in the previous sources, but we should also check # for .c files with the same remaining filename. So we look for .pyx and # .c files, and we strip the extension. prevsourcepaths = [] ext_modules = [] for ext in prevextensions: for s in ext.sources: if s.endswith(('.pyx', '.c', '.cpp')): sourcepath = os.path.realpath(os.path.splitext(s)[0]) prevsourcepaths.append(sourcepath) for package_name in packages: package_parts = package_name.split('.') package_path = os.path.join(srcdir, *package_parts) for extmod, pyxfn in iter_pyx_files(package_path, package_name): sourcepath = os.path.realpath(os.path.splitext(pyxfn)[0]) if sourcepath not in prevsourcepaths: ext_modules.append(Extension(extmod, [pyxfn], include_dirs=extincludedirs)) return ext_modules class DistutilsExtensionArgs(collections.defaultdict): """ A special dictionary whose default values are the empty list. This is useful for building up a set of arguments for `distutils.Extension` without worrying whether the entry is already present. 
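    A small sketch of the intended use (names and paths are only examples)::

        args = DistutilsExtensionArgs()
        args['libraries'].append('m')        # new keys default to []
        args.update({'include_dirs': ['/opt/foo/include']})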
""" def __init__(self, *args, **kwargs): def default_factory(): return [] super(DistutilsExtensionArgs, self).__init__( default_factory, *args, **kwargs) def update(self, other): for key, val in other.items(): self[key].extend(val) def pkg_config(packages, default_libraries, executable='pkg-config'): """ Uses pkg-config to update a set of distutils Extension arguments to include the flags necessary to link against the given packages. If the pkg-config lookup fails, default_libraries is applied to libraries. Parameters ---------- packages : list of str A list of pkg-config packages to look up. default_libraries : list of str A list of library names to use if the pkg-config lookup fails. Returns ------- config : dict A dictionary containing keyword arguments to `distutils.Extension`. These entries include: - ``include_dirs``: A list of include directories - ``library_dirs``: A list of library directories - ``libraries``: A list of libraries - ``define_macros``: A list of macro defines - ``undef_macros``: A list of macros to undefine - ``extra_compile_args``: A list of extra arguments to pass to the compiler """ flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries', '-D': 'define_macros', '-U': 'undef_macros'} command = "{0} --libs --cflags {1}".format(executable, ' '.join(packages)), result = DistutilsExtensionArgs() try: pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) output = pipe.communicate()[0].strip() except subprocess.CalledProcessError as e: lines = [ "{0} failed. This may cause the build to fail below.".format(executable), " command: {0}".format(e.cmd), " returncode: {0}".format(e.returncode), " output: {0}".format(e.output) ] log.warn('\n'.join(lines)) result['libraries'].extend(default_libraries) else: if pipe.returncode != 0: lines = [ "pkg-config could not lookup up package(s) {0}.".format( ", ".join(packages)), "This may cause the build to fail below." ] log.warn('\n'.join(lines)) result['libraries'].extend(default_libraries) else: for token in output.split(): # It's not clear what encoding the output of # pkg-config will come to us in. It will probably be # some combination of pure ASCII (for the compiler # flags) and the filesystem encoding (for any argument # that includes directories or filenames), but this is # just conjecture, as the pkg-config documentation # doesn't seem to address it. arg = token[:2].decode('ascii') value = token[2:].decode(sys.getfilesystemencoding()) if arg in flag_map: if arg == '-D': value = tuple(value.split('=', 1)) result[flag_map[arg]].append(value) else: result['extra_compile_args'].append(value) return result def add_external_library(library): """ Add a build option for selecting the internal or system copy of a library. Parameters ---------- library : str The name of the library. If the library is `foo`, the build option will be called `--use-system-foo`. """ for command in ['build', 'build_ext', 'install']: add_command_option(command, str('use-system-' + library), 'Use the system {0} library'.format(library), is_bool=True) def use_system_library(library): """ Returns `True` if the build configuration indicates that the given library should use the system copy of the library rather than the internal one. For the given library `foo`, this will be `True` if `--use-system-foo` or `--use-system-libraries` was provided at the commandline or in `setup.cfg`. 
Parameters ---------- library : str The name of the library Returns ------- use_system : bool `True` if the build should use the system copy of the library. """ return ( get_distutils_build_or_install_option('use_system_{0}'.format(library)) or get_distutils_build_or_install_option('use_system_libraries')) @extends_doc(_find_packages) def find_packages(where='.', exclude=(), invalidate_cache=False): """ This version of ``find_packages`` caches previous results to speed up subsequent calls. Use ``invalide_cache=True`` to ignore cached results from previous ``find_packages`` calls, and repeat the package search. """ if not invalidate_cache and _module_state['package_cache'] is not None: return _module_state['package_cache'] packages = _find_packages(where=where, exclude=exclude) _module_state['package_cache'] = packages return packages def filter_packages(packagenames): """ Removes some packages from the package list that shouldn't be installed on the current version of Python. """ if PY3: exclude = '_py2' else: exclude = '_py3' return [x for x in packagenames if not x.endswith(exclude)] class FakeBuildSphinx(Command): """ A dummy build_sphinx command that is called if Sphinx is not installed and displays a relevant error message """ #user options inherited from sphinx.setup_command.BuildDoc user_options = [ ('fresh-env', 'E', '' ), ('all-files', 'a', ''), ('source-dir=', 's', ''), ('build-dir=', None, ''), ('config-dir=', 'c', ''), ('builder=', 'b', ''), ('project=', None, ''), ('version=', None, ''), ('release=', None, ''), ('today=', None, ''), ('link-index', 'i', ''), ] #user options appended in astropy.setup_helpers.AstropyBuildSphinx user_options.append(('warnings-returncode', 'w','')) user_options.append(('clean-docs', 'l', '')) user_options.append(('no-intersphinx', 'n', '')) user_options.append(('open-docs-in-browser', 'o','')) def initialize_options(self): try: raise RuntimeError("Sphinx must be installed for build_sphinx") except: log.error('error : Sphinx must be installed for build_sphinx') sys.exit(1) astropy-helpers-1.1.1/astropy_helpers/distutils_helpers.py0000644001134200020070000001735512602613465025142 0ustar embrayscience00000000000000""" This module contains various utilities for introspecting the distutils module and the setup process. Some of these utilities require the `astropy_helpers.setup_helpers.register_commands` function to be called first, as it will affect introspection of setuptools command-line arguments. Other utilities in this module do not have that restriction. """ import os import sys from distutils import ccompiler from distutils.dist import Distribution from distutils.errors import DistutilsError from .utils import silence # This function, and any functions that call it, require the setup in # `astropy_helpers.setup_helpers.register_commands` to be run first. def get_dummy_distribution(): """ Returns a distutils Distribution object used to instrument the setup environment before calling the actual setup() function. """ from .setup_helpers import _module_state if _module_state['registered_commands'] is None: raise RuntimeError( 'astropy_helpers.setup_helpers.register_commands() must be ' 'called before using ' 'astropy_helpers.setup_helpers.get_dummy_distribution()') # Pre-parse the Distutils command-line options and config files to if # the option is set. 
dist = Distribution({'script_name': os.path.basename(sys.argv[0]), 'script_args': sys.argv[1:]}) dist.cmdclass.update(_module_state['registered_commands']) with silence(): try: dist.parse_config_files() dist.parse_command_line() except (DistutilsError, AttributeError, SystemExit): # Let distutils handle DistutilsErrors itself AttributeErrors can # get raise for ./setup.py --help SystemExit can be raised if a # display option was used, for example pass return dist def get_distutils_option(option, commands): """ Returns the value of the given distutils option. Parameters ---------- option : str The name of the option commands : list of str The list of commands on which this option is available Returns ------- val : str or None the value of the given distutils option. If the option is not set, returns None. """ dist = get_dummy_distribution() for cmd in commands: cmd_opts = dist.command_options.get(cmd) if cmd_opts is not None and option in cmd_opts: return cmd_opts[option][1] else: return None def get_distutils_build_option(option): """ Returns the value of the given distutils build option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build option. If the option is not set, returns None. """ return get_distutils_option(option, ['build', 'build_ext', 'build_clib']) def get_distutils_install_option(option): """ Returns the value of the given distutils install option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build option. If the option is not set, returns None. """ return get_distutils_option(option, ['install']) def get_distutils_build_or_install_option(option): """ Returns the value of the given distutils build or install option. Parameters ---------- option : str The name of the option Returns ------- val : str or None The value of the given distutils build or install option. If the option is not set, returns None. """ return get_distutils_option(option, ['build', 'build_ext', 'build_clib', 'install']) def get_compiler_option(): """ Determines the compiler that will be used to build extension modules. Returns ------- compiler : str The compiler option specified for the build, build_ext, or build_clib command; or the default compiler for the platform if none was specified. """ compiler = get_distutils_build_option('compiler') if compiler is None: return ccompiler.get_default_compiler() return compiler def add_command_option(command, name, doc, is_bool=False): """ Add a custom option to a setup command. Issues a warning if the option already exists on that command. Parameters ---------- command : str The name of the command as given on the command line name : str The name of the build option doc : str A short description of the option, for the `--help` message is_bool : bool, optional When `True`, the option is a boolean option and doesn't require an associated value. 
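    Illustrative only (the option name is made up, and ``register_commands()``
    must already have been called so that the dummy distribution can be
    created)::

        add_command_option('build', 'use-system-foo',
                           'Use the system foo library', is_bool=True)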
""" dist = get_dummy_distribution() cmdcls = dist.get_command_class(command) if (hasattr(cmdcls, '_astropy_helpers_options') and name in cmdcls._astropy_helpers_options): return attr = name.replace('-', '_') if hasattr(cmdcls, attr): raise RuntimeError( '{0!r} already has a {1!r} class attribute, barring {2!r} from ' 'being usable as a custom option name.'.format(cmdcls, attr, name)) for idx, cmd in enumerate(cmdcls.user_options): if cmd[0] == name: log.warn('Overriding existing {0!r} option ' '{1!r}'.format(command, name)) del cmdcls.user_options[idx] if name in cmdcls.boolean_options: cmdcls.boolean_options.remove(name) break cmdcls.user_options.append((name, None, doc)) if is_bool: cmdcls.boolean_options.append(name) # Distutils' command parsing requires that a command object have an # attribute with the same name as the option (with '-' replaced with '_') # in order for that option to be recognized as valid setattr(cmdcls, attr, None) # This caches the options added through add_command_option so that if it is # run multiple times in the same interpreter repeated adds are ignored # (this way we can still raise a RuntimeError if a custom option overrides # a built-in option) if not hasattr(cmdcls, '_astropy_helpers_options'): cmdcls._astropy_helpers_options = set([name]) else: cmdcls._astropy_helpers_options.add(name) def get_distutils_display_options(): """ Returns a set of all the distutils display options in their long and short forms. These are the setup.py arguments such as --name or --version which print the project's metadata and then exit. Returns ------- opts : set The long and short form display option arguments, including the - or -- """ short_display_opts = set('-' + o[1] for o in Distribution.display_options if o[1]) long_display_opts = set('--' + o[0] for o in Distribution.display_options) # Include -h and --help which are not explicitly listed in # Distribution.display_options (as they are handled by optparse) short_display_opts.add('-h') long_display_opts.add('--help') # This isn't the greatest approach to hardcode these commands. # However, there doesn't seem to be a good way to determine # whether build *will be* run as part of the command at this # phase. display_commands = set([ 'clean', 'register', 'setopt', 'saveopts', 'egg_info', 'alias']) return short_display_opts.union(long_display_opts.union(display_commands)) def is_distutils_display_option(): """ Returns True if sys.argv contains any of the distutils display options such as --version or --name. """ display_options = get_distutils_display_options() return bool(set(sys.argv[1:]).intersection(display_options)) astropy-helpers-1.1.1/astropy_helpers/version_helpers.py0000644001134200020070000002264412602613466024601 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Utilities for generating the version string for Astropy (or an affiliated package) and the version.py module, which contains version info for the package. Within the generated astropy.version module, the `major`, `minor`, and `bugfix` variables hold the respective parts of the version number (bugfix is '0' if absent). The `release` variable is True if this is a release, and False if this is a development version of astropy. For the actual version string, use:: from astropy.version import version or:: from astropy import __version__ """ from __future__ import division import datetime import imp import os import pkgutil import sys from distutils import log import pkg_resources from . 
import git_helpers from .distutils_helpers import is_distutils_display_option from .utils import invalidate_caches PY3 = sys.version_info[0] == 3 def _version_split(version): """ Split a version string into major, minor, and bugfix numbers. If any of those numbers are missing the default is zero. Any pre/post release modifiers are ignored. Examples ======== >>> _version_split('1.2.3') (1, 2, 3) >>> _version_split('1.2') (1, 2, 0) >>> _version_split('1.2rc1') (1, 2, 0) >>> _version_split('1') (1, 0, 0) >>> _version_split('') (0, 0, 0) """ parsed_version = pkg_resources.parse_version(version) if hasattr(parsed_version, 'base_version'): # New version parsing for setuptools >= 8.0 if parsed_version.base_version: parts = [int(part) for part in parsed_version.base_version.split('.')] else: parts = [] else: parts = [] for part in parsed_version: if part.startswith('*'): # Ignore any .dev, a, b, rc, etc. break parts.append(int(part)) if len(parts) < 3: parts += [0] * (3 - len(parts)) # In principle a version could have more parts (like 1.2.3.4) but we only # support .. return tuple(parts[:3]) # This is used by setup.py to create a new version.py - see that file for # details. Note that the imports have to be absolute, since this is also used # by affiliated packages. _FROZEN_VERSION_PY_TEMPLATE = """ # Autogenerated by {packagetitle}'s setup.py on {timestamp!s} from __future__ import unicode_literals import datetime {header} major = {major} minor = {minor} bugfix = {bugfix} release = {rel} timestamp = {timestamp!r} debug = {debug} try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" """[1:] _FROZEN_VERSION_PY_WITH_GIT_HEADER = """ {git_helpers} _packagename = "{packagename}" _last_generated_version = "{verstr}" _last_githash = "{githash}" # Determine where the source code for this module # lives. If __file__ is not a filesystem path then # it is assumed not to live in a git repo at all. 
if _get_repo_path(__file__, levels=len(_packagename.split('.'))): version = update_git_devstr(_last_generated_version, path=__file__) githash = get_git_devstr(sha=True, show_warning=False, path=__file__) or _last_githash else: # The file does not appear to live in a git repo so don't bother # invoking git version = _last_generated_version githash = _last_githash """[1:] _FROZEN_VERSION_PY_STATIC_HEADER = """ version = "{verstr}" githash = "{githash}" """[1:] def _get_version_py_str(packagename, version, githash, release, debug, uses_git=True): timestamp = datetime.datetime.now() major, minor, bugfix = _version_split(version) if packagename.lower() == 'astropy': packagetitle = 'Astropy' else: packagetitle = 'Astropy-affiliated package ' + packagename header = '' if uses_git: header = _generate_git_header(packagename, version, githash) elif not githash: # _generate_git_header will already generate a new git has for us, but # for creating a new version.py for a release (even if uses_git=False) # we still need to get the githash to include in the version.py # See https://github.com/astropy/astropy-helpers/issues/141 githash = git_helpers.get_git_devstr(sha=True, show_warning=True) if not header: # If _generate_git_header fails it returns an empty string header = _FROZEN_VERSION_PY_STATIC_HEADER.format(verstr=version, githash=githash) return _FROZEN_VERSION_PY_TEMPLATE.format(packagetitle=packagetitle, timestamp=timestamp, header=header, major=major, minor=minor, bugfix=bugfix, rel=release, debug=debug) def _generate_git_header(packagename, version, githash): """ Generates a header to the version.py module that includes utilities for probing the git repository for updates (to the current git hash, etc.) These utilities should only be available in development versions, and not in release builds. If this fails for any reason an empty string is returned. """ loader = pkgutil.get_loader(git_helpers) source = loader.get_source(git_helpers.__name__) or '' source_lines = source.splitlines() if not source_lines: log.warn('Cannot get source code for astropy_helpers.git_helpers; ' 'git support disabled.') return '' idx = 0 for idx, line in enumerate(source_lines): if line.startswith('# BEGIN'): break git_helpers_py = '\n'.join(source_lines[idx + 1:]) if PY3: verstr = version else: # In Python 2 don't pass in a unicode string; otherwise verstr will # be represented with u'' syntax which breaks on Python 3.x with x # < 3. 
This is only an issue when developing on multiple Python # versions at once verstr = version.encode('utf8') new_githash = git_helpers.get_git_devstr(sha=True, show_warning=False) if new_githash: githash = new_githash return _FROZEN_VERSION_PY_WITH_GIT_HEADER.format( git_helpers=git_helpers_py, packagename=packagename, verstr=verstr, githash=githash) def generate_version_py(packagename, version, release=None, debug=None, uses_git=True): """Regenerate the version.py module if necessary.""" try: version_module = get_pkg_version_module(packagename) try: last_generated_version = version_module._last_generated_version except AttributeError: last_generated_version = version_module.version try: last_githash = version_module._last_githash except AttributeError: last_githash = version_module.githash current_release = version_module.release current_debug = version_module.debug except ImportError: version_module = None last_generated_version = None last_githash = None current_release = None current_debug = None if release is None: # Keep whatever the current value is, if it exists release = bool(current_release) if debug is None: # Likewise, keep whatever the current value is, if it exists debug = bool(current_debug) version_py = os.path.join(packagename, 'version.py') if (last_generated_version != version or current_release != release or current_debug != debug): if '-q' not in sys.argv and '--quiet' not in sys.argv: log.set_threshold(log.INFO) if is_distutils_display_option(): # Always silence unnecessary log messages when display options are # being used log.set_threshold(log.WARN) log.info('Freezing version number to {0}'.format(version_py)) with open(version_py, 'w') as f: # This overwrites the actual version.py f.write(_get_version_py_str(packagename, version, last_githash, release, debug, uses_git=uses_git)) invalidate_caches() if version_module: imp.reload(version_module) def get_pkg_version_module(packagename, fromlist=None): """Returns the package's .version module generated by `astropy_helpers.version_helpers.generate_version_py`. Raises an ImportError if the version module is not found. If ``fromlist`` is an iterable, return a tuple of the members of the version module corresponding to the member names given in ``fromlist``. Raises an `AttributeError` if any of these module members are not found. """ if not fromlist: # Due to a historical quirk of Python's import implementation, # __import__ will not return submodules of a package if 'fromlist' is # empty. # TODO: For Python 3.1 and up it may be preferable to use importlib # instead of the __import__ builtin return __import__(packagename + '.version', fromlist=['']) else: mod = __import__(packagename + '.version', fromlist=fromlist) return tuple(getattr(mod, member) for member in fromlist) astropy-helpers-1.1.1/astropy_helpers/utils.py0000644001134200020070000006646212630421426022532 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import, unicode_literals import contextlib import functools import imp import inspect import os import sys import glob import textwrap import types import warnings try: from importlib import machinery as import_machinery # Python 3.2 does not have SourceLoader if not hasattr(import_machinery, 'SourceLoader'): import_machinery = None except ImportError: import_machinery = None # Python 3.3's importlib caches filesystem reads for faster imports in the # general case. 
But sometimes it's necessary to manually invalidate those # caches so that the import system can pick up new generated files. See # https://github.com/astropy/astropy/issues/820 if sys.version_info[:2] >= (3, 3): from importlib import invalidate_caches else: invalidate_caches = lambda: None # Python 2/3 compatibility if sys.version_info[0] < 3: string_types = (str, unicode) else: string_types = (str,) # Note: The following Warning subclasses are simply copies of the Warnings in # Astropy of the same names. class AstropyWarning(Warning): """ The base warning class from which all Astropy warnings should inherit. Any warning inheriting from this class is handled by the Astropy logger. """ class AstropyDeprecationWarning(AstropyWarning): """ A warning class to indicate a deprecated feature. """ class AstropyPendingDeprecationWarning(PendingDeprecationWarning, AstropyWarning): """ A warning class to indicate a soon-to-be deprecated feature. """ def _get_platlib_dir(cmd): """ Given a build command, return the name of the appropriate platform-specific build subdirectory directory (e.g. build/lib.linux-x86_64-2.7) """ plat_specifier = '.{0}-{1}'.format(cmd.plat_name, sys.version[0:3]) return os.path.join(cmd.build_base, 'lib' + plat_specifier) def get_numpy_include_path(): """ Gets the path to the numpy headers. """ # We need to go through this nonsense in case setuptools # downloaded and installed Numpy for us as part of the build or # install, since Numpy may still think it's in "setup mode", when # in fact we're ready to use it to build astropy now. if sys.version_info[0] >= 3: import builtins if hasattr(builtins, '__NUMPY_SETUP__'): del builtins.__NUMPY_SETUP__ import imp import numpy imp.reload(numpy) else: import __builtin__ if hasattr(__builtin__, '__NUMPY_SETUP__'): del __builtin__.__NUMPY_SETUP__ import numpy reload(numpy) try: numpy_include = numpy.get_include() except AttributeError: numpy_include = numpy.get_numpy_include() return numpy_include class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x def write(self, s): pass def flush(self): pass @contextlib.contextmanager def silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr if sys.platform == 'win32': import ctypes def _has_hidden_attribute(filepath): """ Returns True if the given filepath has the hidden attribute on MS-Windows. Based on a post here: http://stackoverflow.com/questions/284115/cross-platform-hidden-file-detection """ if isinstance(filepath, bytes): filepath = filepath.decode(sys.getfilesystemencoding()) try: attrs = ctypes.windll.kernel32.GetFileAttributesW(filepath) assert attrs != -1 result = bool(attrs & 2) except (AttributeError, AssertionError): result = False return result else: def _has_hidden_attribute(filepath): return False def is_path_hidden(filepath): """ Determines if a given file or directory is hidden. 
Parameters ---------- filepath : str The path to a file or directory Returns ------- hidden : bool Returns `True` if the file is hidden """ name = os.path.basename(os.path.abspath(filepath)) if isinstance(name, bytes): is_dotted = name.startswith(b'.') else: is_dotted = name.startswith('.') return is_dotted or _has_hidden_attribute(filepath) def walk_skip_hidden(top, onerror=None, followlinks=False): """ A wrapper for `os.walk` that skips hidden files and directories. This function does not have the parameter `topdown` from `os.walk`: the directories must always be recursed top-down when using this function. See also -------- os.walk : For a description of the parameters """ for root, dirs, files in os.walk( top, topdown=True, onerror=onerror, followlinks=followlinks): # These lists must be updated in-place so os.walk will skip # hidden directories dirs[:] = [d for d in dirs if not is_path_hidden(d)] files[:] = [f for f in files if not is_path_hidden(f)] yield root, dirs, files def write_if_different(filename, data): """Write `data` to `filename`, if the content of the file is different. Parameters ---------- filename : str The file name to be written to. data : bytes The data to be written to `filename`. """ assert isinstance(data, bytes) if os.path.exists(filename): with open(filename, 'rb') as fd: original_data = fd.read() else: original_data = None if original_data != data: with open(filename, 'wb') as fd: fd.write(data) def import_file(filename, name=None): """ Imports a module from a single file as if it doesn't belong to a particular package. The returned module will have the optional ``name`` if given, or else a name generated from the filename. """ # Specifying a traditional dot-separated fully qualified name here # results in a number of "Parent module 'astropy' not found while # handling absolute import" warnings. Using the same name, the # namespaces of the modules get merged together. So, this # generates an underscore-separated name which is more likely to # be unique, and it doesn't really matter because the name isn't # used directly here anyway. mode = 'U' if sys.version_info[0] < 3 else 'r' if name is None: basename = os.path.splitext(filename)[0] name = '_'.join(os.path.relpath(basename).split(os.sep)[1:]) if import_machinery: loader = import_machinery.SourceFileLoader(name, filename) mod = loader.load_module() else: with open(filename, mode) as fd: mod = imp.load_module(name, fd, filename, ('.py', mode, 1)) return mod def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. Raise `ImportError` if the module or name is not found. """ parts = name.split('.') cursor = len(parts) - 1 module_name = parts[:cursor] attr_name = parts[-1] while cursor > 0: try: ret = __import__('.'.join(module_name), fromlist=[attr_name]) break except ImportError: if cursor == 0: raise cursor -= 1 module_name = parts[:cursor] attr_name = parts[cursor] ret = '' for part in parts[cursor:]: try: ret = getattr(ret, part) except AttributeError: raise ImportError(name) return ret if sys.version_info[0] >= 3: def iteritems(dictionary): return dictionary.items() else: def iteritems(dictionary): return dictionary.iteritems() def extends_doc(extended_func): """ A function decorator for use when wrapping an existing function but adding additional functionality. This copies the docstring from the original function, and appends to it (along with a newline) the docstring of the wrapper function. Example ------- >>> def foo(): ... '''Hello.''' ... >>> @extends_doc(foo) ... 
def bar(): ... '''Goodbye.''' ... >>> print(bar.__doc__) Hello. Goodbye. """ def decorator(func): if not (extended_func.__doc__ is None or func.__doc__ is None): func.__doc__ = '\n\n'.join([extended_func.__doc__.rstrip('\n'), func.__doc__.lstrip('\n')]) return func return decorator # Duplicated from astropy.utils.decorators.deprecated # When fixing issues in this function fix them in astropy first, then # port the fixes over to astropy-helpers def deprecated(since, message='', name='', alternative='', pending=False, obj_type=None): """ Used to mark a function or class as deprecated. To mark an attribute as deprecated, use `deprecated_attribute`. Parameters ------------ since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. The format specifier ``func`` may be used for the name of the function, and ``alternative`` may be used in the deprecation message to insert the name of an alternative to the deprecated function. ``obj_type`` may be used to insert a friendly name for the type of object being deprecated. name : str, optional The name of the deprecated function or class; if not provided the name is automatically determined from the passed in function or class, though this is useful in the case of renamed functions, where the new function is just assigned to the name of the deprecated function. For example:: def new_function(): ... oldFunction = new_function alternative : str, optional An alternative function or class name that the user may use in place of the deprecated object. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a AstropyPendingDeprecationWarning instead of a AstropyDeprecationWarning. obj_type : str, optional The type of this object, if the automatically determined one needs to be overridden. """ method_types = (classmethod, staticmethod, types.MethodType) def deprecate_doc(old_doc, message): """ Returns a given docstring with a deprecation message prepended to it. """ if not old_doc: old_doc = '' old_doc = textwrap.dedent(old_doc).strip('\n') new_doc = (('\n.. deprecated:: %(since)s' '\n %(message)s\n\n' % {'since': since, 'message': message.strip()}) + old_doc) if not old_doc: # This is to prevent a spurious 'unexpected unindent' warning from # docutils when the original docstring was blank. new_doc += r'\ ' return new_doc def get_function(func): """ Given a function or classmethod (or other function wrapper type), get the function object. """ if isinstance(func, method_types): try: func = func.__func__ except AttributeError: # classmethods in Python2.6 and below lack the __func__ # attribute so we need to hack around to get it method = func.__get__(None, object) if isinstance(method, types.FunctionType): # For staticmethods anyways the wrapped object is just a # plain function (not a bound method or anything like that) func = method elif hasattr(method, '__func__'): func = method.__func__ elif hasattr(method, 'im_func'): func = method.im_func else: # Nothing we can do really... just return the original # classmethod, etc. return func return func def deprecate_function(func, message): """ Returns a wrapped function that displays an ``AstropyDeprecationWarning`` when it is called. 
""" if isinstance(func, method_types): func_wrapper = type(func) else: func_wrapper = lambda f: f func = get_function(func) def deprecated_func(*args, **kwargs): if pending: category = AstropyPendingDeprecationWarning else: category = AstropyDeprecationWarning warnings.warn(message, category, stacklevel=2) return func(*args, **kwargs) # If this is an extension function, we can't call # functools.wraps on it, but we normally don't care. # This crazy way to get the type of a wrapper descriptor is # straight out of the Python 3.3 inspect module docs. if type(func) != type(str.__dict__['__add__']): deprecated_func = functools.wraps(func)(deprecated_func) deprecated_func.__doc__ = deprecate_doc( deprecated_func.__doc__, message) return func_wrapper(deprecated_func) def deprecate_class(cls, message): """ Returns a wrapper class with the docstrings updated and an __init__ function that will raise an ``AstropyDeprectationWarning`` warning when called. """ # Creates a new class with the same name and bases as the # original class, but updates the dictionary with a new # docstring and a wrapped __init__ method. __module__ needs # to be manually copied over, since otherwise it will be set # to *this* module (astropy.utils.misc). # This approach seems to make Sphinx happy (the new class # looks enough like the original class), and works with # extension classes (which functools.wraps does not, since # it tries to modify the original class). # We need to add a custom pickler or you'll get # Can't pickle : it's not found as ... # errors. Picklability is required for any class that is # documented by Sphinx. members = cls.__dict__.copy() members.update({ '__doc__': deprecate_doc(cls.__doc__, message), '__init__': deprecate_function(get_function(cls.__init__), message), }) return type(cls.__name__, cls.__bases__, members) def deprecate(obj, message=message, name=name, alternative=alternative, pending=pending): if obj_type is None: if isinstance(obj, type): obj_type_name = 'class' elif inspect.isfunction(obj): obj_type_name = 'function' elif inspect.ismethod(obj) or isinstance(obj, method_types): obj_type_name = 'method' else: obj_type_name = 'object' else: obj_type_name = obj_type if not name: name = get_function(obj).__name__ altmessage = '' if not message or type(message) == type(deprecate): if pending: message = ('The %(func)s %(obj_type)s will be deprecated in a ' 'future version.') else: message = ('The %(func)s %(obj_type)s is deprecated and may ' 'be removed in a future version.') if alternative: altmessage = '\n Use %s instead.' % alternative message = ((message % { 'func': name, 'name': name, 'alternative': alternative, 'obj_type': obj_type_name}) + altmessage) if isinstance(obj, type): return deprecate_class(obj, message) else: return deprecate_function(obj, message) if type(message) == type(deprecate): return deprecate(message) return deprecate def deprecated_attribute(name, since, message=None, alternative=None, pending=False): """ Used to mark a public attribute as deprecated. This creates a property that will warn when the given attribute name is accessed. To prevent the warning (i.e. for internal code), use the private name for the attribute by prepending an underscore (i.e. ``self._name``). Parameters ---------- name : str The name of the deprecated attribute. since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. 
The format specifier ``name`` may be used for the name of the attribute, and ``alternative`` may be used in the deprecation message to insert the name of an alternative to the deprecated function. alternative : str, optional An alternative attribute that the user may use in place of the deprecated attribute. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a AstropyPendingDeprecationWarning instead of a AstropyDeprecationWarning. Examples -------- :: class MyClass: # Mark the old_name as deprecated old_name = misc.deprecated_attribute('old_name', '0.1') def method(self): self._old_name = 42 """ private_name = '_' + name @deprecated(since, name=name, obj_type='attribute') def get(self): return getattr(self, private_name) @deprecated(since, name=name, obj_type='attribute') def set(self, val): setattr(self, private_name, val) @deprecated(since, name=name, obj_type='attribute') def delete(self): delattr(self, private_name) return property(get, set, delete) def minversion(module, version, inclusive=True, version_path='__version__'): """ Returns `True` if the specified Python module satisfies a minimum version requirement, and `False` if not. By default this uses `pkg_resources.parse_version` to do the version comparison if available. Otherwise it falls back on `distutils.version.LooseVersion`. Parameters ---------- module : module or `str` An imported module of which to check the version, or the name of that module (in which case an import of that module is attempted-- if this fails `False` is returned). version : `str` The version as a string that this module must have at a minimum (e.g. ``'0.12'``). inclusive : `bool` The specified version meets the requirement inclusively (i.e. ``>=``) as opposed to strictly greater than (default: `True`). version_path : `str` A dotted attribute path to follow in the module for the version. Defaults to just ``'__version__'``, which should work for most Python modules. Examples -------- >>> import astropy >>> minversion(astropy, '0.4.4') True """ if isinstance(module, types.ModuleType): module_name = module.__name__ elif isinstance(module, string_types): module_name = module try: module = resolve_name(module_name) except ImportError: return False else: raise ValueError('module argument must be an actual imported ' 'module, or the import name of the module; ' 'got {0!r}'.format(module)) if '.' not in version_path: have_version = getattr(module, version_path) else: have_version = resolve_name('.'.join([module.__name__, version_path])) try: from pkg_resources import parse_version except ImportError: from distutils.version import LooseVersion as parse_version if inclusive: return parse_version(have_version) >= parse_version(version) else: return parse_version(have_version) > parse_version(version) # Copy of the classproperty decorator from astropy.utils.decorators class classproperty(property): """ Similar to `property`, but allows class-level properties. That is, a property whose getter is like a `classmethod`. The wrapped method may explicitly use the `classmethod` decorator (which must become before this decorator), or the `classmethod` may be omitted (it is implicit through use of this decorator). .. note:: classproperty only works for *read-only* properties. It does not currently allow writeable/deleteable properties, due to subtleties of how Python descriptors work. In order to implement such properties on a class a metaclass for that class must be implemented. 
Parameters ---------- fget : callable The function that computes the value of this property (in particular, the function when this is used as a decorator) a la `property`. doc : str, optional The docstring for the property--by default inherited from the getter function. lazy : bool, optional If True, caches the value returned by the first call to the getter function, so that it is only called once (used for lazy evaluation of an attribute). This is analogous to `lazyproperty`. The ``lazy`` argument can also be used when `classproperty` is used as a decorator (see the third example below). When used in the decorator syntax this *must* be passed in as a keyword argument. Examples -------- :: >>> class Foo(object): ... _bar_internal = 1 ... @classproperty ... def bar(cls): ... return cls._bar_internal + 1 ... >>> Foo.bar 2 >>> foo_instance = Foo() >>> foo_instance.bar 2 >>> foo_instance._bar_internal = 2 >>> foo_instance.bar # Ignores instance attributes 2 As previously noted, a `classproperty` is limited to implementing read-only attributes:: >>> class Foo(object): ... _bar_internal = 1 ... @classproperty ... def bar(cls): ... return cls._bar_internal ... @bar.setter ... def bar(cls, value): ... cls._bar_internal = value ... Traceback (most recent call last): ... NotImplementedError: classproperty can only be read-only; use a metaclass to implement modifiable class-level properties When the ``lazy`` option is used, the getter is only called once:: >>> class Foo(object): ... @classproperty(lazy=True) ... def bar(cls): ... print("Performing complicated calculation") ... return 1 ... >>> Foo.bar Performing complicated calculation 1 >>> Foo.bar 1 If a subclass inherits a lazy `classproperty` the property is still re-evaluated for the subclass:: >>> class FooSub(Foo): ... pass ... 
>>> FooSub.bar Performing complicated calculation 1 >>> FooSub.bar 1 """ def __new__(cls, fget=None, doc=None, lazy=False): if fget is None: # Being used as a decorator--return a wrapper that implements # decorator syntax def wrapper(func): return cls(func, lazy=lazy) return wrapper return super(classproperty, cls).__new__(cls) def __init__(self, fget, doc=None, lazy=False): self._lazy = lazy if lazy: self._cache = {} fget = self._wrap_fget(fget) super(classproperty, self).__init__(fget=fget, doc=doc) # There is a buglet in Python where self.__doc__ doesn't # get set properly on instances of property subclasses if # the doc argument was used rather than taking the docstring # from fget if doc is not None: self.__doc__ = doc def __get__(self, obj, objtype=None): if self._lazy and objtype in self._cache: return self._cache[objtype] if objtype is not None: # The base property.__get__ will just return self here; # instead we pass objtype through to the original wrapped # function (which takes the class as its sole argument) val = self.fget.__wrapped__(objtype) else: val = super(classproperty, self).__get__(obj, objtype=objtype) if self._lazy: if objtype is None: objtype = obj.__class__ self._cache[objtype] = val return val def getter(self, fget): return super(classproperty, self).getter(self._wrap_fget(fget)) def setter(self, fset): raise NotImplementedError( "classproperty can only be read-only; use a metaclass to " "implement modifiable class-level properties") def deleter(self, fdel): raise NotImplementedError( "classproperty can only be read-only; use a metaclass to " "implement modifiable class-level properties") @staticmethod def _wrap_fget(orig_fget): if isinstance(orig_fget, classmethod): orig_fget = orig_fget.__func__ # Using stock functools.wraps instead of the fancier version # found later in this module, which is overkill for this purpose @functools.wraps(orig_fget) def fget(obj): return orig_fget(obj.__class__) # Set the __wrapped__ attribute manually for support on Python 2 fget.__wrapped__ = orig_fget return fget def find_data_files(package, pattern): """ Include files matching ``pattern`` inside ``package``. Parameters ---------- package : str The package inside which to look for data files pattern : str Pattern (glob-style) to match for the data files (e.g. ``*.dat``). This supports the Python 3.5 ``**``recursive syntax. For example, ``**/*.fits`` matches all files ending with ``.fits`` recursively. Only one instance of ``**`` can be included in the pattern. 
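    Examples
    --------
    An illustrative sketch only (the package name and layout here are
    hypothetical); matching paths are returned relative to the current
    directory::

        find_data_files('mypackage', 'data/**/*.fits')
        # e.g. ['mypackage/data/a.fits', 'mypackage/data/sub/b.fits']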
""" if sys.version_info[:2] >= (3, 5): return glob.glob(os.path.join(package, pattern), recursive=True) else: if '**' in pattern: start, end = pattern.split('**') if end.startswith(('/', os.sep)): end = end[1:] matches = glob.glob(os.path.join(package, start, end)) for root, dirs, files in os.walk(os.path.join(package, start)): for dirname in dirs: matches += glob.glob(os.path.join(root, dirname, end)) return matches else: return glob.glob(os.path.join(package, pattern)) astropy-helpers-1.1.1/astropy_helpers/test_helpers.py0000644001134200020070000000077312630421426024064 0ustar embrayscience00000000000000from __future__ import (absolute_import, division, print_function, unicode_literals) import warnings from .commands.test import AstropyTest # Leaving this module here for now, but really it needn't exist # (and it's doubtful that any code depends on it anymore) warnings.warn('The astropy_helpers.test_helpers module is deprecated as ' 'of version 1.1.0; the AstropyTest command can be found in ' 'astropy_helpers.commands.test.', DeprecationWarning) astropy-helpers-1.1.1/astropy_helpers/version.py0000644001134200020070000000104212637041253023041 0ustar embrayscience00000000000000# Autogenerated by Astropy-affiliated package astropy_helpers's setup.py on 2015-12-24 14:08:27.005773 from __future__ import unicode_literals import datetime version = "1.1.1" githash = "fedd7c52ba80e2246f83b8d1752baa3a153a33e2" major = 1 minor = 1 bugfix = 1 release = True timestamp = datetime.datetime(2015, 12, 24, 14, 8, 27, 5773) debug = False try: from ._compiler import compiler except ImportError: compiler = "unknown" try: from .cython_version import cython_version except ImportError: cython_version = "unknown" astropy-helpers-1.1.1/astropy_helpers/sphinx/0000755001134200020070000000000012637041310022310 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/conf.py0000644001134200020070000002564212602613466023631 0ustar embrayscience00000000000000# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst # # Astropy shared Sphinx settings. These settings are shared between # astropy itself and affiliated packages. # # Note that not all possible configuration values are present in this file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import warnings from os import path # -- General configuration ---------------------------------------------------- # The version check in Sphinx itself can only compare the major and # minor parts of the version number, not the micro. To do a more # specific version check, call check_sphinx_version("x.y.z.") from # your project's conf.py needs_sphinx = '1.2' on_rtd = os.environ.get('READTHEDOCS', None) == 'True' def check_sphinx_version(expected_version): import sphinx from distutils import version sphinx_version = version.LooseVersion(sphinx.__version__) expected_version = version.LooseVersion(expected_version) if sphinx_version < expected_version: raise RuntimeError( "At least Sphinx version {0} is required to build this " "documentation. Found {1}.".format( expected_version, sphinx_version)) # Configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { 'python': ('http://docs.python.org/', None), 'python3': ('http://docs.python.org/3/', path.abspath(path.join(path.dirname(__file__), 'local/python3links.inv'))), 'numpy': ('http://docs.scipy.org/doc/numpy/', None), 'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None), 'matplotlib': ('http://matplotlib.org/', None), 'astropy': ('http://docs.astropy.org/en/stable/', None), 'h5py': ('http://docs.h5py.org/en/latest/', None) } # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # Add any paths that contain templates here, relative to this directory. # templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # The reST default role (used for this markup: `text`) to use for all # documents. Set to the "smart" one. default_role = 'obj' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # This is added to the end of RST files - a good place to put substitutions to # be used globally. rst_epilog = """ .. _Astropy: http://astropy.org """ # -- Project information ------------------------------------------------------ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. #pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Settings for extensions and extension options ---------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.inheritance_diagram', 'astropy_helpers.sphinx.ext.numpydoc', 'astropy_helpers.sphinx.ext.astropyautosummary', 'astropy_helpers.sphinx.ext.autodoc_enhancements', 'astropy_helpers.sphinx.ext.automodsumm', 'astropy_helpers.sphinx.ext.automodapi', 'astropy_helpers.sphinx.ext.tocdepthfix', 'astropy_helpers.sphinx.ext.doctest', 'astropy_helpers.sphinx.ext.changelog_links', 'astropy_helpers.sphinx.ext.viewcode', # Use patched version of viewcode 'astropy_helpers.sphinx.ext.smart_resolver' ] if on_rtd: extensions.append('sphinx.ext.mathjax') else: extensions.append('sphinx.ext.pngmath') # Above, we use a patched version of viewcode rather than 'sphinx.ext.viewcode' # This can be changed to the sphinx version once the following issue is fixed # in sphinx: # https://bitbucket.org/birkenfeld/sphinx/issue/623/ # extension-viewcode-fails-with-function try: import matplotlib.sphinxext.plot_directive extensions += [matplotlib.sphinxext.plot_directive.__name__] # AttributeError is checked here in case matplotlib is installed but # Sphinx isn't. 
Note that this module is imported by the config file # generator, even if we're not building the docs. except (ImportError, AttributeError): warnings.warn( "matplotlib's plot_directive could not be imported. " + "Inline plots will not be included in the output") # Don't show summaries of the members in each class along with the # class' docstring numpydoc_show_class_members = False autosummary_generate = True automodapi_toctreedirnm = 'api' # Class documentation should contain *both* the class docstring and # the __init__ docstring autoclass_content = "both" # Render inheritance diagrams in SVG graphviz_output_format = "svg" graphviz_dot_args = [ '-Nfontsize=10', '-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif', '-Efontsize=10', '-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif', '-Gfontsize=10', '-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif' ] # -- Options for HTML output ------------------------------------------------- # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [path.abspath(path.join(path.dirname(__file__), 'themes'))] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'bootstrap-astropy' # Custom sidebar templates, maps document names to template names. html_sidebars = { '**': ['localtoc.html'], 'search': [], 'genindex': [], 'py-modindex': [], } # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # included in the bootstrap-astropy theme html_favicon = path.join(html_theme_path[0], html_theme, 'static', 'astropy_logo.ico') # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%d %b %Y' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # -- Options for LaTeX output ------------------------------------------------ # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). 
#latex_font_size = '10pt' # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. latex_use_parts = True # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. latex_preamble = r""" % Use a more modern-looking monospace font \usepackage{inconsolata} % The enumitem package provides unlimited nesting of lists and enums. % Sphinx may use this in the future, in which case this can be removed. % See https://bitbucket.org/birkenfeld/sphinx/issue/777/latex-output-too-deeply-nested \usepackage{enumitem} \setlistdepth{15} % In the parameters section, place a newline after the Parameters % header. (This is stolen directly from Numpy's conf.py, since it % affects Numpy-style docstrings). \usepackage{expdlist} \let\latexdescription=\description \def\description{\latexdescription{}{} \breaklabel} % Support the superscript Unicode numbers used by the "unicode" units % formatter \DeclareUnicodeCharacter{2070}{\ensuremath{^0}} \DeclareUnicodeCharacter{00B9}{\ensuremath{^1}} \DeclareUnicodeCharacter{00B2}{\ensuremath{^2}} \DeclareUnicodeCharacter{00B3}{\ensuremath{^3}} \DeclareUnicodeCharacter{2074}{\ensuremath{^4}} \DeclareUnicodeCharacter{2075}{\ensuremath{^5}} \DeclareUnicodeCharacter{2076}{\ensuremath{^6}} \DeclareUnicodeCharacter{2077}{\ensuremath{^7}} \DeclareUnicodeCharacter{2078}{\ensuremath{^8}} \DeclareUnicodeCharacter{2079}{\ensuremath{^9}} \DeclareUnicodeCharacter{207B}{\ensuremath{^-}} \DeclareUnicodeCharacter{00B0}{\ensuremath{^{\circ}}} \DeclareUnicodeCharacter{2032}{\ensuremath{^{\prime}}} \DeclareUnicodeCharacter{2033}{\ensuremath{^{\prime\prime}}} % Make the "warning" and "notes" sections use a sans-serif font to % make them stand out more. \renewenvironment{notice}[2]{ \def\py@noticetype{#1} \csname py@noticestart@#1\endcsname \textsf{\textbf{#2}} }{\csname py@noticeend@\py@noticetype\endcsname} """ # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # -- Options for the linkcheck builder ---------------------------------------- # A timeout value, in seconds, for the linkcheck builder linkcheck_timeout = 60 astropy-helpers-1.1.1/astropy_helpers/sphinx/setup_package.py0000644001134200020070000000050412602613465025504 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst def get_package_data(): # Install the theme files return { 'astropy_helpers.sphinx': [ 'ext/templates/*/*', 'local/*.inv', 'themes/bootstrap-astropy/*.*', 'themes/bootstrap-astropy/static/*.*']} astropy-helpers-1.1.1/astropy_helpers/sphinx/local/0000755001134200020070000000000012637041310023402 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/local/python3links.inv0000644001134200020070000000074312602613466026602 0ustar embrayscience00000000000000# Sphinx inventory version 2 # Project: Python # Version: 3.4 # The remainder of this file is compressed using zlib. 
xœ¥“OSƒ0Åïù;Ó‹ÀñÏ©÷z®¶SωÜl:à§7@­i‡:¨²ï÷–äm°i*eZPf†-u°Grʸ X“}CÉKXw\YVvcu ÷éCøÜV„u¦L¶®”ƒRiWY¯ Ȥ­Bç°ï”y…­òT䣃¦[–ÞHî[&·*”QwóµæÒŠk½µ‰Øª­ç‘¥ÅVbsÎ𠔈Ü+Í*mÞo®·‘:sî‡öeÄjåf‘ƒ.â¸kp7è"nÐ×R(æà±ïÉœPpÑe²Ã¶"ÌŠµµzÕ¢ô<ðŸnމ{õ˜>õÏIÿ±>XÆÒD4¤ _]ÏLvPêSÊÐastropy-helpers-1.1.1/astropy_helpers/sphinx/ext/0000755001134200020070000000000012637041310023110 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/phantom_import.py0000644001134200020070000001333612602613466026541 0ustar embrayscience00000000000000""" ============== phantom_import ============== Sphinx extension to make directives from ``sphinx.ext.autodoc`` and similar extensions to use docstrings loaded from an XML file. This extension loads an XML file in the Pydocweb format [1] and creates a dummy module that contains the specified docstrings. This can be used to get the current docstrings from a Pydocweb instance without needing to rebuild the documented module. .. [1] http://code.google.com/p/pydocweb """ from __future__ import division, absolute_import, print_function import imp, sys, compiler, types, os, inspect, re def setup(app): app.connect('builder-inited', initialize) app.add_config_value('phantom_import_file', None, True) def initialize(app): fn = app.config.phantom_import_file if (fn and os.path.isfile(fn)): print("[numpydoc] Phantom importing modules from", fn, "...") import_phantom_module(fn) #------------------------------------------------------------------------------ # Creating 'phantom' modules from an XML description #------------------------------------------------------------------------------ def import_phantom_module(xml_file): """ Insert a fake Python module to sys.modules, based on a XML file. The XML file is expected to conform to Pydocweb DTD. The fake module will contain dummy objects, which guarantee the following: - Docstrings are correct. - Class inheritance relationships are correct (if present in XML). - Function argspec is *NOT* correct (even if present in XML). Instead, the function signature is prepended to the function docstring. - Class attributes are *NOT* correct; instead, they are dummy objects. 
Parameters ---------- xml_file : str Name of an XML file to read """ import lxml.etree as etree object_cache = {} tree = etree.parse(xml_file) root = tree.getroot() # Sort items so that # - Base classes come before classes inherited from them # - Modules come before their contents all_nodes = dict([(n.attrib['id'], n) for n in root]) def _get_bases(node, recurse=False): bases = [x.attrib['ref'] for x in node.findall('base')] if recurse: j = 0 while True: try: b = bases[j] except IndexError: break if b in all_nodes: bases.extend(_get_bases(all_nodes[b])) j += 1 return bases type_index = ['module', 'class', 'callable', 'object'] def base_cmp(a, b): x = cmp(type_index.index(a.tag), type_index.index(b.tag)) if x != 0: return x if a.tag == 'class' and b.tag == 'class': a_bases = _get_bases(a, recurse=True) b_bases = _get_bases(b, recurse=True) x = cmp(len(a_bases), len(b_bases)) if x != 0: return x if a.attrib['id'] in b_bases: return -1 if b.attrib['id'] in a_bases: return 1 return cmp(a.attrib['id'].count('.'), b.attrib['id'].count('.')) nodes = root.getchildren() nodes.sort(base_cmp) # Create phantom items for node in nodes: name = node.attrib['id'] doc = (node.text or '').decode('string-escape') + "\n" if doc == "\n": doc = "" # create parent, if missing parent = name while True: parent = '.'.join(parent.split('.')[:-1]) if not parent: break if parent in object_cache: break obj = imp.new_module(parent) object_cache[parent] = obj sys.modules[parent] = obj # create object if node.tag == 'module': obj = imp.new_module(name) obj.__doc__ = doc sys.modules[name] = obj elif node.tag == 'class': bases = [object_cache[b] for b in _get_bases(node) if b in object_cache] bases.append(object) init = lambda self: None init.__doc__ = doc obj = type(name, tuple(bases), {'__doc__': doc, '__init__': init}) obj.__name__ = name.split('.')[-1] elif node.tag == 'callable': funcname = node.attrib['id'].split('.')[-1] argspec = node.attrib.get('argspec') if argspec: argspec = re.sub('^[^(]*', '', argspec) doc = "%s%s\n\n%s" % (funcname, argspec, doc) obj = lambda: 0 obj.__argspec_is_invalid_ = True if sys.version_info[0] >= 3: obj.__name__ = funcname else: obj.func_name = funcname obj.__name__ = name obj.__doc__ = doc if inspect.isclass(object_cache[parent]): obj.__objclass__ = object_cache[parent] else: class Dummy(object): pass obj = Dummy() obj.__name__ = name obj.__doc__ = doc if inspect.isclass(object_cache[parent]): obj.__get__ = lambda: None object_cache[name] = obj if parent: if inspect.ismodule(object_cache[parent]): obj.__module__ = parent setattr(object_cache[parent], name.split('.')[-1], obj) # Populate items for node in root: obj = object_cache.get(node.attrib['id']) if obj is None: continue for ref in node.findall('ref'): if node.tag == 'class': if ref.attrib['ref'].startswith(node.attrib['id'] + '.'): setattr(obj, ref.attrib['name'], object_cache.get(ref.attrib['ref'])) else: setattr(obj, ref.attrib['name'], object_cache.get(ref.attrib['ref'])) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/docscrape_sphinx.py0000644001134200020070000002233512602613466027034 0ustar embrayscience00000000000000from __future__ import division, absolute_import, print_function import sys, re, inspect, textwrap, pydoc import sphinx import collections from .docscrape import NumpyDocString, FunctionDoc, ClassDoc if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): NumpyDocString.__init__(self, 
docstring, config=config) self.load_config(config) def load_config(self, config): self.use_plots = config.get('use_plots', False) self.class_members_toctree = config.get('class_members_toctree', True) # string conversion routines def _str_header(self, name, symbol='`'): return ['.. rubric:: ' + name, ''] def _str_field_list(self, name): return [':' + name + ':'] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): return [''] if self['Signature']: return ['``%s``' % self['Signature']] + [''] else: return [''] def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Extended Summary'] + [''] def _str_returns(self): out = [] if self['Returns']: out += self._str_field_list('Returns') out += [''] for param, param_type, desc in self['Returns']: if param_type: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) else: out += self._str_indent([param.strip()]) if desc: out += [''] out += self._str_indent(desc, 8) out += [''] return out def _str_param_list(self, name): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param, param_type, desc in self[name]: if param_type: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) else: out += self._str_indent(['**%s**' % param.strip()]) if desc: out += [''] out += self._str_indent(desc, 8) out += [''] return out @property def _obj(self): if hasattr(self, '_cls'): return self._cls elif hasattr(self, '_f'): return self._f return None def _str_member_list(self, name): """ Generate a member listing, autosummary:: table where possible, and a table where not. """ out = [] if self[name]: out += ['.. rubric:: %s' % name, ''] prefix = getattr(self, '_name', '') if prefix: prefix = '~%s.' % prefix autosum = [] others = [] for param, param_type, desc in self[name]: param = param.strip() # Check if the referenced member can have a docstring or not param_obj = getattr(self._obj, param, None) if not (callable(param_obj) or isinstance(param_obj, property) or inspect.isgetsetdescriptor(param_obj)): param_obj = None if param_obj and (pydoc.getdoc(param_obj) or not desc): # Referenced object has a docstring autosum += [" %s%s" % (prefix, param)] else: others.append((param, param_type, desc)) if autosum: out += ['.. autosummary::'] if self.class_members_toctree: out += [' :toctree:'] out += [''] + autosum if others: maxlen_0 = max(3, max([len(x[0]) for x in others])) hdr = sixu("=")*maxlen_0 + sixu(" ") + sixu("=")*10 fmt = sixu('%%%ds %%s ') % (maxlen_0,) out += ['', hdr] for param, param_type, desc in others: desc = sixu(" ").join(x.strip() for x in desc).strip() if param_type: desc = "(%s) %s" % (param_type, desc) out += [fmt % (param.strip(), desc)] out += [hdr] out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += [''] content = textwrap.dedent("\n".join(self[name])).split("\n") out += content out += [''] return out def _str_see_also(self, func_role): out = [] if self['See Also']: see_also = super(SphinxDocString, self)._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out def _str_warnings(self): out = [] if self['Warnings']: out = ['.. warning::', ''] out += self._str_indent(self['Warnings']) return out def _str_index(self): idx = self['index'] out = [] if len(idx) == 0: return out out += ['.. 
index:: %s' % idx.get('default','')] for section, references in idx.items(): if section == 'default': continue elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: out += [' %s: %s' % (section, ','.join(references))] return out def _str_references(self): out = [] if self['References']: out += self._str_header('References') if isinstance(self['References'], str): self['References'] = [self['References']] out.extend(self['References']) out += [''] # Latex collects all references to a separate bibliography, # so we need to insert links to it if sphinx.__version__ >= "0.6": out += ['.. only:: latex',''] else: out += ['.. latexonly::',''] items = [] for line in self['References']: m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I) if m: items.append(m.group(1)) out += [' ' + ", ".join(["[%s]_" % item for item in items]), ''] return out def _str_examples(self): examples_str = "\n".join(self['Examples']) if (self.use_plots and 'import matplotlib' in examples_str and 'plot::' not in examples_str): out = [] out += self._str_header('Examples') out += ['.. plot::', ''] out += self._str_indent(self['Examples']) out += [''] return out else: return self._str_section('Examples') def __str__(self, indent=0, func_role="obj"): out = [] out += self._str_signature() out += self._str_index() + [''] out += self._str_summary() out += self._str_extended_summary() out += self._str_param_list('Parameters') out += self._str_returns() for param_list in ('Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_warnings() out += self._str_see_also(func_role) out += self._str_section('Notes') out += self._str_references() out += self._str_examples() for param_list in ('Attributes', 'Methods'): out += self._str_member_list(param_list) out = self._str_indent(out,indent) return '\n'.join(out) class SphinxFunctionDoc(SphinxDocString, FunctionDoc): def __init__(self, obj, doc=None, config={}): self.load_config(config) FunctionDoc.__init__(self, obj, doc=doc, config=config) class SphinxClassDoc(SphinxDocString, ClassDoc): def __init__(self, obj, doc=None, func_doc=None, config={}): self.load_config(config) ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) class SphinxObjDoc(SphinxDocString): def __init__(self, obj, doc=None, config={}): self._f = obj self.load_config(config) SphinxDocString.__init__(self, doc, config=config) def get_doc_object(obj, what=None, doc=None, config={}): if what is None: if inspect.isclass(obj): what = 'class' elif inspect.ismodule(obj): what = 'module' elif isinstance(obj, collections.Callable): what = 'function' else: what = 'object' if what == 'class': return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, config=config) elif what in ('function', 'method'): return SphinxFunctionDoc(obj, doc=doc, config=config) else: if doc is None: doc = pydoc.getdoc(obj) return SphinxObjDoc(obj, doc, config=config) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/docscrape.py0000644001134200020070000003771312602613466025451 0ustar embrayscience00000000000000"""Extract reference documentation from the NumPy source tree. """ from __future__ import division, absolute_import, print_function import inspect import textwrap import re import pydoc from warnings import warn import collections import sys class Reader(object): """A line-based string reader. """ def __init__(self, data): """ Parameters ---------- data : str String with lines separated by '\n'. 
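        Examples
        --------
        A minimal sketch of the reading protocol (a list of lines is also
        accepted in place of a single string):

        >>> r = Reader(['one', 'two'])
        >>> r.read()
        'one'
        >>> r.eof()
        False
        >>> r.read()
        'two'
        >>> r.eof()
        True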
""" if isinstance(data,list): self._str = data else: self._str = data.split('\n') # store string as list of lines self.reset() def __getitem__(self, n): return self._str[n] def reset(self): self._l = 0 # current line nr def read(self): if not self.eof(): out = self[self._l] self._l += 1 return out else: return '' def seek_next_non_empty_line(self): for l in self[self._l:]: if l.strip(): break else: self._l += 1 def eof(self): return self._l >= len(self._str) def read_to_condition(self, condition_func): start = self._l for line in self[start:]: if condition_func(line): return self[start:self._l] self._l += 1 if self.eof(): return self[start:self._l+1] return [] def read_to_next_empty_line(self): self.seek_next_non_empty_line() def is_empty(line): return not line.strip() return self.read_to_condition(is_empty) def read_to_next_unindented_line(self): def is_unindented(line): return (line.strip() and (len(line.lstrip()) == len(line))) return self.read_to_condition(is_unindented) def peek(self,n=0): if self._l + n < len(self._str): return self[self._l + n] else: return '' def is_empty(self): return not ''.join(self._str).strip() class NumpyDocString(object): def __init__(self, docstring, config={}): docstring = textwrap.dedent(docstring).split('\n') self._doc = Reader(docstring) self._parsed_data = { 'Signature': '', 'Summary': [''], 'Extended Summary': [], 'Parameters': [], 'Returns': [], 'Raises': [], 'Warns': [], 'Other Parameters': [], 'Attributes': [], 'Methods': [], 'See Also': [], 'Notes': [], 'Warnings': [], 'References': '', 'Examples': '', 'index': {} } self._parse() def __getitem__(self,key): return self._parsed_data[key] def __setitem__(self,key,val): if key not in self._parsed_data: warn("Unknown section %s" % key) else: self._parsed_data[key] = val def _is_at_section(self): self._doc.seek_next_non_empty_line() if self._doc.eof(): return False l1 = self._doc.peek().strip() # e.g. Parameters if l1.startswith('.. 
index::'): return True l2 = self._doc.peek(1).strip() # ---------- or ========== return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) def _strip(self,doc): i = 0 j = 0 for i,line in enumerate(doc): if line.strip(): break for j,line in enumerate(doc[::-1]): if line.strip(): break return doc[i:len(doc)-j] def _read_to_next_section(self): section = self._doc.read_to_next_empty_line() while not self._is_at_section() and not self._doc.eof(): if not self._doc.peek(-1).strip(): # previous line was empty section += [''] section += self._doc.read_to_next_empty_line() return section def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if name.startswith('..'): # index section yield name, data[1:] elif len(data) < 2: yield StopIteration else: yield name, self._strip(data[2:]) def _parse_param_list(self,content): r = Reader(content) params = [] while not r.eof(): header = r.read().strip() if ' : ' in header: arg_name, arg_type = header.split(' : ')[:2] else: arg_name, arg_type = header, '' desc = r.read_to_next_unindented_line() desc = dedent_lines(desc) params.append((arg_name,arg_type,desc)) return params _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) def _parse_see_also(self, content): """ func_name : Descriptive text continued text another_func_name : Descriptive text func_name1, func_name2, :meth:`func_name`, func_name3 """ items = [] def parse_item_name(text): """Match ':role:`name`' or 'name'""" m = self._name_rgx.match(text) if m: g = m.groups() if g[1] is None: return g[3], None else: return g[2], g[1] raise ValueError("%s is not a item name" % text) def push_item(name, rest): if not name: return name, role = parse_item_name(name) items.append((name, list(rest), role)) del rest[:] current_func = None rest = [] for line in content: if not line.strip(): continue m = self._name_rgx.match(line) if m and line[m.end():].strip().startswith(':'): push_item(current_func, rest) current_func, line = line[:m.end()], line[m.end():] rest = [line.split(':', 1)[1].strip()] if not rest[0]: rest = [] elif not line.startswith(' '): push_item(current_func, rest) current_func = None if ',' in line: for func in line.split(','): if func.strip(): push_item(func, []) elif line.strip(): current_func = line elif current_func is not None: rest.append(line.strip()) push_item(current_func, rest) return items def _parse_index(self, section, content): """ .. 
index: default :refguide: something, else, and more """ def strip_each_in(lst): return [s.strip() for s in lst] out = {} section = section.split('::') if len(section) > 1: out['default'] = strip_each_in(section[1].split(','))[0] for line in content: line = line.split(':') if len(line) > 2: out[line[1]] = strip_each_in(line[2].split(',')) return out def _parse_summary(self): """Grab signature (if given) and summary""" if self._is_at_section(): return # If several signatures present, take the last one while True: summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): continue break if summary is not None: self['Summary'] = summary if not self._is_at_section(): self['Extended Summary'] = self._read_to_next_section() def _parse(self): self._doc.reset() self._parse_summary() for (section,content) in self._read_sections(): if not section.startswith('..'): section = ' '.join([s.capitalize() for s in section.split(' ')]) if section in ('Parameters', 'Returns', 'Raises', 'Warns', 'Other Parameters', 'Attributes', 'Methods'): self[section] = self._parse_param_list(content) elif section.startswith('.. index::'): self['index'] = self._parse_index(section, content) elif section == 'See Also': self['See Also'] = self._parse_see_also(content) else: self[section] = content # string conversion routines def _str_header(self, name, symbol='-'): return [name, len(name)*symbol] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): if self['Signature']: return [self['Signature'].replace('*','\*')] + [''] else: return [''] def _str_summary(self): if self['Summary']: return self['Summary'] + [''] else: return [] def _str_extended_summary(self): if self['Extended Summary']: return self['Extended Summary'] + [''] else: return [] def _str_param_list(self, name): out = [] if self[name]: out += self._str_header(name) for param,param_type,desc in self[name]: if param_type: out += ['%s : %s' % (param, param_type)] else: out += [param] out += self._str_indent(desc) out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += self[name] out += [''] return out def _str_see_also(self, func_role): if not self['See Also']: return [] out = [] out += self._str_header("See Also") last_had_desc = True for func, desc, role in self['See Also']: if role: link = ':%s:`%s`' % (role, func) elif func_role: link = ':%s:`%s`' % (func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: out += [''] out += [link] else: out[-1] += ", %s" % link if desc: out += self._str_indent([' '.join(desc)]) last_had_desc = True else: last_had_desc = False out += [''] return out def _str_index(self): idx = self['index'] out = [] out += ['.. 
index:: %s' % idx.get('default','')] for section, references in idx.items(): if section == 'default': continue out += [' :%s: %s' % (section, ', '.join(references))] return out def __str__(self, func_role=''): out = [] out += self._str_signature() out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_section('Warnings') out += self._str_see_also(func_role) for s in ('Notes','References','Examples'): out += self._str_section(s) for param_list in ('Attributes', 'Methods'): out += self._str_param_list(param_list) out += self._str_index() return '\n'.join(out) def indent(str,indent=4): indent_str = ' '*indent if str is None: return indent_str lines = str.split('\n') return '\n'.join(indent_str + l for l in lines) def dedent_lines(lines): """Deindent a list of lines maximally""" return textwrap.dedent("\n".join(lines)).split("\n") def header(text, style='-'): return text + '\n' + style*len(text) + '\n' class FunctionDoc(NumpyDocString): def __init__(self, func, role='func', doc=None, config={}): self._f = func self._role = role # e.g. "func" or "meth" if doc is None: if func is None: raise ValueError("No function or docstring given") doc = inspect.getdoc(func) or '' NumpyDocString.__init__(self, doc) if not self['Signature'] and func is not None: func, func_name = self.get_func() try: # try to read signature if sys.version_info[0] >= 3: argspec = inspect.getfullargspec(func) else: argspec = inspect.getargspec(func) argspec = inspect.formatargspec(*argspec) argspec = argspec.replace('*','\*') signature = '%s%s' % (func_name, argspec) except TypeError as e: signature = '%s()' % func_name self['Signature'] = signature def get_func(self): func_name = getattr(self._f, '__name__', self.__class__.__name__) if inspect.isclass(self._f): func = getattr(self._f, '__call__', self._f.__init__) else: func = self._f return func, func_name def __str__(self): out = '' func, func_name = self.get_func() signature = self['Signature'].replace('*', '\*') roles = {'func': 'function', 'meth': 'method'} if self._role: if self._role not in roles: print("Warning: invalid role %s" % self._role) out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), func_name) out += super(FunctionDoc, self).__str__(func_role=self._role) return out class ClassDoc(NumpyDocString): extra_public_methods = ['__call__'] def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc, config={}): if not inspect.isclass(cls) and cls is not None: raise ValueError("Expected a class or None, but got %r" % cls) self._cls = cls if modulename and not modulename.endswith('.'): modulename += '.' 
self._mod = modulename if doc is None: if cls is None: raise ValueError("No class or documentation string given") doc = pydoc.getdoc(cls) NumpyDocString.__init__(self, doc) if config.get('show_class_members', True): def splitlines_x(s): if not s: return [] else: return s.splitlines() for field, items in [('Methods', self.methods), ('Attributes', self.properties)]: if not self[field]: doc_list = [] for name in sorted(items): try: doc_item = pydoc.getdoc(getattr(self._cls, name)) doc_list.append((name, '', splitlines_x(doc_item))) except AttributeError: pass # method doesn't exist self[field] = doc_list @property def methods(self): if self._cls is None: return [] return [name for name,func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) and isinstance(func, collections.Callable))] @property def properties(self): if self._cls is None: return [] return [name for name,func in inspect.getmembers(self._cls) if not name.startswith('_') and (func is None or isinstance(func, property) or inspect.isgetsetdescriptor(func))] astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/0000755001134200020070000000000012637041310024252 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/test_utils.py0000644001134200020070000000174312602613466027041 0ustar embrayscience00000000000000#namedtuple is needed for find_mod_objs so it can have a non-local module import sys from collections import namedtuple import pytest from ..utils import find_mod_objs PY3 = sys.version_info[0] >= 3 pytestmark = pytest.mark.skipif("PY3") def test_find_mod_objs(): lnms, fqns, objs = find_mod_objs('astropy_helpers') # this import is after the above call intentionally to make sure # find_mod_objs properly imports astropy on its own import astropy_helpers # just check for astropy.test ... other things might be added, so we # shouldn't check that it's the only thing assert lnms == [] lnms, fqns, objs = find_mod_objs( 'astropy_helpers.sphinx.ext.tests.test_utils', onlylocals=False) assert namedtuple in objs lnms, fqns, objs = find_mod_objs( 'astropy_helpers.sphinx.ext.tests.test_utils', onlylocals=True) assert 'namedtuple' not in lnms assert 'collections.namedtuple' not in fqns assert namedtuple not in objs astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/test_docscrape.py0000644001134200020070000004327112602613466027646 0ustar embrayscience00000000000000# -*- encoding:utf-8 -*- from __future__ import division, absolute_import, print_function import sys, textwrap from ..docscrape import NumpyDocString, FunctionDoc, ClassDoc from ..docscrape_sphinx import SphinxDocString, SphinxClassDoc if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') doc_txt = '''\ numpy.multivariate_normal(mean, cov, shape=None, spam=None) Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. Parameters ---------- mean : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 cov : (N, N) ndarray Covariance matrix of the distribution. shape : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). Returns ------- out : ndarray The drawn samples, arranged according to `shape`. 
If the shape given is (m,n,...), then the shape of `out` is is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. Other Parameters ---------------- spam : parrot A parrot off its mortal coil. Raises ------ RuntimeError Some error Warns ----- RuntimeWarning Some warning Warnings -------- Certain warnings apply. Notes ----- Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. References ---------- .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. See Also -------- some, other, funcs otherfunc : relationship Examples -------- >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print x.shape (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print list( (x[0,0,:] - mean) < 0.6 ) [True, True] .. index:: random :refguide: random;distributions, random;gauss ''' doc = NumpyDocString(doc_txt) def test_signature(): assert doc['Signature'].startswith('numpy.multivariate_normal(') assert doc['Signature'].endswith('spam=None)') def test_summary(): assert doc['Summary'][0].startswith('Draw values') assert doc['Summary'][-1].endswith('covariance.') def test_extended_summary(): assert doc['Extended Summary'][0].startswith('The multivariate normal') def test_parameters(): assert len(doc['Parameters']) == 3 assert [n for n,_,_ in doc['Parameters']] == ['mean','cov','shape'] arg, arg_type, desc = doc['Parameters'][1] assert arg_type == '(N, N) ndarray' assert desc[0].startswith('Covariance matrix') assert doc['Parameters'][0][-1][-2] == ' (1+2+3)/3' def test_other_parameters(): assert len(doc['Other Parameters']) == 1 assert [n for n,_,_ in doc['Other Parameters']] == ['spam'] arg, arg_type, desc = doc['Other Parameters'][0] assert arg_type == 'parrot' assert desc[0].startswith('A parrot off its mortal coil') def test_returns(): assert len(doc['Returns']) == 2 arg, arg_type, desc = doc['Returns'][0] assert arg == 'out' assert arg_type == 'ndarray' assert desc[0].startswith('The drawn samples') assert desc[-1].endswith('distribution.') arg, arg_type, desc = doc['Returns'][1] assert arg == 'list of str' assert arg_type == '' assert desc[0].startswith('This is not a real') assert desc[-1].endswith('anonymous return values.') def test_notes(): assert doc['Notes'][0].startswith('Instead') assert doc['Notes'][-1].endswith('definite.') assert len(doc['Notes']) == 17 def test_references(): assert doc['References'][0].startswith('..') assert doc['References'][-1].endswith('2001.') def test_examples(): assert doc['Examples'][0].startswith('>>>') assert doc['Examples'][-1].endswith('True]') def test_index(): assert doc['index']['default'] == 'random' assert len(doc['index']) == 2 assert 
len(doc['index']['refguide']) == 2 def non_blank_line_by_line_compare(a,b): a = textwrap.dedent(a) b = textwrap.dedent(b) a = [l.rstrip() for l in a.split('\n') if l.strip()] b = [l.rstrip() for l in b.split('\n') if l.strip()] for n,line in enumerate(a): if not line == b[n]: raise AssertionError("Lines %s of a and b differ: " "\n>>> %s\n<<< %s\n" % (n,line,b[n])) def test_str(): non_blank_line_by_line_compare(str(doc), """numpy.multivariate_normal(mean, cov, shape=None, spam=None) Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. Parameters ---------- mean : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 cov : (N, N) ndarray Covariance matrix of the distribution. shape : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). Returns ------- out : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. Other Parameters ---------------- spam : parrot A parrot off its mortal coil. Raises ------ RuntimeError Some error Warns ----- RuntimeWarning Some warning Warnings -------- Certain warnings apply. See Also -------- `some`_, `other`_, `funcs`_ `otherfunc`_ relationship Notes ----- Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. References ---------- .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. Examples -------- >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print x.shape (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print list( (x[0,0,:] - mean) < 0.6 ) [True, True] .. index:: random :refguide: random;distributions, random;gauss""") def test_sphinx_str(): sphinx_doc = SphinxDocString(doc_txt) non_blank_line_by_line_compare(str(sphinx_doc), """ .. index:: random single: random;distributions, random;gauss Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. :Parameters: **mean** : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 **cov** : (N, N) ndarray Covariance matrix of the distribution. **shape** : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. 
Because each sample is N-dimensional, the output shape is (m,n,k,N). :Returns: **out** : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. :Other Parameters: **spam** : parrot A parrot off its mortal coil. :Raises: **RuntimeError** Some error :Warns: **RuntimeWarning** Some warning .. warning:: Certain warnings apply. .. seealso:: :obj:`some`, :obj:`other`, :obj:`funcs` :obj:`otherfunc` relationship .. rubric:: Notes Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. .. rubric:: References .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. .. only:: latex [1]_, [2]_ .. rubric:: Examples >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print x.shape (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print list( (x[0,0,:] - mean) < 0.6 ) [True, True] """) doc2 = NumpyDocString(""" Returns array of indices of the maximum values of along the given axis. Parameters ---------- a : {array_like} Array to look in. axis : {None, integer} If None, the index is into the flattened array, otherwise along the specified axis""") def test_parameters_without_extended_description(): assert len(doc2['Parameters']) == 2 doc3 = NumpyDocString(""" my_signature(*params, **kwds) Return this and that. """) def test_escape_stars(): signature = str(doc3).split('\n')[0] signature == 'my_signature(\*params, \*\*kwds)' doc4 = NumpyDocString( """a.conj() Return an array with all complex-valued elements conjugated.""") def test_empty_extended_summary(): assert doc4['Extended Summary'] == [] doc5 = NumpyDocString( """ a.something() Raises ------ LinAlgException If array is singular. 
Warns ----- SomeWarning If needed """) def test_raises(): assert len(doc5['Raises']) == 1 name,_,desc = doc5['Raises'][0] assert name == 'LinAlgException' assert desc == ['If array is singular.'] def test_warns(): assert len(doc5['Warns']) == 1 name,_,desc = doc5['Warns'][0] assert name == 'SomeWarning' assert desc == ['If needed'] def test_see_also(): doc6 = NumpyDocString( """ z(x,theta) See Also -------- func_a, func_b, func_c func_d : some equivalent func foo.func_e : some other func over multiple lines func_f, func_g, :meth:`func_h`, func_j, func_k :obj:`baz.obj_q` :class:`class_j`: fubar foobar """) assert len(doc6['See Also']) == 12 for func, desc, role in doc6['See Also']: if func in ('func_a', 'func_b', 'func_c', 'func_f', 'func_g', 'func_h', 'func_j', 'func_k', 'baz.obj_q'): assert(not desc) else: assert(desc) if func == 'func_h': assert role == 'meth' elif func == 'baz.obj_q': assert role == 'obj' elif func == 'class_j': assert role == 'class' else: assert role is None if func == 'func_d': assert desc == ['some equivalent func'] elif func == 'foo.func_e': assert desc == ['some other func over', 'multiple lines'] elif func == 'class_j': assert desc == ['fubar', 'foobar'] def test_see_also_print(): class Dummy(object): """ See Also -------- func_a, func_b func_c : some relationship goes here func_d """ pass obj = Dummy() s = str(FunctionDoc(obj, role='func')) assert(':func:`func_a`, :func:`func_b`' in s) assert(' some relationship' in s) assert(':func:`func_d`' in s) doc7 = NumpyDocString(""" Doc starts on second line. """) def test_empty_first_line(): assert doc7['Summary'][0].startswith('Doc starts') def test_no_summary(): str(SphinxDocString(""" Parameters ----------""")) def test_unicode(): doc = SphinxDocString(""" öäöäöäöäöåååå öäöäöäööäååå Parameters ---------- ååå : äää ööö Returns ------- ååå : ööö äää """) assert isinstance(doc['Summary'][0], str) assert doc['Summary'][0] == 'öäöäöäöäöåååå' def test_plot_examples(): cfg = dict(use_plots=True) doc = SphinxDocString(""" Examples -------- >>> import matplotlib.pyplot as plt >>> plt.plot([1,2,3],[4,5,6]) >>> plt.show() """, config=cfg) assert 'plot::' in str(doc), str(doc) doc = SphinxDocString(""" Examples -------- .. plot:: import matplotlib.pyplot as plt plt.plot([1,2,3],[4,5,6]) plt.show() """, config=cfg) assert str(doc).count('plot::') == 1, str(doc) def test_class_members(): class Dummy(object): """ Dummy class. """ def spam(self, a, b): """Spam\n\nSpam spam.""" pass def ham(self, c, d): """Cheese\n\nNo cheese.""" pass @property def spammity(self): """Spammity index""" return 0.95 class Ignorable(object): """local class, to be ignored""" pass for cls in (ClassDoc, SphinxClassDoc): doc = cls(Dummy, config=dict(show_class_members=False)) assert 'Methods' not in str(doc), (cls, str(doc)) assert 'spam' not in str(doc), (cls, str(doc)) assert 'ham' not in str(doc), (cls, str(doc)) assert 'spammity' not in str(doc), (cls, str(doc)) assert 'Spammity index' not in str(doc), (cls, str(doc)) doc = cls(Dummy, config=dict(show_class_members=True)) assert 'Methods' in str(doc), (cls, str(doc)) assert 'spam' in str(doc), (cls, str(doc)) assert 'ham' in str(doc), (cls, str(doc)) assert 'spammity' in str(doc), (cls, str(doc)) if cls is SphinxClassDoc: assert '.. autosummary::' in str(doc), str(doc) else: assert 'Spammity index' in str(doc), str(doc) def test_duplicate_signature(): # Duplicate function signatures occur e.g. in ufuncs, when the # automatic mechanism adds one, and a more detailed comes from the # docstring itself. 
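    # NumpyDocString._parse_summary keeps the last signature it finds, so the
    # assertion below expects the more detailed, hand-written form to win.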
doc = NumpyDocString( """ z(x1, x2) z(a, theta) """) assert doc['Signature'].strip() == 'z(a, theta)' class_doc_txt = """ Foo Parameters ---------- f : callable ``f(t, y, *f_args)`` Aaa. jac : callable ``jac(t, y, *jac_args)`` Bbb. Attributes ---------- t : float Current time. y : ndarray Current variable values. Methods ------- a b c Examples -------- For usage examples, see `ode`. """ def test_class_members_doc(): doc = ClassDoc(None, class_doc_txt) non_blank_line_by_line_compare(str(doc), """ Foo Parameters ---------- f : callable ``f(t, y, *f_args)`` Aaa. jac : callable ``jac(t, y, *jac_args)`` Bbb. Examples -------- For usage examples, see `ode`. Attributes ---------- t : float Current time. y : ndarray Current variable values. Methods ------- a b c .. index:: """) def test_class_members_doc_sphinx(): doc = SphinxClassDoc(None, class_doc_txt) non_blank_line_by_line_compare(str(doc), """ Foo :Parameters: **f** : callable ``f(t, y, *f_args)`` Aaa. **jac** : callable ``jac(t, y, *jac_args)`` Bbb. .. rubric:: Examples For usage examples, see `ode`. .. rubric:: Attributes === ========== t (float) Current time. y (ndarray) Current variable values. === ========== .. rubric:: Methods === ========== a b c === ========== """) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/test_automodapi.py0000644001134200020070000001613612602613466030045 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst import os import sys import pytest from . import * from ....utils import iteritems pytest.importorskip('sphinx') # skips these tests if sphinx not present class FakeConfig(object): """ Mocks up a sphinx configuration setting construct for automodapi tests """ def __init__(self, **kwargs): for k, v in iteritems(kwargs): setattr(self, k, v) class FakeApp(object): """ Mocks up a `sphinx.application.Application` object for automodapi tests """ # Some default config values _defaults = { 'automodapi_toctreedirnm': 'api', 'automodapi_writereprocessed': False } def __init__(self, **configs): config = self._defaults.copy() config.update(configs) self.config = FakeConfig(**config) self.info = [] self.warnings = [] def info(self, msg, loc): self.info.append((msg, loc)) def warn(self, msg, loc): self.warnings.append((msg, loc)) am_replacer_str = """ This comes before .. automodapi:: astropy_helpers.sphinx.ext.tests.test_automodapi {options} This comes after """ am_replacer_basic_expected = """ This comes before astropy_helpers.sphinx.ext.tests.test_automodapi Module ------------------------------------------------------- .. automodule:: astropy_helpers.sphinx.ext.tests.test_automodapi Functions ^^^^^^^^^ .. automodsumm:: astropy_helpers.sphinx.ext.tests.test_automodapi :functions-only: :toctree: api/ Classes ^^^^^^^ .. automodsumm:: astropy_helpers.sphinx.ext.tests.test_automodapi :classes-only: :toctree: api/ Class Inheritance Diagram ^^^^^^^^^^^^^^^^^^^^^^^^^ .. automod-diagram:: astropy_helpers.sphinx.ext.tests.test_automodapi :private-bases: :parts: 1 {empty} This comes after """.format(empty='') # the .format is necessary for editors that remove empty-line whitespace def test_am_replacer_basic(): """ Tests replacing an ".. 
automodapi::" with the automodapi no-option template """ from ..automodapi import automodapi_replace fakeapp = FakeApp() result = automodapi_replace(am_replacer_str.format(options=''), fakeapp) assert result == am_replacer_basic_expected am_replacer_noinh_expected = """ This comes before astropy_helpers.sphinx.ext.tests.test_automodapi Module ------------------------------------------------------- .. automodule:: astropy_helpers.sphinx.ext.tests.test_automodapi Functions ^^^^^^^^^ .. automodsumm:: astropy_helpers.sphinx.ext.tests.test_automodapi :functions-only: :toctree: api/ Classes ^^^^^^^ .. automodsumm:: astropy_helpers.sphinx.ext.tests.test_automodapi :classes-only: :toctree: api/ This comes after """.format(empty='') def test_am_replacer_noinh(): """ Tests replacing an ".. automodapi::" with no-inheritance-diagram option """ from ..automodapi import automodapi_replace fakeapp = FakeApp() ops = ['', ':no-inheritance-diagram:'] ostr = '\n '.join(ops) result = automodapi_replace(am_replacer_str.format(options=ostr), fakeapp) assert result == am_replacer_noinh_expected am_replacer_titleandhdrs_expected = """ This comes before astropy_helpers.sphinx.ext.tests.test_automodapi Module &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&& .. automodule:: astropy_helpers.sphinx.ext.tests.test_automodapi Functions ********* .. automodsumm:: astropy_helpers.sphinx.ext.tests.test_automodapi :functions-only: :toctree: api/ Classes ******* .. automodsumm:: astropy_helpers.sphinx.ext.tests.test_automodapi :classes-only: :toctree: api/ Class Inheritance Diagram ************************* .. automod-diagram:: astropy_helpers.sphinx.ext.tests.test_automodapi :private-bases: :parts: 1 {empty} This comes after """.format(empty='') def test_am_replacer_titleandhdrs(): """ Tests replacing an ".. automodapi::" entry with title-setting and header character options. """ from ..automodapi import automodapi_replace fakeapp = FakeApp() ops = ['', ':title: A new title', ':headings: &*'] ostr = '\n '.join(ops) result = automodapi_replace(am_replacer_str.format(options=ostr), fakeapp) assert result == am_replacer_titleandhdrs_expected am_replacer_nomain_str = """ This comes before .. automodapi:: astropy_helpers.sphinx.ext.automodapi :no-main-docstr: This comes after """ am_replacer_nomain_expected = """ This comes before astropy_helpers.sphinx.ext.automodapi Module -------------------------------------------- Functions ^^^^^^^^^ .. automodsumm:: astropy_helpers.sphinx.ext.automodapi :functions-only: :toctree: api/ This comes after """.format(empty='') def test_am_replacer_nomain(): """ Tests replacing an ".. automodapi::" with "no-main-docstring" . """ from ..automodapi import automodapi_replace fakeapp = FakeApp() result = automodapi_replace(am_replacer_nomain_str, fakeapp) assert result == am_replacer_nomain_expected am_replacer_skip_str = """ This comes before .. automodapi:: astropy_helpers.sphinx.ext.automodapi :skip: something1 :skip: something2 This comes after """ am_replacer_skip_expected = """ This comes before astropy_helpers.sphinx.ext.automodapi Module -------------------------------------------- .. automodule:: astropy_helpers.sphinx.ext.automodapi Functions ^^^^^^^^^ .. automodsumm:: astropy_helpers.sphinx.ext.automodapi :functions-only: :toctree: api/ :skip: something1,something2 This comes after """.format(empty='') def test_am_replacer_skip(): """ Tests using the ":skip: option in an ".. automodapi::" . 
""" from ..automodapi import automodapi_replace fakeapp = FakeApp() result = automodapi_replace(am_replacer_skip_str, fakeapp) assert result == am_replacer_skip_expected am_replacer_invalidop_str = """ This comes before .. automodapi:: astropy_helpers.sphinx.ext.automodapi :invalid-option: This comes after """ def test_am_replacer_invalidop(): """ Tests that a sphinx warning is produced with an invalid option. """ from ..automodapi import automodapi_replace fakeapp = FakeApp() automodapi_replace(am_replacer_invalidop_str, fakeapp) expected_warnings = [('Found additional options invalid-option in ' 'automodapi.', None)] assert fakeapp.warnings == expected_warnings am_replacer_cython_str = """ This comes before .. automodapi:: _eva_.unit02 {options} This comes after """ am_replacer_cython_expected = """ This comes before _eva_.unit02 Module ------------------- .. automodule:: _eva_.unit02 Functions ^^^^^^^^^ .. automodsumm:: _eva_.unit02 :functions-only: :toctree: api/ This comes after """.format(empty='') def test_am_replacer_cython(cython_testpackage): """ Tests replacing an ".. automodapi::" for a Cython module. """ from ..automodapi import automodapi_replace fakeapp = FakeApp() result = automodapi_replace(am_replacer_cython_str.format(options=''), fakeapp) assert result == am_replacer_cython_expected astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/test_autodoc_enhancements.py0000644001134200020070000000324312602613466032064 0ustar embrayscience00000000000000import sys from textwrap import dedent import pytest from ..autodoc_enhancements import type_object_attrgetter # Define test classes outside the class; otherwise there is flakiness with the # details of how exec works on different Python versions class Meta(type): @property def foo(cls): return 'foo' if sys.version_info[0] < 3: exec(dedent(""" class MyClass(object): __metaclass__ = Meta @property def foo(self): \"\"\"Docstring for MyClass.foo property.\"\"\" return 'myfoo' """)) else: exec(dedent(""" class MyClass(metaclass=Meta): @property def foo(self): \"\"\"Docstring for MyClass.foo property.\"\"\" return 'myfoo' """)) def test_type_attrgetter(): """ This test essentially reproduces the docstring for `type_object_attrgetter`. Sphinx itself tests the custom attrgetter feature; see: https://bitbucket.org/birkenfeld/sphinx/src/40bd03003ac6fe274ccf3c80d7727509e00a69ea/tests/test_autodoc.py?at=default#cl-502 so rather than a full end-to-end functional test it's simple enough to just test that this function does what it needs to do. """ assert getattr(MyClass, 'foo') == 'foo' obj = type_object_attrgetter(MyClass, 'foo') assert isinstance(obj, property) assert obj.__doc__ == 'Docstring for MyClass.foo property.' with pytest.raises(AttributeError): type_object_attrgetter(MyClass, 'susy') assert type_object_attrgetter(MyClass, 'susy', 'default') == 'default' assert type_object_attrgetter(MyClass, '__dict__') == MyClass.__dict__ astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/test_automodsumm.py0000644001134200020070000000456412602613466030257 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst import sys import pytest from . 
import * from ....utils import iteritems pytest.importorskip('sphinx') # skips these tests if sphinx not present class FakeEnv(object): """ Mocks up a sphinx env setting construct for automodapi tests """ def __init__(self, **kwargs): for k, v in iteritems(kwargs): setattr(self, k, v) class FakeBuilder(object): """ Mocks up a sphinx builder setting construct for automodapi tests """ def __init__(self, **kwargs): self.env = FakeEnv(**kwargs) class FakeApp(object): """ Mocks up a `sphinx.application.Application` object for automodapi tests """ def __init__(self, srcdir, automodapipresent=True): self.builder = FakeBuilder(srcdir=srcdir) self.info = [] self.warnings = [] self._extensions = [] if automodapipresent: self._extensions.append('astropy_helpers.sphinx.ext.automodapi') def info(self, msg, loc): self.info.append((msg, loc)) def warn(self, msg, loc): self.warnings.append((msg, loc)) ams_to_asmry_str = """ Before .. automodsumm:: astropy_helpers.sphinx.ext.automodsumm :p: And After """ ams_to_asmry_expected = """\ .. currentmodule:: astropy_helpers.sphinx.ext.automodsumm .. autosummary:: :p: Automoddiagram Automodsumm automodsumm_to_autosummary_lines generate_automodsumm_docs process_automodsumm_generation setup """ def test_ams_to_asmry(tmpdir): from ..automodsumm import automodsumm_to_autosummary_lines fi = tmpdir.join('automodsumm.rst') fi.write(ams_to_asmry_str) fakeapp = FakeApp(srcdir='') resultlines = automodsumm_to_autosummary_lines(str(fi), fakeapp) assert '\n'.join(resultlines) == ams_to_asmry_expected ams_cython_str = """ Before .. automodsumm:: _eva_.unit02 :functions-only: :p: And After """ ams_cython_expected = """\ .. currentmodule:: _eva_.unit02 .. autosummary:: :p: pilot """ def test_ams_cython(tmpdir, cython_testpackage): from ..automodsumm import automodsumm_to_autosummary_lines fi = tmpdir.join('automodsumm.rst') fi.write(ams_cython_str) fakeapp = FakeApp(srcdir='') resultlines = automodsumm_to_autosummary_lines(str(fi), fakeapp) assert '\n'.join(resultlines) == ams_cython_expected astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tests/__init__.py0000644001134200020070000000334112602613466026375 0ustar embrayscience00000000000000import os import subprocess as sp import sys from textwrap import dedent import pytest @pytest.fixture def cython_testpackage(tmpdir, request): """ Creates a trivial Cython package for use with tests. 
""" test_pkg = tmpdir.mkdir('test_pkg') test_pkg.mkdir('_eva_').ensure('__init__.py') test_pkg.join('_eva_').join('unit02.pyx').write(dedent("""\ def pilot(): \"\"\"Returns the pilot of Eva Unit-02.\"\"\" return True """)) import astropy_helpers test_pkg.join('setup.py').write(dedent("""\ import sys sys.path.insert(0, {0!r}) from os.path import join from setuptools import setup, Extension from astropy_helpers.setup_helpers import register_commands NAME = '_eva_' VERSION = 0.1 RELEASE = True cmdclassd = register_commands(NAME, VERSION, RELEASE) setup( name=NAME, version=VERSION, cmdclass=cmdclassd, ext_modules=[Extension('_eva_.unit02', [join('_eva_', 'unit02.pyx')])] ) """.format(os.path.dirname(astropy_helpers.__path__[0])))) test_pkg.chdir() # Build the Cython module in a subprocess; otherwise strange things can # happen with Cython's global module state sp.call([sys.executable, 'setup.py', 'build_ext', '--inplace']) sys.path.insert(0, str(test_pkg)) import _eva_.unit02 def cleanup(test_pkg=test_pkg): for modname in ['_eva_', '_eva_.unit02']: try: del sys.modules[modname] except KeyError: pass sys.path.remove(str(test_pkg)) request.addfinalizer(cleanup) return test_pkg astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/templates/0000755001134200020070000000000012637041310025106 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/templates/autosummary_core/0000755001134200020070000000000012637041310030504 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/templates/autosummary_core/class.rst0000644001134200020070000000221112602613466032350 0ustar embrayscience00000000000000{% if referencefile %} .. include:: {{ referencefile }} {% endif %} {{ objname }} {{ underline }} .. currentmodule:: {{ module }} .. autoclass:: {{ objname }} :show-inheritance: {% if '__init__' in methods %} {% set caught_result = methods.remove('__init__') %} {% endif %} {% block attributes_summary %} {% if attributes %} .. rubric:: Attributes Summary .. autosummary:: {% for item in attributes %} ~{{ name }}.{{ item }} {%- endfor %} {% endif %} {% endblock %} {% block methods_summary %} {% if methods %} .. rubric:: Methods Summary .. autosummary:: {% for item in methods %} ~{{ name }}.{{ item }} {%- endfor %} {% endif %} {% endblock %} {% block attributes_documentation %} {% if attributes %} .. rubric:: Attributes Documentation {% for item in attributes %} .. autoattribute:: {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block methods_documentation %} {% if methods %} .. rubric:: Methods Documentation {% for item in methods %} .. automethod:: {{ item }} {%- endfor %} {% endif %} {% endblock %} astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/templates/autosummary_core/module.rst0000644001134200020070000000127712602613466032543 0ustar embrayscience00000000000000{% if referencefile %} .. include:: {{ referencefile }} {% endif %} {{ objname }} {{ underline }} .. automodule:: {{ fullname }} {% block functions %} {% if functions %} .. rubric:: Functions .. autosummary:: {% for item in functions %} {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block classes %} {% if classes %} .. rubric:: Classes .. autosummary:: {% for item in classes %} {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block exceptions %} {% if exceptions %} .. rubric:: Exceptions .. 
autosummary:: {% for item in exceptions %} {{ item }} {%- endfor %} {% endif %} {% endblock %} astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/templates/autosummary_core/base.rst0000644001134200020070000000025212602613466032160 0ustar embrayscience00000000000000{% if referencefile %} .. include:: {{ referencefile }} {% endif %} {{ objname }} {{ underline }} .. currentmodule:: {{ module }} .. auto{{ objtype }}:: {{ objname }} astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/numpydoc.py0000644001134200020070000001441412602613466025335 0ustar embrayscience00000000000000""" ======== numpydoc ======== Sphinx extension that handles docstrings in the Numpy standard format. [1] It will: - Convert Parameters etc. sections to field lists. - Convert See Also section to a See also entry. - Renumber references. - Extract the signature from the docstring, if it can't be determined otherwise. .. [1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt """ from __future__ import division, absolute_import, print_function import os, sys, re, pydoc import sphinx import inspect import collections if sphinx.__version__ < '1.0.1': raise RuntimeError("Sphinx 1.0.1 or newer is required") from .docscrape_sphinx import get_doc_object, SphinxDocString from sphinx.util.compat import Directive if sys.version_info[0] >= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') def mangle_docstrings(app, what, name, obj, options, lines, reference_offset=[0]): cfg = dict(use_plots=app.config.numpydoc_use_plots, show_class_members=app.config.numpydoc_show_class_members, class_members_toctree=app.config.numpydoc_class_members_toctree, ) if what == 'module': # Strip top title title_re = re.compile(sixu('^\\s*[#*=]{4,}\\n[a-z0-9 -]+\\n[#*=]{4,}\\s*'), re.I|re.S) lines[:] = title_re.sub(sixu(''), sixu("\n").join(lines)).split(sixu("\n")) else: doc = get_doc_object(obj, what, sixu("\n").join(lines), config=cfg) if sys.version_info[0] >= 3: doc = str(doc) else: doc = unicode(doc) lines[:] = doc.split(sixu("\n")) if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ obj.__name__: if hasattr(obj, '__module__'): v = dict(full_name=sixu("%s.%s") % (obj.__module__, obj.__name__)) else: v = dict(full_name=obj.__name__) lines += [sixu(''), sixu('.. htmlonly::'), sixu('')] lines += [sixu(' %s') % x for x in (app.config.numpydoc_edit_link % v).split("\n")] # replace reference numbers so that there are no duplicates references = [] for line in lines: line = line.strip() m = re.match(sixu('^.. \\[([a-z0-9_.-])\\]'), line, re.I) if m: references.append(m.group(1)) # start renaming from the longest string, to avoid overwriting parts references.sort(key=lambda x: -len(x)) if references: for i, line in enumerate(lines): for r in references: if re.match(sixu('^\\d+$'), r): new_r = sixu("R%d") % (reference_offset[0] + int(r)) else: new_r = sixu("%s%d") % (r, reference_offset[0]) lines[i] = lines[i].replace(sixu('[%s]_') % r, sixu('[%s]_') % new_r) lines[i] = lines[i].replace(sixu('.. [%s]') % r, sixu('.. 
[%s]') % new_r) reference_offset[0] += len(references) def mangle_signature(app, what, name, obj, options, sig, retann): # Do not try to inspect classes that don't define `__init__` if (inspect.isclass(obj) and (not hasattr(obj, '__init__') or 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' if not (isinstance(obj, collections.Callable) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) if doc['Signature']: sig = re.sub(sixu("^[^(]*"), sixu(""), doc['Signature']) return sig, sixu('') def setup(app, get_doc_object_=get_doc_object): if not hasattr(app, 'add_config_value'): return # probably called by nose, better bail out global get_doc_object get_doc_object = get_doc_object_ app.connect('autodoc-process-docstring', mangle_docstrings) app.connect('autodoc-process-signature', mangle_signature) app.add_config_value('numpydoc_edit_link', None, False) app.add_config_value('numpydoc_use_plots', None, False) app.add_config_value('numpydoc_show_class_members', True, True) app.add_config_value('numpydoc_class_members_toctree', True, True) # Extra mangling domains app.add_domain(NumpyPythonDomain) app.add_domain(NumpyCDomain) #------------------------------------------------------------------------------ # Docstring-mangling domains #------------------------------------------------------------------------------ from docutils.statemachine import ViewList from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain class ManglingDomainBase(object): directive_mangling_map = {} def __init__(self, *a, **kw): super(ManglingDomainBase, self).__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): for name, objtype in list(self.directive_mangling_map.items()): self.directives[name] = wrap_mangling_directive( self.directives[name], objtype) class NumpyPythonDomain(ManglingDomainBase, PythonDomain): name = 'np' directive_mangling_map = { 'function': 'function', 'class': 'class', 'exception': 'class', 'method': 'function', 'classmethod': 'function', 'staticmethod': 'function', 'attribute': 'attribute', } indices = [] class NumpyCDomain(ManglingDomainBase, CDomain): name = 'np-c' directive_mangling_map = { 'function': 'function', 'member': 'attribute', 'macro': 'function', 'type': 'class', 'var': 'object', } def wrap_mangling_directive(base_directive, objtype): class directive(base_directive): def run(self): env = self.state.document.settings.env name = None if self.arguments: m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0]) name = m.group(2).strip() if not name: name = self.arguments[0] lines = list(self.content) mangle_docstrings(env.app, objtype, name, None, None, lines) self.content = ViewList(lines, self.content.parent) return base_directive.run(self) return directive astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/compiler_unparse.py0000644001134200020070000006024412602613466027050 0ustar embrayscience00000000000000""" Turn compiler.ast structures back into executable python code. The unparse method takes a compiler.ast tree and transforms it back into valid python code. It is incomplete and currently only works for import statements, function calls, function definitions, assignments, and basic expressions. Inspired by python-2.5-svn/Demo/parser/unparse.py fixme: We may want to move to using _ast trees because the compiler for them is about 6 times faster than compiler.compile. 
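A minimal usage sketch (illustrative only, not from the original source; it assumes Python 2, where the standard-library ``compiler`` module still exists, and the exact spacing of the regenerated source is not guaranteed):

    >>> import compiler                          # Python 2 only
    >>> tree = compiler.parse("x = a + b * 2")   # build a compiler.ast tree
    >>> src = unparse(tree)                      # e.g. 'x = a+b*2\n'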
""" from __future__ import division, absolute_import, print_function import sys from compiler.ast import Const, Name, Tuple, Div, Mul, Sub, Add if sys.version_info[0] >= 3: from io import StringIO else: from StringIO import StringIO def unparse(ast, single_line_functions=False): s = StringIO() UnparseCompilerAst(ast, s, single_line_functions) return s.getvalue().lstrip() op_precedence = { 'compiler.ast.Power':3, 'compiler.ast.Mul':2, 'compiler.ast.Div':2, 'compiler.ast.Add':1, 'compiler.ast.Sub':1 } class UnparseCompilerAst: """ Methods in this class recursively traverse an AST and output source code for the abstract syntax; original formatting is disregarged. """ ######################################################################### # object interface. ######################################################################### def __init__(self, tree, file = sys.stdout, single_line_functions=False): """ Unparser(tree, file=sys.stdout) -> None. Print the source for tree to file. """ self.f = file self._single_func = single_line_functions self._do_indent = True self._indent = 0 self._dispatch(tree) self._write("\n") self.f.flush() ######################################################################### # Unparser private interface. ######################################################################### ### format, output, and dispatch methods ################################ def _fill(self, text = ""): "Indent a piece of text, according to the current indentation level" if self._do_indent: self._write("\n"+" "*self._indent + text) else: self._write(text) def _write(self, text): "Append a piece of text to the current line." self.f.write(text) def _enter(self): "Print ':', and increase the indentation." self._write(": ") self._indent += 1 def _leave(self): "Decrease the indentation level." self._indent -= 1 def _dispatch(self, tree): "_dispatcher function, _dispatching tree type T to method _T." if isinstance(tree, list): for t in tree: self._dispatch(t) return meth = getattr(self, "_"+tree.__class__.__name__) if tree.__class__.__name__ == 'NoneType' and not self._do_indent: return meth(tree) ######################################################################### # compiler.ast unparsing methods. # # There should be one method per concrete grammar type. They are # organized in alphabetical order. ######################################################################### def _Add(self, t): self.__binary_op(t, '+') def _And(self, t): self._write(" (") for i, node in enumerate(t.nodes): self._dispatch(node) if i != len(t.nodes)-1: self._write(") and (") self._write(")") def _AssAttr(self, t): """ Handle assigning an attribute of an object """ self._dispatch(t.expr) self._write('.'+t.attrname) def _Assign(self, t): """ Expression Assignment such as "a = 1". This only handles assignment in expressions. Keyword assignment is handled separately. """ self._fill() for target in t.nodes: self._dispatch(target) self._write(" = ") self._dispatch(t.expr) if not self._do_indent: self._write('; ') def _AssName(self, t): """ Name on left hand side of expression. Treat just like a name on the right side of an expression. """ self._Name(t) def _AssTuple(self, t): """ Tuple on left hand side of an expression. """ # _write each elements, separated by a comma. for element in t.nodes[:-1]: self._dispatch(element) self._write(", ") # Handle the last one without writing comma last_element = t.nodes[-1] self._dispatch(last_element) def _AugAssign(self, t): """ +=,-=,*=,/=,**=, etc. 
operations """ self._fill() self._dispatch(t.node) self._write(' '+t.op+' ') self._dispatch(t.expr) if not self._do_indent: self._write(';') def _Bitand(self, t): """ Bit and operation. """ for i, node in enumerate(t.nodes): self._write("(") self._dispatch(node) self._write(")") if i != len(t.nodes)-1: self._write(" & ") def _Bitor(self, t): """ Bit or operation """ for i, node in enumerate(t.nodes): self._write("(") self._dispatch(node) self._write(")") if i != len(t.nodes)-1: self._write(" | ") def _CallFunc(self, t): """ Function call. """ self._dispatch(t.node) self._write("(") comma = False for e in t.args: if comma: self._write(", ") else: comma = True self._dispatch(e) if t.star_args: if comma: self._write(", ") else: comma = True self._write("*") self._dispatch(t.star_args) if t.dstar_args: if comma: self._write(", ") else: comma = True self._write("**") self._dispatch(t.dstar_args) self._write(")") def _Compare(self, t): self._dispatch(t.expr) for op, expr in t.ops: self._write(" " + op + " ") self._dispatch(expr) def _Const(self, t): """ A constant value such as an integer value, 3, or a string, "hello". """ self._dispatch(t.value) def _Decorators(self, t): """ Handle function decorators (eg. @has_units) """ for node in t.nodes: self._dispatch(node) def _Dict(self, t): self._write("{") for i, (k, v) in enumerate(t.items): self._dispatch(k) self._write(": ") self._dispatch(v) if i < len(t.items)-1: self._write(", ") self._write("}") def _Discard(self, t): """ Node for when return value is ignored such as in "foo(a)". """ self._fill() self._dispatch(t.expr) def _Div(self, t): self.__binary_op(t, '/') def _Ellipsis(self, t): self._write("...") def _From(self, t): """ Handle "from xyz import foo, bar as baz". """ # fixme: Are From and ImportFrom handled differently? self._fill("from ") self._write(t.modname) self._write(" import ") for i, (name,asname) in enumerate(t.names): if i != 0: self._write(", ") self._write(name) if asname is not None: self._write(" as "+asname) def _Function(self, t): """ Handle function definitions """ if t.decorators is not None: self._fill("@") self._dispatch(t.decorators) self._fill("def "+t.name + "(") defaults = [None] * (len(t.argnames) - len(t.defaults)) + list(t.defaults) for i, arg in enumerate(zip(t.argnames, defaults)): self._write(arg[0]) if arg[1] is not None: self._write('=') self._dispatch(arg[1]) if i < len(t.argnames)-1: self._write(', ') self._write(")") if self._single_func: self._do_indent = False self._enter() self._dispatch(t.code) self._leave() self._do_indent = True def _Getattr(self, t): """ Handle getting an attribute of an object """ if isinstance(t.expr, (Div, Mul, Sub, Add)): self._write('(') self._dispatch(t.expr) self._write(')') else: self._dispatch(t.expr) self._write('.'+t.attrname) def _If(self, t): self._fill() for i, (compare,code) in enumerate(t.tests): if i == 0: self._write("if ") else: self._write("elif ") self._dispatch(compare) self._enter() self._fill() self._dispatch(code) self._leave() self._write("\n") if t.else_ is not None: self._write("else") self._enter() self._fill() self._dispatch(t.else_) self._leave() self._write("\n") def _IfExp(self, t): self._dispatch(t.then) self._write(" if ") self._dispatch(t.test) if t.else_ is not None: self._write(" else (") self._dispatch(t.else_) self._write(")") def _Import(self, t): """ Handle "import xyz.foo". 
""" self._fill("import ") for i, (name,asname) in enumerate(t.names): if i != 0: self._write(", ") self._write(name) if asname is not None: self._write(" as "+asname) def _Keyword(self, t): """ Keyword value assignment within function calls and definitions. """ self._write(t.name) self._write("=") self._dispatch(t.expr) def _List(self, t): self._write("[") for i,node in enumerate(t.nodes): self._dispatch(node) if i < len(t.nodes)-1: self._write(", ") self._write("]") def _Module(self, t): if t.doc is not None: self._dispatch(t.doc) self._dispatch(t.node) def _Mul(self, t): self.__binary_op(t, '*') def _Name(self, t): self._write(t.name) def _NoneType(self, t): self._write("None") def _Not(self, t): self._write('not (') self._dispatch(t.expr) self._write(')') def _Or(self, t): self._write(" (") for i, node in enumerate(t.nodes): self._dispatch(node) if i != len(t.nodes)-1: self._write(") or (") self._write(")") def _Pass(self, t): self._write("pass\n") def _Printnl(self, t): self._fill("print ") if t.dest: self._write(">> ") self._dispatch(t.dest) self._write(", ") comma = False for node in t.nodes: if comma: self._write(', ') else: comma = True self._dispatch(node) def _Power(self, t): self.__binary_op(t, '**') def _Return(self, t): self._fill("return ") if t.value: if isinstance(t.value, Tuple): text = ', '.join([ name.name for name in t.value.asList() ]) self._write(text) else: self._dispatch(t.value) if not self._do_indent: self._write('; ') def _Slice(self, t): self._dispatch(t.expr) self._write("[") if t.lower: self._dispatch(t.lower) self._write(":") if t.upper: self._dispatch(t.upper) #if t.step: # self._write(":") # self._dispatch(t.step) self._write("]") def _Sliceobj(self, t): for i, node in enumerate(t.nodes): if i != 0: self._write(":") if not (isinstance(node, Const) and node.value is None): self._dispatch(node) def _Stmt(self, tree): for node in tree.nodes: self._dispatch(node) def _Sub(self, t): self.__binary_op(t, '-') def _Subscript(self, t): self._dispatch(t.expr) self._write("[") for i, value in enumerate(t.subs): if i != 0: self._write(",") self._dispatch(value) self._write("]") def _TryExcept(self, t): self._fill("try") self._enter() self._dispatch(t.body) self._leave() for handler in t.handlers: self._fill('except ') self._dispatch(handler[0]) if handler[1] is not None: self._write(', ') self._dispatch(handler[1]) self._enter() self._dispatch(handler[2]) self._leave() if t.else_: self._fill("else") self._enter() self._dispatch(t.else_) self._leave() def _Tuple(self, t): if not t.nodes: # Empty tuple. self._write("()") else: self._write("(") # _write each elements, separated by a comma. 
for element in t.nodes[:-1]: self._dispatch(element) self._write(", ") # Handle the last one without writing comma last_element = t.nodes[-1] self._dispatch(last_element) self._write(")") def _UnaryAdd(self, t): self._write("+") self._dispatch(t.expr) def _UnarySub(self, t): self._write("-") self._dispatch(t.expr) def _With(self, t): self._fill('with ') self._dispatch(t.expr) if t.vars: self._write(' as ') self._dispatch(t.vars.name) self._enter() self._dispatch(t.body) self._leave() self._write('\n') def _int(self, t): self._write(repr(t)) def __binary_op(self, t, symbol): # Check if parenthesis are needed on left side and then dispatch has_paren = False left_class = str(t.left.__class__) if (left_class in op_precedence.keys() and op_precedence[left_class] < op_precedence[str(t.__class__)]): has_paren = True if has_paren: self._write('(') self._dispatch(t.left) if has_paren: self._write(')') # Write the appropriate symbol for operator self._write(symbol) # Check if parenthesis are needed on the right side and then dispatch has_paren = False right_class = str(t.right.__class__) if (right_class in op_precedence.keys() and op_precedence[right_class] < op_precedence[str(t.__class__)]): has_paren = True if has_paren: self._write('(') self._dispatch(t.right) if has_paren: self._write(')') def _float(self, t): # if t is 0.1, str(t)->'0.1' while repr(t)->'0.1000000000001' # We prefer str here. self._write(str(t)) def _str(self, t): self._write(repr(t)) def _tuple(self, t): self._write(str(t)) ######################################################################### # These are the methods from the _ast modules unparse. # # As our needs to handle more advanced code increase, we may want to # modify some of the methods below so that they work for compiler.ast. ######################################################################### # # stmt # def _Expr(self, tree): # self._fill() # self._dispatch(tree.value) # # def _Import(self, t): # self._fill("import ") # first = True # for a in t.names: # if first: # first = False # else: # self._write(", ") # self._write(a.name) # if a.asname: # self._write(" as "+a.asname) # ## def _ImportFrom(self, t): ## self._fill("from ") ## self._write(t.module) ## self._write(" import ") ## for i, a in enumerate(t.names): ## if i == 0: ## self._write(", ") ## self._write(a.name) ## if a.asname: ## self._write(" as "+a.asname) ## # XXX(jpe) what is level for? 
## # # def _Break(self, t): # self._fill("break") # # def _Continue(self, t): # self._fill("continue") # # def _Delete(self, t): # self._fill("del ") # self._dispatch(t.targets) # # def _Assert(self, t): # self._fill("assert ") # self._dispatch(t.test) # if t.msg: # self._write(", ") # self._dispatch(t.msg) # # def _Exec(self, t): # self._fill("exec ") # self._dispatch(t.body) # if t.globals: # self._write(" in ") # self._dispatch(t.globals) # if t.locals: # self._write(", ") # self._dispatch(t.locals) # # def _Print(self, t): # self._fill("print ") # do_comma = False # if t.dest: # self._write(">>") # self._dispatch(t.dest) # do_comma = True # for e in t.values: # if do_comma:self._write(", ") # else:do_comma=True # self._dispatch(e) # if not t.nl: # self._write(",") # # def _Global(self, t): # self._fill("global") # for i, n in enumerate(t.names): # if i != 0: # self._write(",") # self._write(" " + n) # # def _Yield(self, t): # self._fill("yield") # if t.value: # self._write(" (") # self._dispatch(t.value) # self._write(")") # # def _Raise(self, t): # self._fill('raise ') # if t.type: # self._dispatch(t.type) # if t.inst: # self._write(", ") # self._dispatch(t.inst) # if t.tback: # self._write(", ") # self._dispatch(t.tback) # # # def _TryFinally(self, t): # self._fill("try") # self._enter() # self._dispatch(t.body) # self._leave() # # self._fill("finally") # self._enter() # self._dispatch(t.finalbody) # self._leave() # # def _excepthandler(self, t): # self._fill("except ") # if t.type: # self._dispatch(t.type) # if t.name: # self._write(", ") # self._dispatch(t.name) # self._enter() # self._dispatch(t.body) # self._leave() # # def _ClassDef(self, t): # self._write("\n") # self._fill("class "+t.name) # if t.bases: # self._write("(") # for a in t.bases: # self._dispatch(a) # self._write(", ") # self._write(")") # self._enter() # self._dispatch(t.body) # self._leave() # # def _FunctionDef(self, t): # self._write("\n") # for deco in t.decorators: # self._fill("@") # self._dispatch(deco) # self._fill("def "+t.name + "(") # self._dispatch(t.args) # self._write(")") # self._enter() # self._dispatch(t.body) # self._leave() # # def _For(self, t): # self._fill("for ") # self._dispatch(t.target) # self._write(" in ") # self._dispatch(t.iter) # self._enter() # self._dispatch(t.body) # self._leave() # if t.orelse: # self._fill("else") # self._enter() # self._dispatch(t.orelse) # self._leave # # def _While(self, t): # self._fill("while ") # self._dispatch(t.test) # self._enter() # self._dispatch(t.body) # self._leave() # if t.orelse: # self._fill("else") # self._enter() # self._dispatch(t.orelse) # self._leave # # # expr # def _Str(self, tree): # self._write(repr(tree.s)) ## # def _Repr(self, t): # self._write("`") # self._dispatch(t.value) # self._write("`") # # def _Num(self, t): # self._write(repr(t.n)) # # def _ListComp(self, t): # self._write("[") # self._dispatch(t.elt) # for gen in t.generators: # self._dispatch(gen) # self._write("]") # # def _GeneratorExp(self, t): # self._write("(") # self._dispatch(t.elt) # for gen in t.generators: # self._dispatch(gen) # self._write(")") # # def _comprehension(self, t): # self._write(" for ") # self._dispatch(t.target) # self._write(" in ") # self._dispatch(t.iter) # for if_clause in t.ifs: # self._write(" if ") # self._dispatch(if_clause) # # def _IfExp(self, t): # self._dispatch(t.body) # self._write(" if ") # self._dispatch(t.test) # if t.orelse: # self._write(" else ") # self._dispatch(t.orelse) # # unop = {"Invert":"~", "Not": "not", "UAdd":"+", 
"USub":"-"} # def _UnaryOp(self, t): # self._write(self.unop[t.op.__class__.__name__]) # self._write("(") # self._dispatch(t.operand) # self._write(")") # # binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%", # "LShift":">>", "RShift":"<<", "BitOr":"|", "BitXor":"^", "BitAnd":"&", # "FloorDiv":"//", "Pow": "**"} # def _BinOp(self, t): # self._write("(") # self._dispatch(t.left) # self._write(")" + self.binop[t.op.__class__.__name__] + "(") # self._dispatch(t.right) # self._write(")") # # boolops = {_ast.And: 'and', _ast.Or: 'or'} # def _BoolOp(self, t): # self._write("(") # self._dispatch(t.values[0]) # for v in t.values[1:]: # self._write(" %s " % self.boolops[t.op.__class__]) # self._dispatch(v) # self._write(")") # # def _Attribute(self,t): # self._dispatch(t.value) # self._write(".") # self._write(t.attr) # ## def _Call(self, t): ## self._dispatch(t.func) ## self._write("(") ## comma = False ## for e in t.args: ## if comma: self._write(", ") ## else: comma = True ## self._dispatch(e) ## for e in t.keywords: ## if comma: self._write(", ") ## else: comma = True ## self._dispatch(e) ## if t.starargs: ## if comma: self._write(", ") ## else: comma = True ## self._write("*") ## self._dispatch(t.starargs) ## if t.kwargs: ## if comma: self._write(", ") ## else: comma = True ## self._write("**") ## self._dispatch(t.kwargs) ## self._write(")") # # # slice # def _Index(self, t): # self._dispatch(t.value) # # def _ExtSlice(self, t): # for i, d in enumerate(t.dims): # if i != 0: # self._write(': ') # self._dispatch(d) # # # others # def _arguments(self, t): # first = True # nonDef = len(t.args)-len(t.defaults) # for a in t.args[0:nonDef]: # if first:first = False # else: self._write(", ") # self._dispatch(a) # for a,d in zip(t.args[nonDef:], t.defaults): # if first:first = False # else: self._write(", ") # self._dispatch(a), # self._write("=") # self._dispatch(d) # if t.vararg: # if first:first = False # else: self._write(", ") # self._write("*"+t.vararg) # if t.kwarg: # if first:first = False # else: self._write(", ") # self._write("**"+t.kwarg) # ## def _keyword(self, t): ## self._write(t.arg) ## self._write("=") ## self._dispatch(t.value) # # def _Lambda(self, t): # self._write("lambda ") # self._dispatch(t.args) # self._write(": ") # self._dispatch(t.body) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/utils.py0000644001134200020070000000443312602613466024637 0ustar embrayscience00000000000000import inspect import sys def find_mod_objs(modname, onlylocals=False): """ Returns all the public attributes of a module referenced by name. .. note:: The returned list *not* include subpackages or modules of `modname`,nor does it include private attributes (those that beginwith '_' or are not in `__all__`). Parameters ---------- modname : str The name of the module to search. onlylocals : bool If True, only attributes that are either members of `modname` OR one of its modules or subpackages will be included. Returns ------- localnames : list of str A list of the names of the attributes as they are named in the module `modname` . fqnames : list of str A list of the full qualified names of the attributes (e.g., ``astropy.utils.misc.find_mod_objs``). For attributes that are simple variables, this is based on the local name, but for functions or classes it can be different if they are actually defined elsewhere and just referenced in `modname`. 
objs : list of objects A list of the actual attributes themselves (in the same order as the other arguments) """ __import__(modname) mod = sys.modules[modname] if hasattr(mod, '__all__'): pkgitems = [(k, mod.__dict__[k]) for k in mod.__all__] else: pkgitems = [(k, mod.__dict__[k]) for k in dir(mod) if k[0] != '_'] # filter out modules and pull the names and objs out ismodule = inspect.ismodule localnames = [k for k, v in pkgitems if not ismodule(v)] objs = [v for k, v in pkgitems if not ismodule(v)] # fully qualified names can be determined from the object's module fqnames = [] for obj, lnm in zip(objs, localnames): if hasattr(obj, '__module__') and hasattr(obj, '__name__'): fqnames.append(obj.__module__ + '.' + obj.__name__) else: fqnames.append(modname + '.' + lnm) if onlylocals: valids = [fqn.startswith(modname) for fqn in fqnames] localnames = [e for i, e in enumerate(localnames) if valids[i]] fqnames = [e for i, e in enumerate(fqnames) if valids[i]] objs = [e for i, e in enumerate(objs) if valids[i]] return localnames, fqnames, objs astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/changelog_links.py0000644001134200020070000000542012602613466026623 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This sphinx extension makes the issue numbers in the changelog into links to GitHub issues. """ from __future__ import print_function import re from docutils.nodes import Text, reference BLOCK_PATTERN = re.compile('\[#.+\]', flags=re.DOTALL) ISSUE_PATTERN = re.compile('#[0-9]+') def process_changelog_links(app, doctree, docname): for rex in app.changelog_links_rexes: if rex.match(docname): break else: # if the doc doesn't match any of the changelog regexes, don't process return app.info('[changelog_links] Adding changelog links to "{0}"'.format(docname)) for item in doctree.traverse(): if not isinstance(item, Text): continue # We build a new list of items to replace the current item. If # a link is found, we need to use a 'reference' item. children = [] # First cycle through blocks of issues (delimited by []) then # iterate inside each one to find the individual issues. prev_block_end = 0 for block in BLOCK_PATTERN.finditer(item): block_start, block_end = block.start(), block.end() children.append(Text(item[prev_block_end:block_start])) block = item[block_start:block_end] prev_end = 0 for m in ISSUE_PATTERN.finditer(block): start, end = m.start(), m.end() children.append(Text(block[prev_end:start])) issue_number = block[start:end] refuri = app.config.github_issues_url + issue_number[1:] children.append(reference(text=issue_number, name=issue_number, refuri=refuri)) prev_end = end prev_block_end = block_end # If no issues were found, this adds the whole item, # otherwise it adds the remaining text. children.append(Text(block[prev_end:block_end])) # If no blocks were found, this adds the whole item, otherwise # it adds the remaining text. children.append(Text(item[prev_block_end:])) # Replace item by the new list of items we have generated, # which may contain links. 
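        # (Illustrative example, not part of the original source: with the
        # ``github_issues_url`` config value set to, say,
        # 'https://github.com/astropy/astropy/issues/', a changelog entry such
        # as "Fixed a rounding bug. [#1234]" has its "#1234" text replaced by a
        # reference node whose refuri is
        # 'https://github.com/astropy/astropy/issues/1234', while the
        # surrounding text is kept as plain Text nodes.)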
item.parent.replace(item, children) def setup_patterns_rexes(app): app.changelog_links_rexes = [re.compile(pat) for pat in app.config.changelog_links_docpattern] def setup(app): app.connect('doctree-resolved', process_changelog_links) app.connect('builder-inited', setup_patterns_rexes) app.add_config_value('github_issues_url', None, True) app.add_config_value('changelog_links_docpattern', ['.*changelog.*', 'whatsnew/.*'], True) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/autodoc_enhancements.py0000644001134200020070000000673412602613466027673 0ustar embrayscience00000000000000""" Miscellaneous enhancements to help autodoc along. """ import inspect import sys import types from sphinx.ext.autodoc import AttributeDocumenter, ModuleDocumenter from sphinx.util.inspect import isdescriptor if sys.version_info[0] == 3: class_types = (type,) else: class_types = (type, types.ClassType) MethodDescriptorType = type(type.__subclasses__) # See # https://github.com/astropy/astropy-helpers/issues/116#issuecomment-71254836 # for further background on this. def type_object_attrgetter(obj, attr, *defargs): """ This implements an improved attrgetter for type objects (i.e. classes) that can handle class attributes that are implemented as properties on a metaclass. Normally `getattr` on a class with a `property` (say, "foo"), would return the `property` object itself. However, if the class has a metaclass which *also* defines a `property` named "foo", ``getattr(cls, 'foo')`` will find the "foo" property on the metaclass and resolve it. For the purposes of autodoc we just want to document the "foo" property defined on the class, not on the metaclass. For example:: >>> class Meta(type): ... @property ... def foo(cls): ... return 'foo' ... >>> class MyClass(metaclass=Meta): ... @property ... def foo(self): ... \"\"\"Docstring for MyClass.foo property.\"\"\" ... return 'myfoo' ... >>> getattr(MyClass, 'foo') 'foo' >>> type_object_attrgetter(MyClass, 'foo') >>> type_object_attrgetter(MyClass, 'foo').__doc__ 'Docstring for MyClass.foo property.' The last line of the example shows the desired behavior for the purposes of autodoc. """ for base in obj.__mro__: if attr in base.__dict__: if isinstance(base.__dict__[attr], property): # Note, this should only be used for properties--for any other # type of descriptor (classmethod, for example) this can mess # up existing expectations of what getattr(cls, ...) 
returns return base.__dict__[attr] break return getattr(obj, attr, *defargs) # Provided to work around a bug in Sphinx # See https://github.com/sphinx-doc/sphinx/pull/1843 class AttributeDocumenter(AttributeDocumenter): @classmethod def can_document_member(cls, member, membername, isattr, parent): non_attr_types = cls.method_types + class_types + \ (MethodDescriptorType,) isdatadesc = isdescriptor(member) and not \ isinstance(member, non_attr_types) and not \ type(member).__name__ == "instancemethod" # That last condition addresses an obscure case of C-defined # methods using a deprecated type in Python 3, that is not otherwise # exported anywhere by Python return isdatadesc or (not isinstance(parent, ModuleDocumenter) and not inspect.isroutine(member) and not isinstance(member, class_types)) def setup(app): # Must have the autodoc extension set up first so we can override it app.setup_extension('sphinx.ext.autodoc') # Need to import this too since it re-registers all the documenter types # =_= import sphinx.ext.autosummary.generate app.add_autodoc_attrgetter(type, type_object_attrgetter) app.add_autodocumenter(AttributeDocumenter) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/viewcode.py0000644001134200020070000001752512602613466025312 0ustar embrayscience00000000000000# -*- coding: utf-8 -*- """ sphinx.ext.viewcode ~~~~~~~~~~~~~~~~~~~ Add links to module code in Python object descriptions. :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. Patched using patch in https://bitbucket.org/birkenfeld/sphinx/issue/623/extension-viewcode-fails-with-function on 21 Aug 2013 by Kyle H Barbary """ from docutils import nodes from sphinx import addnodes from sphinx.locale import _ from sphinx.pycode import ModuleAnalyzer from sphinx.util.inspect import safe_getattr from sphinx.util.nodes import make_refnode import sys import traceback if sys.version < '3': text_type = unicode else: text_type = str from ...utils import iteritems def doctree_read(app, doctree): env = app.builder.env if not hasattr(env, '_viewcode_modules'): env._viewcode_modules = {} def get_full_modname(modname, attribute): try: __import__(modname) except Exception as error: if not app.quiet: app.info(traceback.format_exc().rstrip()) app.warn('viewcode can\'t import %s, failed with error "%s"' % (modname, error)) return None module = sys.modules[modname] try: # Allow an attribute to have multiple parts and incidentally allow # repeated .s in the attribute. 
attr = attribute.split('.') value = module for attr in attribute.split('.'): if attr: value = safe_getattr(value, attr) except AttributeError: app.warn('Didn\'t find %s in %s' % (attribute, module.__name__)) return None else: return safe_getattr(value, '__module__', None) def has_tag(modname, fullname, docname, refname): entry = env._viewcode_modules.get(modname, None) if entry is None: try: analyzer = ModuleAnalyzer.for_module(modname) except Exception: env._viewcode_modules[modname] = False return analyzer.find_tags() if not isinstance(analyzer.code, text_type): code = analyzer.code.decode(analyzer.encoding) else: code = analyzer.code entry = code, analyzer.tags, {}, refname env._viewcode_modules[modname] = entry elif entry is False: return _, tags, used, _ = entry if fullname in tags: used[fullname] = docname return True for objnode in doctree.traverse(addnodes.desc): if objnode.get('domain') != 'py': continue names = set() for signode in objnode: if not isinstance(signode, addnodes.desc_signature): continue modname = signode.get('module') fullname = signode.get('fullname') refname = modname if env.config.viewcode_import: modname = get_full_modname(modname, fullname) if not modname: continue if not has_tag(modname, fullname, env.docname, refname): continue if fullname in names: # only one link per name, please continue names.add(fullname) pagename = '_modules/' + modname.replace('.', '/') onlynode = addnodes.only(expr='html') onlynode += addnodes.pending_xref( '', reftype='viewcode', refdomain='std', refexplicit=False, reftarget=pagename, refid=fullname, refdoc=env.docname) onlynode[0] += nodes.inline('', _('[source]'), classes=['viewcode-link']) signode += onlynode def missing_reference(app, env, node, contnode): # resolve our "viewcode" reference nodes -- they need special treatment if node['reftype'] == 'viewcode': return make_refnode(app.builder, node['refdoc'], node['reftarget'], node['refid'], contnode) def collect_pages(app): env = app.builder.env if not hasattr(env, '_viewcode_modules'): return highlighter = app.builder.highlighter urito = app.builder.get_relative_uri modnames = set(env._viewcode_modules) app.builder.info(' (%d module code pages)' % len(env._viewcode_modules), nonl=1) for modname, entry in iteritems(env._viewcode_modules): if not entry: continue code, tags, used, refname = entry # construct a page name for the highlighted source pagename = '_modules/' + modname.replace('.', '/') # highlight the source using the builder's highlighter highlighted = highlighter.highlight_block(code, 'python', linenos=False) # split the code into lines lines = highlighted.splitlines() # split off wrap markup from the first line of the actual code before, after = lines[0].split('
</span>') lines[0:1] = [before + '</span>', after] # nothing to do for the last line; it always starts with </pre> anyway # now that we have code lines (starting at index 1), insert anchors for # the collected tags (HACK: this only works if the tag boundaries are # properly nested!) maxindex = len(lines) - 1 for name, docname in iteritems(used): type, start, end = tags[name] backlink = urito(pagename, docname) + '#' + refname + '.' + name lines[start] = ( '<div class="viewcode-block" id="%s"><a class="viewcode-back" href="%s">%s</a>' % (name, backlink, _('[docs]')) + lines[start]) lines[min(end - 1, maxindex)] += '</div>' # try to find parents (for submodules) parents = [] parent = modname while '.' in parent: parent = parent.rsplit('.', 1)[0] if parent in modnames: parents.append({ 'link': urito(pagename, '_modules/' + parent.replace('.', '/')), 'title': parent}) parents.append({'link': urito(pagename, '_modules/index'), 'title': _('Module code')}) parents.reverse() # putting it all together context = { 'parents': parents, 'title': modname, 'body': _('<h1>Source code for %s</h1>') % modname + \ '\n'.join(lines) } yield (pagename, context, 'page.html') if not modnames: return app.builder.info(' _modules/index') html = ['\n'] # the stack logic is needed for using nested lists for submodules stack = [''] for modname in sorted(modnames): if modname.startswith(stack[-1]): stack.append(modname + '.') html.append('<ul>') else: stack.pop() while not modname.startswith(stack[-1]): stack.pop() html.append('</ul>') stack.append(modname + '.') html.append('<li><a href="%s">%s</a></li>\n' % ( urito('_modules/index', '_modules/' + modname.replace('.', '/')), modname)) html.append('</ul>' * (len(stack) - 1)) context = { 'title': _('Overview: module code'), 'body': _('<h1>All modules for which code is available</h1>
    ') + \ ''.join(html), } yield ('_modules/index', context, 'page.html') def setup(app): app.add_config_value('viewcode_import', True, False) app.connect('doctree-read', doctree_read) app.connect('html-collect-pages', collect_pages) app.connect('missing-reference', missing_reference) #app.add_config_value('viewcode_include_modules', [], 'env') #app.add_config_value('viewcode_exclude_modules', [], 'env') astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/edit_on_github.py0000644001134200020070000001340712602613466026463 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This extension makes it easy to edit documentation on github. It adds links associated with each docstring that go to the corresponding view source page on Github. From there, the user can push the "Edit" button, edit the docstring, and submit a pull request. It has the following configuration options (to be set in the project's ``conf.py``): * ``edit_on_github_project`` The name of the github project, in the form "username/projectname". * ``edit_on_github_branch`` The name of the branch to edit. If this is a released version, this should be a git tag referring to that version. For a dev version, it often makes sense for it to be "master". It may also be a git hash. * ``edit_on_github_source_root`` The location within the source tree of the root of the Python package. Defaults to "lib". * ``edit_on_github_doc_root`` The location within the source tree of the root of the documentation source. Defaults to "doc", but it may make sense to set it to "doc/source" if the project uses a separate source directory. * ``edit_on_github_docstring_message`` The phrase displayed in the links to edit a docstring. Defaults to "[edit on github]". * ``edit_on_github_page_message`` The phrase displayed in the links to edit a RST page. Defaults to "[edit this page on github]". * ``edit_on_github_help_message`` The phrase displayed as a tooltip on the edit links. Defaults to "Push the Edit button on the next page" * ``edit_on_github_skip_regex`` When the path to the .rst file matches this regular expression, no "edit this page on github" link will be added. Defaults to ``"_.*"``. """ import inspect import os import re import sys from docutils import nodes from sphinx import addnodes def import_object(modname, name): """ Import the object given by *modname* and *name* and return it. If not found, or the import fails, returns None. 
""" try: __import__(modname) mod = sys.modules[modname] obj = mod for part in name.split('.'): obj = getattr(obj, part) return obj except: return None def get_url_base(app): return 'http://github.com/%s/tree/%s/' % ( app.config.edit_on_github_project, app.config.edit_on_github_branch) def doctree_read(app, doctree): # Get the configuration parameters if app.config.edit_on_github_project == 'REQUIRED': raise ValueError( "The edit_on_github_project configuration variable must be " "provided in the conf.py") source_root = app.config.edit_on_github_source_root url = get_url_base(app) docstring_message = app.config.edit_on_github_docstring_message # Handle the docstring-editing links for objnode in doctree.traverse(addnodes.desc): if objnode.get('domain') != 'py': continue names = set() for signode in objnode: if not isinstance(signode, addnodes.desc_signature): continue modname = signode.get('module') if not modname: continue fullname = signode.get('fullname') if fullname in names: # only one link per name, please continue names.add(fullname) obj = import_object(modname, fullname) anchor = None if obj is not None: try: lines, lineno = inspect.getsourcelines(obj) except: pass else: anchor = '#L%d' % lineno if anchor: real_modname = inspect.getmodule(obj).__name__ path = '%s%s%s.py%s' % ( url, source_root, real_modname.replace('.', '/'), anchor) onlynode = addnodes.only(expr='html') onlynode += nodes.reference( reftitle=app.config.edit_on_github_help_message, refuri=path) onlynode[0] += nodes.inline( '', '', nodes.raw('', ' ', format='html'), nodes.Text(docstring_message), classes=['edit-on-github', 'viewcode-link']) signode += onlynode def html_page_context(app, pagename, templatename, context, doctree): if (templatename == 'page.html' and not re.match(app.config.edit_on_github_skip_regex, pagename)): doc_root = app.config.edit_on_github_doc_root if doc_root != '' and not doc_root.endswith('/'): doc_root += '/' doc_path = os.path.relpath(doctree.get('source'), app.builder.srcdir) url = get_url_base(app) page_message = app.config.edit_on_github_page_message context['edit_on_github'] = url + doc_root + doc_path context['edit_on_github_page_message'] = ( app.config.edit_on_github_page_message) def setup(app): app.add_config_value('edit_on_github_project', 'REQUIRED', True) app.add_config_value('edit_on_github_branch', 'master', True) app.add_config_value('edit_on_github_source_root', 'lib', True) app.add_config_value('edit_on_github_doc_root', 'doc', True) app.add_config_value('edit_on_github_docstring_message', '[edit on github]', True) app.add_config_value('edit_on_github_page_message', 'Edit This Page on Github', True) app.add_config_value('edit_on_github_help_message', 'Push the Edit button on the next page', True) app.add_config_value('edit_on_github_skip_regex', '_.*', True) app.connect('doctree-read', doctree_read) app.connect('html-page-context', html_page_context) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/comment_eater.py0000644001134200020070000001245112602613466026320 0ustar embrayscience00000000000000from __future__ import division, absolute_import, print_function import sys if sys.version_info[0] >= 3: from io import StringIO else: from io import StringIO import compiler import inspect import textwrap import tokenize from .compiler_unparse import unparse class Comment(object): """ A comment block. """ is_comment = True def __init__(self, start_lineno, end_lineno, text): # int : The first line number in the block. 1-indexed. 
self.start_lineno = start_lineno # int : The last line number. Inclusive! self.end_lineno = end_lineno # str : The text block including '#' character but not any leading spaces. self.text = text def add(self, string, start, end, line): """ Add a new comment line. """ self.start_lineno = min(self.start_lineno, start[0]) self.end_lineno = max(self.end_lineno, end[0]) self.text += string def __repr__(self): return '%s(%r, %r, %r)' % (self.__class__.__name__, self.start_lineno, self.end_lineno, self.text) class NonComment(object): """ A non-comment block of code. """ is_comment = False def __init__(self, start_lineno, end_lineno): self.start_lineno = start_lineno self.end_lineno = end_lineno def add(self, string, start, end, line): """ Add lines to the block. """ if string.strip(): # Only add if not entirely whitespace. self.start_lineno = min(self.start_lineno, start[0]) self.end_lineno = max(self.end_lineno, end[0]) def __repr__(self): return '%s(%r, %r)' % (self.__class__.__name__, self.start_lineno, self.end_lineno) class CommentBlocker(object): """ Pull out contiguous comment blocks. """ def __init__(self): # Start with a dummy. self.current_block = NonComment(0, 0) # All of the blocks seen so far. self.blocks = [] # The index mapping lines of code to their associated comment blocks. self.index = {} def process_file(self, file): """ Process a file object. """ if sys.version_info[0] >= 3: nxt = file.__next__ else: nxt = file.next for token in tokenize.generate_tokens(nxt): self.process_token(*token) self.make_index() def process_token(self, kind, string, start, end, line): """ Process a single token. """ if self.current_block.is_comment: if kind == tokenize.COMMENT: self.current_block.add(string, start, end, line) else: self.new_noncomment(start[0], end[0]) else: if kind == tokenize.COMMENT: self.new_comment(string, start, end, line) else: self.current_block.add(string, start, end, line) def new_noncomment(self, start_lineno, end_lineno): """ We are transitioning from a noncomment to a comment. """ block = NonComment(start_lineno, end_lineno) self.blocks.append(block) self.current_block = block def new_comment(self, string, start, end, line): """ Possibly add a new comment. Only adds a new comment if this comment is the only thing on the line. Otherwise, it extends the noncomment block. """ prefix = line[:start[1]] if prefix.strip(): # Oops! Trailing comment, not a comment block. self.current_block.add(string, start, end, line) else: # A comment block. block = Comment(start[0], end[0], string) self.blocks.append(block) self.current_block = block def make_index(self): """ Make the index mapping lines of actual code to their associated prefix comments. """ for prev, block in zip(self.blocks[:-1], self.blocks[1:]): if not block.is_comment: self.index[block.start_lineno] = prev def search_for_comment(self, lineno, default=None): """ Find the comment block just before the given line number. Returns None (or the specified default) if there is no such block. """ if not self.index: self.make_index() block = self.index.get(lineno, None) text = getattr(block, 'text', default) return text def strip_comment_marker(text): """ Strip # markers at the front of a block of comment text. """ lines = [] for line in text.splitlines(): lines.append(line.lstrip('#')) text = textwrap.dedent('\n'.join(lines)) return text def get_class_traits(klass): """ Yield all of the documentation for trait definitions on a class object. """ # FIXME: gracefully handle errors here or in the caller? 
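    # Illustrative sketch only (the class below is hypothetical, not part of
    # this package): for a definition such as
    #
    #     class Widget(HasTraits):
    #         # The width of the widget, in pixels.
    #         width = Float(10.0)
    #
    # this generator is expected to yield tuples roughly of the form
    # ('width', 'Float(10.0)', 'The width of the widget, in pixels.'),
    # i.e. (trait name, right-hand-side expression, associated comment text
    # with the leading '#' markers stripped).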
source = inspect.getsource(klass) cb = CommentBlocker() cb.process_file(StringIO(source)) mod_ast = compiler.parse(source) class_ast = mod_ast.node.nodes[0] for node in class_ast.code.nodes: # FIXME: handle other kinds of assignments? if isinstance(node, compiler.ast.Assign): name = node.nodes[0].name rhs = unparse(node.expr).strip() doc = strip_comment_marker(cb.search_for_comment(node.lineno, default='')) yield name, rhs, doc astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/automodapi.py0000644001134200020070000003164712602613466025650 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This sphinx extension adds a tools to simplify generating the API documentation for Astropy packages and affiliated packages. .. _automodapi: ======================== automodapi directive ======================== This directive takes a single argument that must be a module or package. It will produce a block of documentation that includes the docstring for the package, an :ref:`automodsumm` directive, and an :ref:`automod-diagram` if there are any classes in the module. If only the main docstring of the module/package is desired in the documentation, use `automodule`_ instead of `automodapi`_. It accepts the following options: * ``:no-inheritance-diagram:`` If present, the inheritance diagram will not be shown even if the module/package has classes. * ``:skip: str`` This option results in the specified object being skipped, that is the object will *not* be included in the generated documentation. This option may appear any number of times to skip multiple objects. * ``:no-main-docstr:`` If present, the docstring for the module/package will not be generated. The function and class tables will still be used, however. * ``:headings: str`` Specifies the characters (in one string) used as the heading levels used for the generated section. This must have at least 2 characters (any after 2 will be ignored). This also *must* match the rest of the documentation on this page for sphinx to be happy. Defaults to "-^", which matches the convention used for Python's documentation, assuming the automodapi call is inside a top-level section (which usually uses '='). * ``:no-heading:`` If specified do not create a top level heading for the section. That is, do not create a title heading with text like "packagename Package". The actual docstring for the package/module will still be shown, though, unless ``:no-main-docstr:`` is given. * ``:allowed-package-names: str`` Specifies the packages that functions/classes documented here are allowed to be from, as comma-separated list of package names. If not given, only objects that are actually in a subpackage of the package currently being documented are included. This extension also adds two sphinx configuration options: * ``automodapi_toctreedirnm`` This must be a string that specifies the name of the directory the automodsumm generated documentation ends up in. This directory path should be relative to the documentation root (e.g., same place as ``index.rst``). Defaults to ``'api'``. * ``automodapi_writereprocessed`` Should be a bool, and if `True`, will cause `automodapi`_ to write files with any `automodapi`_ sections replaced with the content Sphinx processes after `automodapi`_ has run. The output files are not actually used by sphinx, so this option is only for figuring out the cause of sphinx warnings or other debugging. Defaults to `False`. .. 
_automodule: http://sphinx-doc.org/latest/ext/autodoc.html?highlight=automodule#directive-automodule """ # Implementation note: # The 'automodapi' directive is not actually implemented as a docutils # directive. Instead, this extension searches for the 'automodapi' text in # all sphinx documents, and replaces it where necessary from a template built # into this extension. This is necessary because automodsumm (and autosummary) # use the "builder-inited" event, which comes before the directives are # actually built. import inspect import os import re import sys from .utils import find_mod_objs if sys.version_info[0] == 3: text_type = str else: text_type = unicode automod_templ_modheader = """ {modname} {pkgormod} {modhds}{pkgormodhds} {automoduleline} """ automod_templ_classes = """ Classes {clshds} .. automodsumm:: {modname} :classes-only: {clsfuncoptions} """ automod_templ_funcs = """ Functions {funchds} .. automodsumm:: {modname} :functions-only: {clsfuncoptions} """ automod_templ_inh = """ Class Inheritance Diagram {clsinhsechds} .. automod-diagram:: {modname} :private-bases: :parts: 1 {allowedpkgnms} """ _automodapirex = re.compile(r'^(?:\s*\.\.\s+automodapi::\s*)([A-Za-z0-9_.]+)' r'\s*$((?:\n\s+:[a-zA-Z_\-]+:.*$)*)', flags=re.MULTILINE) # the last group of the above regex is intended to go into finall with the below _automodapiargsrex = re.compile(r':([a-zA-Z_\-]+):(.*)$', flags=re.MULTILINE) def automodapi_replace(sourcestr, app, dotoctree=True, docname=None, warnings=True): """ Replaces `sourcestr`'s entries of ".. automdapi::" with the automodapi template form based on provided options. This is used with the sphinx event 'source-read' to replace `automodapi`_ entries before sphinx actually processes them, as automodsumm needs the code to be present to generate stub documentation. Parameters ---------- sourcestr : str The string with sphinx source to be checked for automodapi replacement. app : `sphinx.application.Application` The sphinx application. dotoctree : bool If `True`, a ":toctree:" option will be added in the ".. automodsumm::" sections of the template, pointing to the appropriate "generated" directory based on the Astropy convention (e.g. in ``docs/api``) docname : str The name of the file for this `sourcestr` (if known - if not, it can be `None`). If not provided and `dotoctree` is `True`, the generated files may end up in the wrong place. warnings : bool If `False`, all warnings that would normally be issued are silenced. Returns ------- newstr :str The string with automodapi entries replaced with the correct sphinx markup. 
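    Notes
    -----
    (Editorial illustration; the package and class names are hypothetical.)
    A block that this function rewrites looks roughly like the following in
    the ``.rst`` source::

        .. automodapi:: mypackage.subpackage
            :no-inheritance-diagram:
            :skip: SomePrivateClass

    Before Sphinx parses the document, the block is replaced with the
    corresponding ``automodsumm``-based template text defined earlier in
    this module.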
""" spl = _automodapirex.split(sourcestr) if len(spl) > 1: # automodsumm is in this document if dotoctree: toctreestr = ':toctree: ' dirnm = app.config.automodapi_toctreedirnm if not dirnm.endswith("/"): dirnm += "/" if docname is not None: toctreestr += '../' * docname.count('/') + dirnm else: toctreestr += dirnm else: toctreestr = '' newstrs = [spl[0]] for grp in range(len(spl) // 3): modnm = spl[grp * 3 + 1] # find where this is in the document for warnings if docname is None: location = None else: location = (docname, spl[0].count('\n')) # initialize default options toskip = [] inhdiag = maindocstr = top_head = True hds = '-^' allowedpkgnms = [] # look for actual options unknownops = [] for opname, args in _automodapiargsrex.findall(spl[grp * 3 + 2]): if opname == 'skip': toskip.append(args.strip()) elif opname == 'no-inheritance-diagram': inhdiag = False elif opname == 'no-main-docstr': maindocstr = False elif opname == 'headings': hds = args elif opname == 'no-heading': top_head = False elif opname == 'allowed-package-names': allowedpkgnms.append(args.strip()) else: unknownops.append(opname) #join all the allowedpkgnms if len(allowedpkgnms) == 0: allowedpkgnms = '' onlylocals = True else: allowedpkgnms = ':allowed-package-names: ' + ','.join(allowedpkgnms) onlylocals = allowedpkgnms # get the two heading chars if len(hds) < 2: msg = 'Not enough headings (got {0}, need 2), using default -^' if warnings: app.warn(msg.format(len(hds)), location) hds = '-^' h1, h2 = hds.lstrip()[:2] # tell sphinx that the remaining args are invalid. if len(unknownops) > 0 and app is not None: opsstrs = ','.join(unknownops) msg = 'Found additional options ' + opsstrs + ' in automodapi.' if warnings: app.warn(msg, location) ispkg, hascls, hasfuncs = _mod_info(modnm, toskip, onlylocals=onlylocals) # add automodule directive only if no-main-docstr isn't present if maindocstr: automodline = '.. automodule:: {modname}'.format(modname=modnm) else: automodline = '' if top_head: newstrs.append(automod_templ_modheader.format(modname=modnm, modhds=h1 * len(modnm), pkgormod='Package' if ispkg else 'Module', pkgormodhds=h1 * (8 if ispkg else 7), automoduleline=automodline)) else: newstrs.append(automod_templ_modheader.format( modname='', modhds='', pkgormod='', pkgormodhds='', automoduleline=automodline)) #construct the options for the class/function sections #start out indented at 4 spaces, but need to keep the indentation. 
clsfuncoptions = [] if toctreestr: clsfuncoptions.append(toctreestr) if toskip: clsfuncoptions.append(':skip: ' + ','.join(toskip)) if allowedpkgnms: clsfuncoptions.append(allowedpkgnms) clsfuncoptionstr = '\n '.join(clsfuncoptions) if hasfuncs: newstrs.append(automod_templ_funcs.format( modname=modnm, funchds=h2 * 9, clsfuncoptions=clsfuncoptionstr)) if hascls: newstrs.append(automod_templ_classes.format( modname=modnm, clshds=h2 * 7, clsfuncoptions=clsfuncoptionstr)) if inhdiag and hascls: # add inheritance diagram if any classes are in the module newstrs.append(automod_templ_inh.format( modname=modnm, clsinhsechds=h2 * 25, allowedpkgnms=allowedpkgnms)) newstrs.append(spl[grp * 3 + 3]) newsourcestr = ''.join(newstrs) if app.config.automodapi_writereprocessed: # sometimes they are unicode, sometimes not, depending on how # sphinx has processed things if isinstance(newsourcestr, text_type): ustr = newsourcestr else: ustr = newsourcestr.decode(app.config.source_encoding) if docname is None: with open(os.path.join(app.srcdir, 'unknown.automodapi'), 'a') as f: f.write('\n**NEW DOC**\n\n') f.write(ustr) else: env = app.builder.env # Determine the filename associated with this doc (specifically # the extension) filename = docname + os.path.splitext(env.doc2path(docname))[1] filename += '.automodapi' with open(os.path.join(app.srcdir, filename), 'w') as f: f.write(ustr) return newsourcestr else: return sourcestr def _mod_info(modname, toskip=[], onlylocals=True): """ Determines if a module is a module or a package and whether or not it has classes or functions. """ hascls = hasfunc = False for localnm, fqnm, obj in zip(*find_mod_objs(modname, onlylocals=onlylocals)): if localnm not in toskip: hascls = hascls or inspect.isclass(obj) hasfunc = hasfunc or inspect.isroutine(obj) if hascls and hasfunc: break # find_mod_objs has already imported modname # TODO: There is probably a cleaner way to do this, though this is pretty # reliable for all Python versions for most cases that we care about. pkg = sys.modules[modname] ispkg = (hasattr(pkg, '__file__') and isinstance(pkg.__file__, str) and os.path.split(pkg.__file__)[1].startswith('__init__.py')) return ispkg, hascls, hasfunc def process_automodapi(app, docname, source): source[0] = automodapi_replace(source[0], app, True, docname) def setup(app): # need automodsumm for automodapi app.setup_extension('astropy_helpers.sphinx.ext.automodsumm') app.connect('source-read', process_automodapi) app.add_config_value('automodapi_toctreedirnm', 'api', True) app.add_config_value('automodapi_writereprocessed', False, True) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/automodsumm.py0000644001134200020070000005610412602613466026053 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This sphinx extension adds two directives for summarizing the public members of a module or package. These directives are primarily for use with the `automodapi`_ extension, but can be used independently. .. _automodsumm: ======================= automodsumm directive ======================= This directive will produce an "autosummary"-style table for public attributes of a specified module. See the `sphinx.ext.autosummary`_ extension for details on this process. The main difference from the `autosummary`_ directive is that `autosummary`_ requires manually inputting all attributes that appear in the table, while this captures the entries automatically. This directive requires a single argument that must be a module or package. 
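A minimal, purely hypothetical usage therefore looks like::

    .. automodsumm:: mypackage.mymodule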
It also accepts any options supported by the `autosummary`_ directive- see `sphinx.ext.autosummary`_ for details. It also accepts two additional options: * ``:classes-only:`` If present, the autosummary table will only contain entries for classes. This cannot be used at the same time with ``:functions-only:`` . * ``:functions-only:`` If present, the autosummary table will only contain entries for functions. This cannot be used at the same time with ``:classes-only:`` . * ``:skip: obj1, [obj2, obj3, ...]`` If present, specifies that the listed objects should be skipped and not have their documentation generated, nor be included in the summary table. * ``:allowed-package-names: pkgormod1, [pkgormod2, pkgormod3, ...]`` Specifies the packages that functions/classes documented here are allowed to be from, as comma-separated list of package names. If not given, only objects that are actually in a subpackage of the package currently being documented are included. This extension also adds one sphinx configuration option: * ``automodsumm_writereprocessed`` Should be a bool, and if True, will cause `automodsumm`_ to write files with any ``automodsumm`` sections replaced with the content Sphinx processes after ``automodsumm`` has run. The output files are not actually used by sphinx, so this option is only for figuring out the cause of sphinx warnings or other debugging. Defaults to `False`. .. _sphinx.ext.autosummary: http://sphinx-doc.org/latest/ext/autosummary.html .. _autosummary: http://sphinx-doc.org/latest/ext/autosummary.html#directive-autosummary .. _automod-diagram: =========================== automod-diagram directive =========================== This directive will produce an inheritance diagram like that of the `sphinx.ext.inheritance_diagram`_ extension. This directive requires a single argument that must be a module or package. It accepts no options. .. note:: Like 'inheritance-diagram', 'automod-diagram' requires `graphviz `_ to generate the inheritance diagram. .. _sphinx.ext.inheritance_diagram: http://sphinx-doc.org/latest/ext/inheritance.html """ import inspect import os import re from distutils.version import LooseVersion import sphinx from sphinx.ext.autosummary import Autosummary from sphinx.ext.inheritance_diagram import InheritanceDiagram from docutils.parsers.rst.directives import flag from .utils import find_mod_objs from .astropyautosummary import AstropyAutosummary # Don't use AstropyAutosummary with newer versions of Sphinx # See https://github.com/astropy/astropy-helpers/pull/129 if LooseVersion(sphinx.__version__) < LooseVersion('1.2.0'): BaseAutosummary = AstropyAutosummary else: BaseAutosummary = Autosummary def _str_list_converter(argument): """ A directive option conversion function that converts the option into a list of strings. Used for 'skip' option. 
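    For example, an option value of ``'foo, bar , baz'`` is converted to
    ``['foo', 'bar', 'baz']``.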
""" if argument is None: return [] else: return [s.strip() for s in argument.split(',')] class Automodsumm(BaseAutosummary): required_arguments = 1 optional_arguments = 0 final_argument_whitespace = False has_content = False option_spec = dict(Autosummary.option_spec) option_spec['functions-only'] = flag option_spec['classes-only'] = flag option_spec['skip'] = _str_list_converter option_spec['allowed-package-names'] = _str_list_converter def run(self): env = self.state.document.settings.env modname = self.arguments[0] self.warnings = [] nodelist = [] try: localnames, fqns, objs = find_mod_objs(modname) except ImportError: self.warnings = [] self.warn("Couldn't import module " + modname) return self.warnings try: # set self.content to trick the Autosummary internals. # Be sure to respect functions-only and classes-only. funconly = 'functions-only' in self.options clsonly = 'classes-only' in self.options skipnames = [] if 'skip' in self.options: option_skipnames = set(self.options['skip']) for lnm in localnames: if lnm in option_skipnames: option_skipnames.remove(lnm) skipnames.append(lnm) if len(option_skipnames) > 0: self.warn('Tried to skip objects {objs} in module {mod}, ' 'but they were not present. Ignoring.'.format( objs=option_skipnames, mod=modname)) if funconly and not clsonly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and inspect.isroutine(obj): cont.append(nm) elif clsonly: cont = [] for nm, obj in zip(localnames, objs): if nm not in skipnames and inspect.isclass(obj): cont.append(nm) else: if clsonly and funconly: self.warning('functions-only and classes-only both ' 'defined. Skipping.') cont = [nm for nm in localnames if nm not in skipnames] self.content = cont # for some reason, even though ``currentmodule`` is substituted in, # sphinx doesn't necessarily recognize this fact. 
So we just force # it internally, and that seems to fix things env.temp_data['py:module'] = modname # can't use super because Sphinx/docutils has trouble return # super(Autosummary,self).run() nodelist.extend(Autosummary.run(self)) return self.warnings + nodelist finally: # has_content = False for the Automodsumm self.content = [] def get_items(self, names): self.genopt['imported-members'] = True return Autosummary.get_items(self, names) #<-------------------automod-diagram stuff------------------------------------> class Automoddiagram(InheritanceDiagram): option_spec = dict(InheritanceDiagram.option_spec) option_spec['allowed-package-names'] = _str_list_converter def run(self): try: ols = self.options.get('allowed-package-names', []) ols = True if len(ols) == 0 else ols # if none are given, assume only local nms, objs = find_mod_objs(self.arguments[0], onlylocals=ols)[1:] except ImportError: self.warnings = [] self.warn("Couldn't import module " + self.arguments[0]) return self.warnings clsnms = [] for n, o in zip(nms, objs): if inspect.isclass(o): clsnms.append(n) oldargs = self.arguments try: if len(clsnms) > 0: self.arguments = [' '.join(clsnms)] return InheritanceDiagram.run(self) finally: self.arguments = oldargs #<---------------------automodsumm generation stuff---------------------------> def process_automodsumm_generation(app): env = app.builder.env filestosearch = [] for docname in env.found_docs: filename = env.doc2path(docname) if os.path.isfile(filename): filestosearch.append(docname + os.path.splitext(filename)[1]) liness = [] for sfn in filestosearch: lines = automodsumm_to_autosummary_lines(sfn, app) liness.append(lines) if app.config.automodsumm_writereprocessed: if lines: # empty list means no automodsumm entry is in the file outfn = os.path.join(app.srcdir, sfn) + '.automodsumm' with open(outfn, 'w') as f: for l in lines: f.write(l) f.write('\n') for sfn, lines in zip(filestosearch, liness): suffix = os.path.splitext(sfn)[1] if len(lines) > 0: generate_automodsumm_docs(lines, sfn, builder=app.builder, warn=app.warn, info=app.info, suffix=suffix, base_path=app.srcdir) #_automodsummrex = re.compile(r'^(\s*)\.\. automodsumm::\s*([A-Za-z0-9_.]+)\s*' # r'\n\1(\s*)(\S|$)', re.MULTILINE) _lineendrex = r'(?:\n|$)' _hdrex = r'^\n?(\s*)\.\. automodsumm::\s*(\S+)\s*' + _lineendrex _oprex1 = r'(?:\1(\s+)\S.*' + _lineendrex + ')' _oprex2 = r'(?:\1\4\S.*' + _lineendrex + ')' _automodsummrex = re.compile(_hdrex + '(' + _oprex1 + '?' + _oprex2 + '*)', re.MULTILINE) def automodsumm_to_autosummary_lines(fn, app): """ Generates lines from a file with an "automodsumm" entry suitable for feeding into "autosummary". Searches the provided file for `automodsumm` directives and returns a list of lines specifying the `autosummary` commands for the modules requested. This does *not* return the whole file contents - just an autosummary section in place of any :automodsumm: entries. Note that any options given for `automodsumm` are also included in the generated `autosummary` section. Parameters ---------- fn : str The name of the file to search for `automodsumm` entries. app : sphinx.application.Application The sphinx Application object Return ------ lines : list of str Lines for all `automodsumm` entries with the entries replaced by `autosummary` and the module's members added. 
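    Notes
    -----
    Editorial illustration with hypothetical names: an entry such as::

        .. automodsumm:: mypackage.mymodule
            :toctree: api

    is expected to come back as lines roughly of the form::

        .. currentmodule:: mypackage.mymodule

        .. autosummary::
            :toctree: api

            some_function
            SomeClass

    where the listed names are the public objects found in
    ``mypackage.mymodule``.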
""" fullfn = os.path.join(app.builder.env.srcdir, fn) with open(fullfn) as fr: if 'astropy_helpers.sphinx.ext.automodapi' in app._extensions: from astropy_helpers.sphinx.ext.automodapi import automodapi_replace # Must do the automodapi on the source to get the automodsumm # that might be in there docname = os.path.splitext(fn)[0] filestr = automodapi_replace(fr.read(), app, True, docname, False) else: filestr = fr.read() spl = _automodsummrex.split(filestr) #0th entry is the stuff before the first automodsumm line indent1s = spl[1::5] mods = spl[2::5] opssecs = spl[3::5] indent2s = spl[4::5] remainders = spl[5::5] # only grab automodsumm sections and convert them to autosummary with the # entries for all the public objects newlines = [] #loop over all automodsumms in this document for i, (i1, i2, modnm, ops, rem) in enumerate(zip(indent1s, indent2s, mods, opssecs, remainders)): allindent = i1 + ('' if i2 is None else i2) #filter out functions-only and classes-only options if present oplines = ops.split('\n') toskip = [] allowedpkgnms = [] funcsonly = clssonly = False for i, ln in reversed(list(enumerate(oplines))): if ':functions-only:' in ln: funcsonly = True del oplines[i] if ':classes-only:' in ln: clssonly = True del oplines[i] if ':skip:' in ln: toskip.extend(_str_list_converter(ln.replace(':skip:', ''))) del oplines[i] if ':allowed-package-names:' in ln: allowedpkgnms.extend(_str_list_converter(ln.replace(':allowed-package-names:', ''))) del oplines[i] if funcsonly and clssonly: msg = ('Defined both functions-only and classes-only options. ' 'Skipping this directive.') lnnum = sum([spl[j].count('\n') for j in range(i * 5 + 1)]) app.warn('[automodsumm]' + msg, (fn, lnnum)) continue # Use the currentmodule directive so we can just put the local names # in the autosummary table. Note that this doesn't always seem to # actually "take" in Sphinx's eyes, so in `Automodsumm.run`, we have to # force it internally, as well. newlines.extend([i1 + '.. currentmodule:: ' + modnm, '', '.. autosummary::']) newlines.extend(oplines) ols = True if len(allowedpkgnms) == 0 else allowedpkgnms for nm, fqn, obj in zip(*find_mod_objs(modnm, onlylocals=ols)): if nm in toskip: continue if funcsonly and not inspect.isroutine(obj): continue if clssonly and not inspect.isclass(obj): continue newlines.append(allindent + nm) # add one newline at the end of the autosummary block newlines.append('') return newlines def generate_automodsumm_docs(lines, srcfn, suffix='.rst', warn=None, info=None, base_path=None, builder=None, template_dir=None): """ This function is adapted from `sphinx.ext.autosummary.generate.generate_autosummmary_docs` to generate source for the automodsumm directives that should be autosummarized. Unlike generate_autosummary_docs, this function is called one file at a time. """ from sphinx.jinja2glue import BuiltinTemplateLoader from sphinx.ext.autosummary import import_by_name, get_documenter from sphinx.ext.autosummary.generate import (find_autosummary_in_lines, _simple_info, _simple_warn) from sphinx.util.osutil import ensuredir from sphinx.util.inspect import safe_getattr from jinja2 import FileSystemLoader, TemplateNotFound from jinja2.sandbox import SandboxedEnvironment if info is None: info = _simple_info if warn is None: warn = _simple_warn #info('[automodsumm] generating automodsumm for: ' + srcfn) # Create our own templating environment - here we use Astropy's # templates rather than the default autosummary templates, in order to # allow docstrings to be shown for methods. 
template_dirs = [os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(base_path, '_templates')] if builder is not None: # allow the user to override the templates template_loader = BuiltinTemplateLoader() template_loader.init(builder, dirs=template_dirs) else: if template_dir: template_dirs.insert(0, template_dir) template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) # read #items = find_autosummary_in_files(sources) items = find_autosummary_in_lines(lines, filename=srcfn) if len(items) > 0: msg = '[automodsumm] {1}: found {0} automodsumm entries to generate' info(msg.format(len(items), srcfn)) # gennms = [item[0] for item in items] # if len(gennms) > 20: # gennms = gennms[:10] + ['...'] + gennms[-10:] # info('[automodsumm] generating autosummary for: ' + ', '.join(gennms)) # remove possible duplicates items = dict([(item, True) for item in items]).keys() # keep track of new files new_files = [] # write for name, path, template_name in sorted(items): if path is None: # The corresponding autosummary:: directive did not have # a :toctree: option continue path = os.path.abspath(path) ensuredir(path) try: import_by_name_values = import_by_name(name) except ImportError as e: warn('[automodsumm] failed to import %r: %s' % (name, e)) continue # if block to accommodate Sphinx's v1.2.2 and v1.2.3 respectively if len(import_by_name_values) == 3: name, obj, parent = import_by_name_values elif len(import_by_name_values) == 4: name, obj, parent, module_name = import_by_name_values fn = os.path.join(path, name + suffix) # skip it if it exists if os.path.isfile(fn): continue new_files.append(fn) f = open(fn, 'w') try: doc = get_documenter(obj, parent) if template_name is not None: template = template_env.get_template(template_name) else: tmplstr = 'autosummary/%s.rst' try: template = template_env.get_template(tmplstr % doc.objtype) except TemplateNotFound: template = template_env.get_template(tmplstr % 'base') def get_members_mod(obj, typ, include_public=[]): """ typ = None -> all """ items = [] for name in dir(obj): try: documenter = get_documenter(safe_getattr(obj, name), obj) except AttributeError: continue if typ is None or documenter.objtype == typ: items.append(name) public = [x for x in items if x in include_public or not x.startswith('_')] return public, items def get_members_class(obj, typ, include_public=[], include_base=False): """ typ = None -> all include_base -> include attrs that are from a base class """ items = [] # using dir gets all of the attributes, including the elements # from the base class, otherwise use __slots__ or __dict__ if include_base: names = dir(obj) else: if hasattr(obj, '__slots__'): names = tuple(getattr(obj, '__slots__')) else: names = getattr(obj, '__dict__').keys() for name in names: try: documenter = get_documenter(safe_getattr(obj, name), obj) except AttributeError: continue if typ is None or documenter.objtype == typ: items.append(name) public = [x for x in items if x in include_public or not x.startswith('_')] return public, items ns = {} if doc.objtype == 'module': ns['members'] = get_members_mod(obj, None) ns['functions'], ns['all_functions'] = \ get_members_mod(obj, 'function') ns['classes'], ns['all_classes'] = \ get_members_mod(obj, 'class') ns['exceptions'], ns['all_exceptions'] = \ get_members_mod(obj, 'exception') elif doc.objtype == 'class': api_class_methods = ['__init__', '__call__'] ns['members'] = get_members_class(obj, None) ns['methods'], ns['all_methods'] = \ 
get_members_class(obj, 'method', api_class_methods) ns['attributes'], ns['all_attributes'] = \ get_members_class(obj, 'attribute') ns['methods'].sort() ns['attributes'].sort() parts = name.split('.') if doc.objtype in ('method', 'attribute'): mod_name = '.'.join(parts[:-2]) cls_name = parts[-2] obj_name = '.'.join(parts[-2:]) ns['class'] = cls_name else: mod_name, obj_name = '.'.join(parts[:-1]), parts[-1] ns['fullname'] = name ns['module'] = mod_name ns['objname'] = obj_name ns['name'] = parts[-1] ns['objtype'] = doc.objtype ns['underline'] = len(name) * '=' # We now check whether a file for reference footnotes exists for # the module being documented. We first check if the # current module is a file or a directory, as this will give a # different path for the reference file. For example, if # documenting astropy.wcs then the reference file is at # ../wcs/references.txt, while if we are documenting # astropy.config.logging_helper (which is at # astropy/config/logging_helper.py) then the reference file is set # to ../config/references.txt if '.' in mod_name: mod_name_dir = mod_name.replace('.', '/').split('/', 1)[1] else: mod_name_dir = mod_name if not os.path.isdir(os.path.join(base_path, mod_name_dir)) \ and os.path.isdir(os.path.join(base_path, mod_name_dir.rsplit('/', 1)[0])): mod_name_dir = mod_name_dir.rsplit('/', 1)[0] # We then have to check whether it exists, and if so, we pass it # to the template. if os.path.exists(os.path.join(base_path, mod_name_dir, 'references.txt')): # An important subtlety here is that the path we pass in has # to be relative to the file being generated, so we have to # figure out the right number of '..'s ndirsback = path.replace(base_path, '').count('/') ref_file_rel_segments = ['..'] * ndirsback ref_file_rel_segments.append(mod_name_dir) ref_file_rel_segments.append('references.txt') ns['referencefile'] = os.path.join(*ref_file_rel_segments) rendered = template.render(**ns) f.write(rendered) finally: f.close() def setup(app): # need our autosummary and autodoc fixes app.setup_extension('astropy_helpers.sphinx.ext.astropyautosummary') app.setup_extension('astropy_helpers.sphinx.ext.autodoc_enhancements') # need inheritance-diagram for automod-diagram app.setup_extension('sphinx.ext.inheritance_diagram') app.add_directive('automod-diagram', Automoddiagram) app.add_directive('automodsumm', Automodsumm) app.connect('builder-inited', process_automodsumm_generation) app.add_config_value('automodsumm_writereprocessed', False, True) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/tocdepthfix.py0000644001134200020070000000124512602613466026016 0ustar embrayscience00000000000000from sphinx import addnodes def fix_toc_entries(app, doctree): # Get the docname; I don't know why this isn't just passed in to the # callback # This seems a bit unreliable as it's undocumented, but it's not "private" # either: docname = app.builder.env.temp_data['docname'] if app.builder.env.metadata[docname].get('tocdepth', 0) != 0: # We need to reprocess any TOC nodes in the doctree and make sure all # the files listed in any TOCs are noted for treenode in doctree.traverse(addnodes.toctree): app.builder.env.note_toctree(docname, treenode) def setup(app): app.connect('doctree-read', fix_toc_entries) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/traitsdoc.py0000644001134200020070000001026112602613466025467 0ustar embrayscience00000000000000""" ========= traitsdoc ========= Sphinx extension that handles docstrings in the Numpy standard format, [1] and support Traits [2]. 
This extension can be used as a replacement for ``numpydoc`` when support for Traits is required. .. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard .. [2] http://code.enthought.com/projects/traits/ """ from __future__ import division, absolute_import, print_function import inspect import os import pydoc import collections from . import docscrape from . import docscrape_sphinx from .docscrape_sphinx import SphinxClassDoc, SphinxFunctionDoc, SphinxDocString from . import numpydoc from . import comment_eater class SphinxTraitsDoc(SphinxClassDoc): def __init__(self, cls, modulename='', func_doc=SphinxFunctionDoc): if not inspect.isclass(cls): raise ValueError("Initialise using a class. Got %r" % cls) self._cls = cls if modulename and not modulename.endswith('.'): modulename += '.' self._mod = modulename self._name = cls.__name__ self._func_doc = func_doc docstring = pydoc.getdoc(cls) docstring = docstring.split('\n') # De-indent paragraph try: indent = min(len(s) - len(s.lstrip()) for s in docstring if s.strip()) except ValueError: indent = 0 for n,line in enumerate(docstring): docstring[n] = docstring[n][indent:] self._doc = docscrape.Reader(docstring) self._parsed_data = { 'Signature': '', 'Summary': '', 'Description': [], 'Extended Summary': [], 'Parameters': [], 'Returns': [], 'Raises': [], 'Warns': [], 'Other Parameters': [], 'Traits': [], 'Methods': [], 'See Also': [], 'Notes': [], 'References': '', 'Example': '', 'Examples': '', 'index': {} } self._parse() def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Description'] + self['Extended Summary'] + [''] def __str__(self, indent=0, func_role="func"): out = [] out += self._str_signature() out += self._str_index() + [''] out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Traits', 'Methods', 'Returns','Raises'): out += self._str_param_list(param_list) out += self._str_see_also("obj") out += self._str_section('Notes') out += self._str_references() out += self._str_section('Example') out += self._str_section('Examples') out = self._str_indent(out,indent) return '\n'.join(out) def looks_like_issubclass(obj, classname): """ Return True if the object has a class or superclass with the given class name. Ignores old-style classes. """ t = obj if t.__name__ == classname: return True for klass in t.__mro__: if klass.__name__ == classname: return True return False def get_doc_object(obj, what=None, config=None): if what is None: if inspect.isclass(obj): what = 'class' elif inspect.ismodule(obj): what = 'module' elif isinstance(obj, collections.Callable): what = 'function' else: what = 'object' if what == 'class': doc = SphinxTraitsDoc(obj, '', func_doc=SphinxFunctionDoc, config=config) if looks_like_issubclass(obj, 'HasTraits'): for name, trait, comment in comment_eater.get_class_traits(obj): # Exclude private traits. 
if not name.startswith('_'): doc['Traits'].append((name, trait, comment.splitlines())) return doc elif what in ('function', 'method'): return SphinxFunctionDoc(obj, '', config=config) else: return SphinxDocString(pydoc.getdoc(obj), config=config) def setup(app): # init numpydoc numpydoc.setup(app, get_doc_object) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/smart_resolver.py0000644001134200020070000000717612602613466026555 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The classes in the astropy docs are documented by their API location, which is not necessarily where they are defined in the source. This causes a problem when certain automated features of the doc build, such as the inheritance diagrams or the `Bases` list of a class reference a class by its canonical location rather than its "user" location. In the `autodoc-process-docstring` event, a mapping from the actual name to the API name is maintained. Later, in the `missing-reference` event, unresolved references are looked up in this dictionary and corrected if possible. """ from docutils.nodes import literal, reference def process_docstring(app, what, name, obj, options, lines): if isinstance(obj, type): env = app.env if not hasattr(env, 'class_name_mapping'): env.class_name_mapping = {} mapping = env.class_name_mapping mapping[obj.__module__ + '.' + obj.__name__] = name def missing_reference_handler(app, env, node, contnode): if not hasattr(env, 'class_name_mapping'): env.class_name_mapping = {} mapping = env.class_name_mapping reftype = node['reftype'] reftarget = node['reftarget'] if reftype in ('obj', 'class', 'exc', 'meth'): reftarget = node['reftarget'] suffix = '' if reftarget not in mapping: if '.' in reftarget: front, suffix = reftarget.rsplit('.', 1) else: suffix = reftarget if suffix.startswith('_') and not suffix.startswith('__'): # If this is a reference to a hidden class or method, # we can't link to it, but we don't want to have a # nitpick warning. return node[0].deepcopy() if reftype in ('obj', 'meth') and '.' in reftarget: if front in mapping: reftarget = front suffix = '.' + suffix if (reftype in ('class', ) and '.' in reftarget and reftarget not in mapping): if '.' in front: reftarget, _ = front.rsplit('.', 1) suffix = '.' 
+ suffix reftarget = reftarget + suffix prefix = reftarget.rsplit('.')[0] if (reftarget not in mapping and prefix in env.intersphinx_named_inventory): if reftarget in env.intersphinx_named_inventory[prefix]['py:class']: newtarget = env.intersphinx_named_inventory[prefix]['py:class'][reftarget][2] if not node['refexplicit'] and \ '~' not in node.rawsource: contnode = literal(text=reftarget) newnode = reference('', '', internal=True) newnode['reftitle'] = reftarget newnode['refuri'] = newtarget newnode.append(contnode) return newnode if reftarget in mapping: newtarget = mapping[reftarget] + suffix if not node['refexplicit'] and not '~' in node.rawsource: contnode = literal(text=newtarget) newnode = env.domains['py'].resolve_xref( env, node['refdoc'], app.builder, 'class', newtarget, node, contnode) if newnode is not None: newnode['reftitle'] = reftarget return newnode def setup(app): app.connect('autodoc-process-docstring', process_docstring) app.connect('missing-reference', missing_reference_handler) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/astropyautosummary.py0000644001134200020070000001054312630421426027500 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This sphinx extension builds off of `sphinx.ext.autosummary` to clean up some issues it presents in the Astropy docs. The main issue this fixes is the summary tables getting cut off before the end of the sentence in some cases. Note: Sphinx 1.2 appears to have fixed the the main issues in the stock autosummary extension that are addressed by this extension. So use of this extension with newer versions of Sphinx is deprecated. """ import re from distutils.version import LooseVersion import sphinx from sphinx.ext.autosummary import Autosummary from ...utils import deprecated # used in AstropyAutosummary.get_items _itemsummrex = re.compile(r'^([A-Z].*?\.(?:\s|$))') @deprecated('1.0', message='AstropyAutosummary is only needed when used ' 'with Sphinx versions less than 1.2') class AstropyAutosummary(Autosummary): def get_items(self, names): """Try to import the given names, and return a list of ``[(name, signature, summary_string, real_name), ...]``. """ from sphinx.ext.autosummary import (get_import_prefixes_from_env, import_by_name, get_documenter, mangle_signature) env = self.state.document.settings.env prefixes = get_import_prefixes_from_env(env) items = [] max_item_chars = 50 for name in names: display_name = name if name.startswith('~'): name = name[1:] display_name = name.split('.')[-1] try: import_by_name_values = import_by_name(name, prefixes=prefixes) except ImportError: self.warn('[astropyautosummary] failed to import %s' % name) items.append((name, '', '', name)) continue # to accommodate Sphinx v1.2.2 and v1.2.3 if len(import_by_name_values) == 3: real_name, obj, parent = import_by_name_values elif len(import_by_name_values) == 4: real_name, obj, parent, module_name = import_by_name_values # NB. 
using real_name here is important, since Documenters # handle module prefixes slightly differently documenter = get_documenter(obj, parent)(self, real_name) if not documenter.parse_name(): self.warn('[astropyautosummary] failed to parse name %s' % real_name) items.append((display_name, '', '', real_name)) continue if not documenter.import_object(): self.warn('[astropyautosummary] failed to import object %s' % real_name) items.append((display_name, '', '', real_name)) continue # -- Grab the signature sig = documenter.format_signature() if not sig: sig = '' else: max_chars = max(10, max_item_chars - len(display_name)) sig = mangle_signature(sig, max_chars=max_chars) sig = sig.replace('*', r'\*') # -- Grab the summary doc = list(documenter.process_doc(documenter.get_doc())) while doc and not doc[0].strip(): doc.pop(0) m = _itemsummrex.search(" ".join(doc).strip()) if m: summary = m.group(1).strip() elif doc: summary = doc[0].strip() else: summary = '' items.append((display_name, sig, summary, real_name)) return items def setup(app): # need autosummary, of course app.setup_extension('sphinx.ext.autosummary') # Don't make the replacement if Sphinx is at least 1.2 if LooseVersion(sphinx.__version__) < LooseVersion('1.2.0'): # this replaces the default autosummary with the astropy one app.add_directive('autosummary', AstropyAutosummary) elif LooseVersion(sphinx.__version__) < LooseVersion('1.3.2'): # Patch Autosummary again, but to work around an upstream bug; see # https://github.com/astropy/astropy-helpers/issues/172 class PatchedAutosummary(Autosummary): def get_items(self, names): self.genopt['imported-members'] = True return Autosummary.get_items(self, names) app.add_directive('autosummary', PatchedAutosummary) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/doctest.py0000644001134200020070000000243612602613466025145 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This is a set of three directives that allow us to insert metadata about doctests into the .rst files so the testing framework knows which tests to skip. This is quite different from the doctest extension in Sphinx itself, which actually does something. For astropy, all of the testing is centrally managed from py.test and Sphinx is not used for running tests. """ import re from docutils.nodes import literal_block from sphinx.util.compat import Directive class DoctestSkipDirective(Directive): has_content = True def run(self): # Check if there is any valid argument, and skip it. Currently only # 'win32' is supported in astropy.tests.pytest_plugins. 
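        # Editorial aside (illustrative, hypothetical doctest content): in
        # the documentation sources these directives are written roughly as
        #
        #     .. doctest-skip::
        #
        #         >>> import some_optional_dependency
        #
        #     .. doctest-requires:: scipy
        #
        #         >>> import scipy
        #
        # and this directive simply re-emits its content as a literal block,
        # leaving the actual skip logic to the py.test-based test runner.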
if re.match('win32', self.content[0]): self.content = self.content[2:] code = '\n'.join(self.content) return [literal_block(code, code)] class DoctestRequiresDirective(DoctestSkipDirective): # This is silly, but we really support an unbounded number of # optional arguments optional_arguments = 64 def setup(app): app.add_directive('doctest-requires', DoctestRequiresDirective) app.add_directive('doctest-skip', DoctestSkipDirective) app.add_directive('doctest-skip-all', DoctestSkipDirective) astropy-helpers-1.1.1/astropy_helpers/sphinx/ext/__init__.py0000644001134200020070000000013612602613466025232 0ustar embrayscience00000000000000from __future__ import division, absolute_import, print_function from .numpydoc import setup astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/0000755001134200020070000000000012637041310023575 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/0000755001134200020070000000000012637041310027311 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/searchbox.html0000644001134200020070000000042012602613466032162 0ustar embrayscience00000000000000{%- if pagename != "search" %}
    {%- endif %} astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html0000644001134200020070000000011112602613466032147 0ustar embrayscience00000000000000

    Table of Contents

    {{ toctree(maxdepth=-1, titles_only=true) }} astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html0000644001134200020070000000004212602613466032004 0ustar embrayscience00000000000000

    Page Contents

    {{ toc }} astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/layout.html0000644001134200020070000000655112602613466031534 0ustar embrayscience00000000000000{% extends "basic/layout.html" %} {# Collapsible sidebar script from default/layout.html in Sphinx #} {% set script_files = script_files + ['_static/sidebar.js'] %} {# Add the google webfonts needed for the logo #} {% block extrahead %} {% if not embedded %}{% endif %} {% endblock %} {% block header %}
    {{ theme_logotext1 }}{{ theme_logotext2 }}{{ theme_logotext3 }}
    • Index
    • Modules
    • {% block sidebarsearch %} {% include "searchbox.html" %} {% endblock %}
    {% endblock %} {% block relbar1 %} {% endblock %} {# Silence the bottom relbar. #} {% block relbar2 %}{% endblock %} {%- block footer %}

    {%- if edit_on_github %} {{ edit_on_github_page_message }}   {%- endif %} {%- if show_source and has_source and sourcename %} {{ _('Page Source') }} {%- endif %}   Back to Top

    {%- if show_copyright %} {%- if hasdoc('copyright') %} {% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
    {%- else %} {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
    {%- endif %} {%- endif %} {%- if show_sphinx %} {% trans sphinx_version=sphinx_version|e %}Created using Sphinx {{ sphinx_version }}.{% endtrans %}   {%- endif %} {%- if last_updated %} {% trans last_updated=last_updated|e %}Last built {{ last_updated }}.{% endtrans %}
    {%- endif %}

    {%- endblock %} astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/0000755001134200020070000000000012637041310030600 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico0000644001134200020070000010033412602613466034027 0ustar embrayscience00000000000000@@ (@F  (n@ ( –P (¾Y(@€ ÿÿ ÿ* ÿVýy ý›¬±ûÕúûüí÷ùüìýýýíýýýí§ªû× ü«ý‹ýoüKÿ ÿ ÿÿîûHýýÌúìýó ûøûü°µÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ˜Ÿÿÿûþ ûúûöýòúéûÇýÿIóÿ¿ÿ!ýyûÏüðüúÿÿÿÿÿÿ ÿÿ>KÿÿÏÓÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ‡ÿÿ ÿÿÿÿÿÿÿÿÿÿÿÿûúÿðûÚü—û?ã ÿÿý|ûÚýöÿÿÿÿÿÿþÿýÿýÿLYþÿÌÏÿÿûüÿÿÿÿÿÿÿÿÿÿÿÿÿÿþþÿÿerþÿýÿýÿýÿýÿýÿþÿÿÿÿÿÿÿüûûíú½ÿZÿÿüJûÇýöÿÿÿÿþÿýÿýÿýÿ*<þÿ™¡ÿÿêíÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÉÎÿÿ3Eýÿ ýÿýÿýÿýÿýÿýÿýÿýÿþÿÿÿÿÿÿÿÿðûÄüXÿÿü•üêÿÿÿÿþÿýÿýÿýÿ1ýÿ“þÿäèÿÿûûÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿrþÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿÿÿÿÿÿÿüïù´ÿ=ÿø$û®ýúÿÿÿÿýÿýÿýÿýÿ.Cþÿ·¿ÿÿùúÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ”Ÿþÿ*ýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿÿÿÿÿÿÿúâû†ÿÿ3üÃüþÿÿýÿýÿýÿýÿýÿ^pþÿÓÙÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ÷øÿÿ…“þÿ+ýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿÿÿÿÿýöü¸ÿ7 ÿ(øÃ!ÿÿ ÿÿ ýÿ ýÿ ýÿýÿýÿsƒþÿåéÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÅÍþÿPfýÿ %ýÿýÿýÿ ýÿ ýÿ ýÿ ýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿýÿþÿÿÿ ÿÿûÙ ÿi#ÿ úº#ÿÿ"ÿÿ"ýÿ"ýÿ"ýÿ ýÿ ýÿy‹ÿÿîðÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâåþÿu‡ýÿ =ýÿýÿýÿ!ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ!ýÿ!ÿÿ!ÿÿ ûìýƒÿÿ$ý›$ýÿ$ÿÿ$ýÿ$ýÿ$ýÿ"ýÿýÿmþÿëîÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýýÿÿ¦²þÿ8Týÿ%ýÿýÿ!ýÿ$ýÿ$ýÿ$ýÿ$ýÿ$ýÿ#ýÿ#ýÿ#ýÿ"ýÿ"ýÿ"ýÿ"ýÿ#ýÿ#ýÿ#ýÿ$ýÿ$ýÿ$ýÿ$ýÿ$ýÿ$ýÿ#ýÿ#ýÿ$þÿ$ÿÿ#üõ!ü’'ÿ &ÿj'ûñ(ÿÿ&ýÿ&ýÿ&ýÿ&ýÿ!ýÿNiþÿÞãÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿçëþÿmƒýÿ5ýÿ ýÿ ýÿ%ýÿ&ýÿ&ýÿ&ýÿ%ýÿ%ýÿ$ýÿ#ýÿ#ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ"ýÿ#ýÿ#ýÿ$ýÿ$ýÿ%ýÿ&ýÿ&ýÿ&ýÿ&ýÿ&ýÿ&ýÿ&þÿ&ÿÿ%ýø&üš3ÿ)ÿ%(ûØ*ÿÿ)ýÿ)ýÿ)ýÿ(ýÿ%ýÿ'IþÿÇÐÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÙßþÿUqýÿ'ýÿ!ýÿ'ýÿ)ýÿ(ýÿ(ýÿ(ýÿ'ýÿ&ýÿ#ýÿ(ýÿ"DþÿPkþÿp†þÿ‡™þÿ’£þÿ‘¢þÿ„˜þÿm„þÿNjþÿ!Cþÿ&ýÿ"ýÿ%ýÿ&ýÿ'ýÿ(ýÿ(ýÿ(ýÿ(ýÿ(ýÿ(ýÿ(ÿÿ&üú'ü˜9ÿ ÿ+ý”*þÿ+ÿÿ*þÿ+þÿ+þÿ)þÿ*þÿ®ÿÿþþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàåÿÿQoþÿ%þÿ%þÿ+þÿ+þÿ+þÿ+þÿ*þÿ'þÿ$þÿ"Gþÿt‹ÿÿ¶ÂÿÿÔÛÿÿäèÿÿïñÿÿöøÿÿúûÿÿúûÿÿö÷ÿÿîñÿÿãèÿÿÔÛÿÿ¹Åÿÿ}’ÿÿ/Rþÿ'þÿ&þÿ)þÿ)þÿ*þÿ*þÿ*þÿ*þÿ*þÿ*ÿÿ'ûù'ý‰Uª+ÿ;-üæ.ÿÿ-þÿ-þÿ-þÿ,þÿ)þÿVtÿÿêîÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿíñÿÿ]zþÿ(þÿ'þÿ-þÿ-þÿ-þÿ-þÿ,þÿ$þÿFþÿ‰ÿÿÒÚÿÿðóÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿóõÿÿ×Þÿÿ£³ÿÿ?`ÿÿ)þÿ)þÿ,þÿ,þÿ,þÿ,þÿ,þÿ,þÿ.ÿÿ,üñ.ýt@ÿ/ý™/þÿ0ÿÿ0þÿ0þÿ0þÿ-þÿ :þÿ¿Ìÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿz“þÿ1þÿ(þÿ0þÿ/þÿ.þÿ/þÿ,þÿ+þÿZwÿÿÇÑÿÿøúÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÜâÿÿ”§ÿÿ#Lþÿ'þÿ.þÿ/þÿ/þÿ/þÿ/þÿ/þÿ0ÿÿ.üâ-ÿI0ÿ%0ýá2ÿÿ1þÿ1þÿ1þÿ0þÿ/þÿbÿÿðóÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ¯¿þÿHþÿ)þÿ1þÿ2þÿ2þÿ1þÿ.þÿ8þÿx’ÿÿæëÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüýÿÿÄÏÿÿTsÿÿ+þÿ/þÿ1þÿ0þÿ0þÿ0þÿ1ÿÿ1ÿÿ/üÈ2ÿ$@¿3ýn3üø5ÿÿ3þÿ3þÿ3þÿ1þÿ8þÿÀÌÿÿþþÿÿÿÿÿÿÿÿÿÿÿÿÿÿëïÿÿEkþÿ,þÿ2þÿ4þÿ3þÿ4þÿ1þÿ8þÿ€šÿÿñôÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿôöÿÿÕÞÿÿËÕÿÿËÖÿÿ×ßÿÿõöÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâçÿÿqÿÿ1þÿ0þÿ3þÿ2þÿ2þÿ2þÿ3ÿÿ2þÿ0üŸ@ÿã 2ü¶6üþ6ÿÿ6þÿ6þÿ5þÿ5þÿDlþÿèíÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ”«þÿ @þÿ/þÿ6þÿ5þÿ5þÿ4þÿ0þÿt‘ÿÿñôÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿäéÿÿ—­þÿ\~þÿ?eþÿ1\þÿ,Xþÿ,Xþÿ2]þÿ?fþÿZ}þÿ§þÿÖßÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿïòÿÿu’ÿÿ3þÿ3þÿ5þÿ5þÿ5þÿ5þÿ6ÿÿ4üð4ÿX8ÿ 5ûå:ÿÿ8þÿ8þÿ8þÿ7þÿ7þÿ–®ÿÿûüÿÿÿÿÿÿÿÿÿÿÿÿÿÿðôÿÿ@jþÿ0þÿ8þÿ8þÿ8þÿ8þÿ.þÿRxÿÿæëÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿñóÿÿާþÿ:eþÿEþÿ1þÿ*þÿ-þÿ.þÿ.þÿ-þÿ*þÿ0þÿCþÿ3]þÿj‹þÿËÖþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿíòÿÿlŒÿÿ1þÿ6þÿ8þÿ7þÿ7þÿ8þÿ8ÿÿ5ýÌ5ÿ:ÿO9üô<ÿÿ:þÿ:þÿ:þÿ8þÿ 
astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.js0000644001134200020070000001155312602613466032565 0ustar embrayscience00000000000000/* * sidebar.js * ~~~~~~~~~~ * * This script makes the Sphinx sidebar collapsible. * * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds * in .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton * used to collapse and expand the sidebar. * * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden * and the width of the sidebar and the margin-left of the document * are decreased. When the sidebar is expanded the opposite happens.
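 * (Concretely, collapse_sidebar() below hides .sphinxsidebarwrapper and
 * shrinks the sidebar width and the document's margin-left to 12px, while
 * expand_sidebar() restores the values saved when the page was loaded.)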
* This script saves a per-browser/per-session cookie used to * remember the position of the sidebar among the pages. * Once the browser is closed the cookie is deleted and the position * reset to the default (expanded). * * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. * */ $(function() { // global elements used by the functions. // the 'sidebarbutton' element is defined as global after its // creation, in the add_sidebar_button function var bodywrapper = $('.bodywrapper'); var sidebar = $('.sphinxsidebar'); var sidebarwrapper = $('.sphinxsidebarwrapper'); // for some reason, the document has no sidebar; do not run into errors if (!sidebar.length) return; // original margin-left of the bodywrapper and width of the sidebar // with the sidebar expanded var bw_margin_expanded = bodywrapper.css('margin-left'); var ssb_width_expanded = sidebar.width(); // margin-left of the bodywrapper and width of the sidebar // with the sidebar collapsed var bw_margin_collapsed = 12; var ssb_width_collapsed = 12; // custom colors var dark_color = '#404040'; var light_color = '#505050'; function sidebar_is_collapsed() { return sidebarwrapper.is(':not(:visible)'); } function toggle_sidebar() { if (sidebar_is_collapsed()) expand_sidebar(); else collapse_sidebar(); } function collapse_sidebar() { sidebarwrapper.hide(); sidebar.css('width', ssb_width_collapsed); bodywrapper.css('margin-left', bw_margin_collapsed); sidebarbutton.css({ 'margin-left': '-1px', 'height': bodywrapper.height(), 'border-radius': '3px' }); sidebarbutton.find('span').text('»'); sidebarbutton.attr('title', _('Expand sidebar')); document.cookie = 'sidebar=collapsed'; } function expand_sidebar() { bodywrapper.css('margin-left', bw_margin_expanded); sidebar.css('width', ssb_width_expanded); sidebarwrapper.show(); sidebarbutton.css({ 'margin-left': ssb_width_expanded - 12, 'height': bodywrapper.height(), 'border-radius': '0px 3px 3px 0px' }); sidebarbutton.find('span').text('«'); sidebarbutton.attr('title', _('Collapse sidebar')); document.cookie = 'sidebar=expanded'; } function add_sidebar_button() { sidebarwrapper.css({ 'float': 'left', 'margin-right': '0', 'width': ssb_width_expanded - 18 }); // create the button sidebar.append('
        <div id="sidebarbutton"><span>«</span></div>
    '); var sidebarbutton = $('#sidebarbutton'); // find the height of the viewport to center the '<<' in the page var viewport_height; if (window.innerHeight) viewport_height = window.innerHeight; else viewport_height = $(window).height(); var sidebar_offset = sidebar.offset().top; var sidebar_height = Math.max(bodywrapper.height(), sidebar.height()); sidebarbutton.find('span').css({ 'font-family': '"Lucida Grande",Arial,sans-serif', 'display': 'block', 'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10, 'width': 12, 'position': 'fixed', 'text-align': 'center' }); sidebarbutton.click(toggle_sidebar); sidebarbutton.attr('title', _('Collapse sidebar')); sidebarbutton.css({ 'color': '#FFFFFF', 'background-color': light_color, 'border': '1px solid ' + light_color, 'border-radius': '0px 3px 3px 0px', 'font-size': '1.2em', 'cursor': 'pointer', 'height': sidebar_height, 'padding-top': '1px', 'margin': '-1px', 'margin-left': ssb_width_expanded - 12 }); sidebarbutton.hover( function () { $(this).css('background-color', dark_color); }, function () { $(this).css('background-color', light_color); } ); } function set_position_from_cookie() { if (!document.cookie) return; var items = document.cookie.split(';'); for(var k=0; k<items.length; k++) { var key_val = items[k].split('='); var key = key_val[0]; if (key == 'sidebar') { var value = key_val[1]; if ((value == 'collapsed') && (!sidebar_is_collapsed())) collapse_sidebar(); else if ((value == 'expanded') && (sidebar_is_collapsed())) expand_sidebar(); } } } add_sidebar_button(); var sidebarbutton = $('#sidebarbutton'); set_position_from_cookie(); });
astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.svg0000644001134200020070000001103212602613466034050 0ustar embrayscience00000000000000
astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout.svg0000644001134200020070000001212112602613466034575 0ustar embrayscience00000000000000
astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png0000644001134200020070000000353412602613466034351 0ustar embrayscience00000000000000
astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutton.js0000644001134200020070000000467712602613466033363 0ustar embrayscience00000000000000$(document).ready(function() { /* Add a [>>>] button on the top-right corner of code samples to hide * the >>> and ... prompts and the output and thus make the code * copyable. */ var div = $('.highlight-python .highlight,' + '.highlight-python3 .highlight') var pre = div.find('pre'); // get the styles from the current theme pre.parent().parent().css('position', 'relative'); var hide_text = 'Hide the prompts and output'; var show_text = 'Show the prompts and output'; var border_width = pre.css('border-top-width'); var border_style = pre.css('border-top-style'); var border_color = pre.css('border-top-color'); var button_styles = { 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0', 'border-color': border_color, 'border-style': border_style, 'border-width': border_width, 'color': border_color, 'text-size': '75%', 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em', 'border-radius': '0 3px 0 0' } // create and add the button to all the code blocks that contain >>> div.each(function(index) { var jthis = $(this); if (jthis.find('.gp').length > 0) { var button = $('<span class="copybutton">&gt;&gt;&gt;</span>'); button.css(button_styles) button.attr('title', hide_text); jthis.prepend(button); } // tracebacks (.gt) contain bare text elements that need to be // wrapped in a span to work with .nextUntil() (see later) jthis.find('pre:has(.gt)').contents().filter(function() { return ((this.nodeType == 3) && (this.data.trim().length > 0)); }).wrap('<span>'); }); // define the behavior of the button when it's clicked $('.copybutton').toggle( function() { var button = $(this); button.parent().find('.go, .gp, .gt').hide(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden'); button.css('text-decoration', 'line-through'); button.attr('title', show_text); }, function() { var button = $(this); button.parent().find('.go, .gp, .gt').show(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible'); button.css('text-decoration', 'none'); button.attr('title', hide_text); }); });
astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.css0000644001134200020070000002712012623702444035037 0ustar embrayscience00000000000000/*!
* Bootstrap v1.4.0 * * Copyright 2011 Twitter, Inc * Licensed under the Apache License v2.0 * http://www.apache.org/licenses/LICENSE-2.0 * * Heavily modified by Kyle Barbary for the AstroPy Project for use with Sphinx. */ @import url("basic.css"); body { background-color: #ffffff; margin: 0; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 13px; font-weight: normal; line-height: 18px; color: #404040; } /* Hyperlinks ----------------------------------------------------------------*/ a { color: #0069d6; text-decoration: none; line-height: inherit; font-weight: inherit; } a:hover { color: #00438a; text-decoration: underline; } /* Typography ----------------------------------------------------------------*/ h1,h2,h3,h4,h5,h6 { color: #404040; margin: 0.7em 0 0 0; line-height: 1.5em; } h1 { font-size: 24px; margin: 0; } h2 { font-size: 21px; line-height: 1.2em; margin: 1em 0 0.5em 0; border-bottom: 1px solid #404040; } h3 { font-size: 18px; } h4 { font-size: 16px; } h5 { font-size: 14px; } h6 { font-size: 13px; text-transform: uppercase; } p { font-size: 13px; font-weight: normal; line-height: 18px; margin-top: 0px; margin-bottom: 9px; } ul, ol { margin-left: 0; padding: 0 0 0 25px; } ul ul, ul ol, ol ol, ol ul { margin-bottom: 0; } ul { list-style: disc; } ol { list-style: decimal; } li { line-height: 18px; color: #404040; } ul.unstyled { list-style: none; margin-left: 0; } dl { margin-bottom: 18px; } dl dt, dl dd { line-height: 18px; } dl dd { margin-left: 9px; } hr { margin: 20px 0 19px; border: 0; border-bottom: 1px solid #eee; } strong { font-style: inherit; font-weight: bold; } em { font-style: italic; font-weight: inherit; line-height: inherit; } .muted { color: #bfbfbf; } address { display: block; line-height: 18px; margin-bottom: 18px; } code, pre { padding: 0 3px 2px; font-family: monospace; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } tt { font-family: monospace; } code { padding: 1px 3px; } pre { display: block; padding: 8.5px; margin: 0 0 18px; line-height: 18px; border: 1px solid #ddd; border: 1px solid rgba(0, 0, 0, 0.12); -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; white-space: pre; word-wrap: break-word; } img { margin: 9px 0; } /* format inline code with a rounded box */ tt, code { margin: 0 2px; padding: 0 5px; border: 1px solid #ddd; border: 1px solid rgba(0, 0, 0, 0.12); border-radius: 3px; } code.xref, a code { margin: 0; padding: 0 1px 0 1px; background-color: none; border: none; } /* all code has same box background color, even in headers */ h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt, h1 code, h2 code, h3 code, h4 code, h5 code, h6 code, pre, code, tt { background-color: #f8f8f8; } /* override box for links & other sphinx-specifc stuff */ tt.xref, a tt, tt.descname, tt.descclassname { padding: 0 1px 0 1px; border: none; } /* override box for related bar at the top of the page */ .related tt { border: none; padding: 0 1px 0 1px; background-color: transparent; font-weight: bold; } th { background-color: #dddddd; } .viewcode-back { font-family: sans-serif; } div.viewcode-block:target { background-color: #f4debf; border-top: 1px solid #ac9; border-bottom: 1px solid #ac9; } table.docutils { border-spacing: 5px; border-collapse: separate; } /* Topbar --------------------------------------------------------------------*/ div.topbar { height: 40px; position: absolute; top: 0; left: 0; right: 0; z-index: 10000; padding: 0px 10px; background-color: #222; background-color: #222222; background-repeat: 
repeat-x; background-image: -khtml-gradient(linear, left top, left bottom, from(#333333), to(#222222)); background-image: -moz-linear-gradient(top, #333333, #222222); background-image: -ms-linear-gradient(top, #333333, #222222); background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #333333), color-stop(100%, #222222)); background-image: -webkit-linear-gradient(top, #333333, #222222); background-image: -o-linear-gradient(top, #333333, #222222); background-image: linear-gradient(top, #333333, #222222); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333', endColorstr='#222222', GradientType=0); } div.topbar a.brand { font-family: 'Source Sans Pro', sans-serif; font-size: 26px; color: #ffffff; font-weight: 600; text-decoration: none; float: left; display: block; height: 32px; padding: 8px 12px 0px 45px; margin-left: -10px; background: transparent url("astropy_logo_32.png") no-repeat 10px 4px; background-image: url("astropy_logo.svg"), none; background-size: 32px 32px; } #logotext1 { } #logotext2 { font-weight:200; color: #ff5000; } #logotext3 { font-weight:200; } div.topbar .brand:hover, div.topbar ul li a.homelink:hover { background-color: #333; background-color: rgba(255, 255, 255, 0.05); } div.topbar ul { font-size: 110%; list-style: none; margin: 0; padding: 0 0 0 10px; float: right; color: #bfbfbf; text-align: center; text-decoration: none; height: 100%; } div.topbar ul li { float: left; display: inline; height: 30px; margin: 5px; padding: 0px; } div.topbar ul li a { color: #bfbfbf; text-decoration: none; padding: 5px; display: block; height: auto; text-align: center; vertical-align: middle; border-radius: 4px; } div.topbar ul li a:hover { color: #ffffff; text-decoration: none; } div.topbar ul li a.homelink { width: 112px; display: block; height: 20px; padding: 5px 0px; background: transparent url("astropy_linkout_20.png") no-repeat 10px 5px; background-image: url("astropy_linkout.svg"), none; background-size: 91px 20px; } div.topbar form { text-align: left; margin: 0 0 0 5px; position: relative; filter: alpha(opacity=100); -khtml-opacity: 1; -moz-opacity: 1; opacity: 1; } div.topbar input { background-color: #444; background-color: rgba(255, 255, 255, 0.3); font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: normal; font-weight: 13px; line-height: 1; padding: 4px 9px; color: #ffffff; color: rgba(255, 255, 255, 0.75); border: 1px solid #111; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; -webkit-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25); -moz-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25); box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.1), 0 1px 0px rgba(255, 255, 255, 0.25); -webkit-transition: none; -moz-transition: none; -ms-transition: none; -o-transition: none; transition: none; } div.topbar input:-moz-placeholder { color: #e6e6e6; } div.topbar input::-webkit-input-placeholder { color: #e6e6e6; } div.topbar input:hover { background-color: #bfbfbf; background-color: rgba(255, 255, 255, 0.5); color: #ffffff; } div.topbar input:focus, div.topbar input.focused { outline: 0; background-color: #ffffff; color: #404040; text-shadow: 0 1px 0 #ffffff; border: 0; padding: 5px 10px; -webkit-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); -moz-box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); box-shadow: 0 0 3px rgba(0, 0, 0, 0.15); } /* Relation bar (breadcrumbs, prev, next) ------------------------------------*/ div.related { height: 21px; 
width: auto; margin: 0 10px; position: absolute; top: 42px; clear: both; left: 0; right: 0; z-index: 10000; font-size: 100%; vertical-align: middle; background-color: #fff; border-bottom: 1px solid #bbb; } div.related ul { padding: 0; margin: 0; } /* Footer --------------------------------------------------------------------*/ footer { display: block; margin: 10px 10px 0px; padding: 10px 0 0 0; border-top: 1px solid #bbb; } .pull-right { float: right; width: 30em; text-align: right; } /* Sphinx sidebar ------------------------------------------------------------*/ div.sphinxsidebar { font-size: inherit; border-radius: 3px; background-color: #eee; border: 1px solid #bbb; } div.sphinxsidebarwrapper { padding: 0px 0px 0px 5px; } div.sphinxsidebar h3 { font-family: 'Trebuchet MS', sans-serif; font-size: 1.4em; font-weight: normal; margin: 5px 0px 0px 5px; padding: 0; line-height: 1.6em; } div.sphinxsidebar h4 { font-family: 'Trebuchet MS', sans-serif; font-size: 1.3em; font-weight: normal; margin: 5px 0 0 0; padding: 0; } div.sphinxsidebar p { } div.sphinxsidebar p.topless { margin: 5px 10px 10px 10px; } div.sphinxsidebar ul { margin: 0px 0px 0px 5px; padding: 0; } div.sphinxsidebar ul ul { margin-left: 15px; list-style-type: disc; } /* If showing the global TOC (toctree), color the current page differently */ div.sphinxsidebar a.current { color: #404040; } div.sphinxsidebar a.current:hover { color: #404040; } /* document, documentwrapper, body, bodywrapper ----------------------------- */ div.document { margin-top: 72px; margin-left: 10px; margin-right: 10px; } div.documentwrapper { float: left; width: 100%; } div.body { background-color: #ffffff; padding: 0 0 0px 20px; } div.bodywrapper { margin: 0 0 0 230px; max-width: 55em; } /* Header links ------------------------------------------------------------- */ a.headerlink { font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; } a.headerlink:hover { background-color: #0069d6; color: white; text-docoration: none; } /* Admonitions and warnings ------------------------------------------------- */ /* Shared by admonitions and warnings */ div.admonition, div.warning { padding: 0px; border-radius: 3px; -moz-border-radius: 3px; -webkit-border-radius: 3px; } div.admonition p, div.warning p { margin: 0.5em 1em 0.5em 1em; padding: 0; } div.admonition pre, div.warning pre { margin: 0.4em 1em 0.4em 1em; } div.admonition p.admonition-title, div.warning p.admonition-title { margin: 0; padding: 0.1em 0 0.1em 0.5em; color: white; font-weight: bold; font-size: 1.1em; } div.admonition ul, div.admonition ol, div.warning ul, div.warning ol { margin: 0.1em 0.5em 0.5em 3em; padding: 0; } /* Admonitions only */ div.admonition { border: 1px solid #609060; background-color: #e9ffe9; } div.admonition p.admonition-title { background-color: #70A070; } /* Warnings only */ div.warning { border: 1px solid #900000; background-color: #ffe9e9; } div.warning p.admonition-title { background-color: #b04040; } /* Figures ------------------------------------------------------------------ */ .figure.align-center { clear: none; } /* This is a div for containing multiple figures side-by-side, for use with * .. 
 container:: figures */ div.figures { border: 1px solid #CCCCCC; background-color: #F8F8F8; margin: 1em; text-align: center; } div.figures .figure { clear: none; float: none; display: inline-block; border: none; margin-left: 0.5em; margin-right: 0.5em; } .field-list th { white-space: nowrap; } table.field-list { border-spacing: 0px; margin-left: 1px; border-left: 5px solid rgb(238, 238, 238) !important; } table.field-list th.field-name { display: inline-block; padding: 1px 8px 1px 5px; white-space: nowrap; background-color: rgb(238, 238, 238); border-radius: 0 3px 3px 0; -webkit-border-radius: 0 3px 3px 0; } astropy-helpers-1.1.1/astropy_helpers/sphinx/themes/bootstrap-astropy/theme.conf0000644001134200020070000000030012602613466031264 0ustar embrayscience00000000000000# AstroPy theme based on Twitter Bootstrap CSS [theme] inherit = basic stylesheet = bootstrap-astropy.css pygments_style = sphinx [options] logotext1 = astro logotext2 = py logotext3 = :docs astropy-helpers-1.1.1/astropy_helpers/sphinx/__init__.py0000644001134200020070000000041412602613466024431 0ustar embrayscience00000000000000""" This package contains utilities and extensions for the Astropy sphinx documentation. In particular, the `astropy.sphinx.conf` should be imported by the sphinx ``conf.py`` file for affiliated packages that wish to make use of the Astropy documentation format. """ astropy-helpers-1.1.1/astropy_helpers/commands/0000755001134200020070000000000012637041310022600 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/commands/src/0000755001134200020070000000000012637041310023367 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/commands/src/compiler.c0000644001134200020070000000573112602613465025363 0ustar embrayscience00000000000000#include <Python.h> /*************************************************************************** * Macros for determining the compiler version. * * These are borrowed from boost, and majorly abridged to include only * the compilers we care about. ***************************************************************************/ #ifndef PY3K #if PY_MAJOR_VERSION >= 3 #define PY3K 1 #else #define PY3K 0 #endif #endif #define STRINGIZE(X) DO_STRINGIZE(X) #define DO_STRINGIZE(X) #X #if defined __clang__ /* Clang C++ emulates GCC, so it has to appear early.
*/ # define COMPILER "Clang version " __clang_version__ #elif defined(__INTEL_COMPILER) || defined(__ICL) || defined(__ICC) || defined(__ECC) /* Intel */ # if defined(__INTEL_COMPILER) # define INTEL_VERSION __INTEL_COMPILER # elif defined(__ICL) # define INTEL_VERSION __ICL # elif defined(__ICC) # define INTEL_VERSION __ICC # elif defined(__ECC) # define INTEL_VERSION __ECC # endif # define COMPILER "Intel C compiler version " STRINGIZE(INTEL_VERSION) #elif defined(__GNUC__) /* gcc */ # define COMPILER "GCC version " __VERSION__ #elif defined(__SUNPRO_CC) /* Sun Workshop Compiler */ # define COMPILER "Sun compiler version " STRINGIZE(__SUNPRO_CC) #elif defined(_MSC_VER) /* Microsoft Visual C/C++ Must be last since other compilers define _MSC_VER for compatibility as well */ # if _MSC_VER < 1200 # define COMPILER_VERSION 5.0 # elif _MSC_VER < 1300 # define COMPILER_VERSION 6.0 # elif _MSC_VER == 1300 # define COMPILER_VERSION 7.0 # elif _MSC_VER == 1310 # define COMPILER_VERSION 7.1 # elif _MSC_VER == 1400 # define COMPILER_VERSION 8.0 # elif _MSC_VER == 1500 # define COMPILER_VERSION 9.0 # elif _MSC_VER == 1600 # define COMPILER_VERSION 10.0 # else # define COMPILER_VERSION _MSC_VER # endif # define COMPILER "Microsoft Visual C++ version " STRINGIZE(COMPILER_VERSION) #else /* Fallback */ # define COMPILER "Unknown compiler" #endif /*************************************************************************** * Module-level ***************************************************************************/ struct module_state { /* The Sun compiler can't handle empty structs */ #if defined(__SUNPRO_C) || defined(_MSC_VER) int _dummy; #endif }; #if PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "_compiler", NULL, sizeof(struct module_state), NULL, NULL, NULL, NULL, NULL }; #define INITERROR return NULL PyMODINIT_FUNC PyInit__compiler(void) #else #define INITERROR return PyMODINIT_FUNC init_compiler(void) #endif { PyObject* m; #if PY3K m = PyModule_Create(&moduledef); #else m = Py_InitModule3("_compiler", NULL, NULL); #endif if (m == NULL) INITERROR; PyModule_AddStringConstant(m, "compiler", COMPILER); #if PY3K return m; #endif } astropy-helpers-1.1.1/astropy_helpers/commands/_dummy.py0000644001134200020070000000557412630421426024462 0ustar embrayscience00000000000000""" Provides a base class for a 'dummy' setup.py command that has no functionality (probably due to a missing requirement). This dummy command can raise an exception when it is run, explaining to the user what dependencies must be met to use this command. The reason this is at all tricky is that we want the command to be able to provide this message even when the user passes arguments to the command. If we don't know ahead of time what arguments the command can take, this is difficult, because distutils does not allow unknown arguments to be passed to a setup.py command. This hacks around that restriction to provide a useful error message even when a user passes arguments to the dummy implementation of a command. Use this like: try: from some_dependency import SetupCommand except ImportError: from ._dummy import _DummyCommand class SetupCommand(_DummyCommand): description = \ 'Implementation of SetupCommand from some_dependency; ' 'some_dependency must be installed to run this command' # This is the message that will be raised when a user tries to # run this command--define it as a class attribute. error_msg = \ "The 'setup_command' command requires the some_dependency " "package to be installed and importable." 
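Because attribute access on the dummy command raises DistutilsArgError with
``error_msg``, the user sees the intended explanation instead of distutils'
generic "unrecognized option" complaint. For illustration only (hypothetical
package/command names, output abbreviated), a run might then look like:

    $ python setup.py setup_command --some-option
    ...
    error: The 'setup_command' command requires the some_dependency
    package to be installed and importable.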
""" import sys from setuptools import Command from distutils.errors import DistutilsArgError from textwrap import dedent class _DummyCommandMeta(type): """ Causes an exception to be raised on accessing attributes of a command class so that if ``./setup.py command_name`` is run with additional command-line options we can provide a useful error message instead of the default that tells users the options are unrecognized. """ def __init__(cls, name, bases, members): if bases == (Command, object): # This is the _DummyCommand base class, presumably return if not hasattr(cls, 'description'): raise TypeError( "_DummyCommand subclass must have a 'description' " "attribute.") if not hasattr(cls, 'error_msg'): raise TypeError( "_DummyCommand subclass must have an 'error_msg' " "attribute.") def __getattribute__(cls, attr): if attr in ('description', 'error_msg'): # Allow cls.description to work so that `./setup.py # --help-commands` still works return super(_DummyCommandMeta, cls).__getattribute__(attr) raise DistutilsArgError(cls.error_msg) if sys.version_info[0] < 3: exec(dedent(""" class _DummyCommand(Command, object): __metaclass__ = _DummyCommandMeta """)) else: exec(dedent(""" class _DummyCommand(Command, object, metaclass=_DummyCommandMeta): pass """)) astropy-helpers-1.1.1/astropy_helpers/commands/install_lib.py0000644001134200020070000000100012602613465025445 0ustar embrayscience00000000000000from setuptools.command.install_lib import install_lib as SetuptoolsInstallLib from ..utils import _get_platlib_dir class AstropyInstallLib(SetuptoolsInstallLib): user_options = SetuptoolsInstallLib.user_options[:] boolean_options = SetuptoolsInstallLib.boolean_options[:] def finalize_options(self): build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) self.build_dir = platlib_dir SetuptoolsInstallLib.finalize_options(self) astropy-helpers-1.1.1/astropy_helpers/commands/build_ext.py0000644001134200020070000004566512636105326025160 0ustar embrayscience00000000000000import errno import os import re import shlex import shutil import subprocess import sys import textwrap from distutils import log, ccompiler, sysconfig from distutils.cmd import Command from distutils.core import Extension from distutils.ccompiler import get_default_compiler from setuptools.command.build_ext import build_ext as SetuptoolsBuildExt from ..utils import get_numpy_include_path, invalidate_caches, classproperty from ..version_helpers import get_pkg_version_module def should_build_with_cython(package, release=None): """Returns the previously used Cython version (or 'unknown' if not previously built) if Cython should be used to build extension modules from pyx files. If the ``release`` parameter is not specified an attempt is made to determine the release flag from `astropy.version`. """ try: version_module = __import__(package + '.cython_version', fromlist=['release', 'cython_version']) except ImportError: version_module = None if release is None and version_module is not None: try: release = version_module.release except AttributeError: pass try: cython_version = version_module.cython_version except AttributeError: cython_version = 'unknown' # Only build with Cython if, of course, Cython is installed, we're in a # development version (i.e. not release) or the Cython-generated source # files haven't been created yet (cython_version == 'unknown'). 
The latter # case can happen even when release is True if checking out a release tag # from the repository have_cython = False try: import Cython have_cython = True except ImportError: pass if have_cython and (not release or cython_version == 'unknown'): return cython_version else: return False _compiler_versions = {} def get_compiler_version(compiler): if compiler in _compiler_versions: return _compiler_versions[compiler] # Different flags to try to get the compiler version # TODO: It might be worth making this configurable to support # arbitrary odd compilers; though all bets may be off in such # cases anyway flags = ['--version', '--Version', '-version', '-Version', '-v', '-V'] def try_get_version(flag): process = subprocess.Popen( shlex.split(compiler, posix=('win' not in sys.platform)) + [flag], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() if process.returncode != 0: return 'unknown' output = stdout.strip().decode('latin-1') # Safest bet if not output: # Some compilers return their version info on stderr output = stderr.strip().decode('latin-1') if not output: output = 'unknown' return output for flag in flags: version = try_get_version(flag) if version != 'unknown': break # Cache results to speed up future calls _compiler_versions[compiler] = version return version # TODO: I think this can be reworked without having to create the class # programmatically. def generate_build_ext_command(packagename, release): """ Creates a custom 'build_ext' command that allows for manipulating some of the C extension options at build time. We use a function to build the class since the base class for build_ext may be different depending on certain build-time parameters (for example, we may use Cython's build_ext instead of the default version in distutils). Uses the default distutils.command.build_ext by default. """ class build_ext(SetuptoolsBuildExt, object): package_name = packagename is_release = release _user_options = SetuptoolsBuildExt.user_options[:] _boolean_options = SetuptoolsBuildExt.boolean_options[:] _help_options = SetuptoolsBuildExt.help_options[:] force_rebuild = False _broken_compiler_mapping = [ ('i686-apple-darwin[0-9]*-llvm-gcc-4.2', 'clang') ] # Warning: Spaghetti code ahead. # During setup.py, the setup_helpers module needs the ability to add # items to a command's user_options list. At this stage we don't know # whether or not we can build with Cython, and so don't know for sure # what base class will be used for build_ext; nevertheless we want to # be able to provide a list to add options into. # # Later, once setup() has been called we should have all build # dependencies included via setup_requires available. 
distutils needs # to be able to access the user_options as a *class* attribute before # the class has been initialized, but we do need to be able to # enumerate the options for the correct base class at that point @classproperty def user_options(cls): from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._user_options return cls._final_class.user_options @classproperty def boolean_options(cls): # Similar to user_options above from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._boolean_options return cls._final_class.boolean_options @classproperty def help_options(cls): # Similar to user_options above from distutils import core if core._setup_distribution is None: # We haven't gotten into setup() yet, and the Distribution has # not yet been initialized return cls._help_options return cls._final_class.help_options @classproperty(lazy=True) def _final_class(cls): """ Late determination of what the build_ext base class should be, depending on whether or not Cython is available. """ uses_cython = should_build_with_cython(cls.package_name, cls.is_release) if uses_cython: # We need to decide late on whether or not to use Cython's # build_ext (since Cython may not be available earlier in the # setup.py if it was brought in via setup_requires) from Cython.Distutils import build_ext as base_cls else: base_cls = SetuptoolsBuildExt # Create and return an instance of a new class based on this class # using one of the above possible base classes def merge_options(attr): base = getattr(base_cls, attr) ours = getattr(cls, '_' + attr) all_base = set(opt[0] for opt in base) return base + [opt for opt in ours if opt[0] not in all_base] boolean_options = (base_cls.boolean_options + [opt for opt in cls._boolean_options if opt not in base_cls.boolean_options]) members = dict(cls.__dict__) members.update({ 'user_options': merge_options('user_options'), 'help_options': merge_options('help_options'), 'boolean_options': boolean_options, 'uses_cython': uses_cython, }) # Update the base class for the original build_ext command build_ext.__bases__ = (base_cls, object) # Create a new class for the existing class, but now with the # appropriate base class depending on whether or not to use Cython. # Ensure that object is one of the bases to make a new-style class. 
return type(cls.__name__, (build_ext,), members) def __new__(cls, *args, **kwargs): # By the time the command is actually instantialized, the # Distribution instance for the build has been instantiated, which # means setup_requires has been processed--now we can determine # what base class we can use for the actual build, and return an # instance of a build_ext command that uses that base class (right # now the options being Cython.Distutils.build_ext, or the stock # setuptools build_ext) new_cls = super(build_ext, cls._final_class).__new__( cls._final_class) # Since the new cls is not a subclass of the original cls, we must # manually call its __init__ new_cls.__init__(*args, **kwargs) return new_cls def finalize_options(self): # Add a copy of the _compiler.so module as well, but only if there # are in fact C modules to compile (otherwise there's no reason to # include a record of the compiler used) # Note, self.extensions may not be set yet, but # self.distribution.ext_modules is where any extension modules # passed to setup() can be found self._adjust_compiler() extensions = self.distribution.ext_modules if extensions: src_path = os.path.relpath( os.path.join(os.path.dirname(__file__), 'src')) shutil.copy2(os.path.join(src_path, 'compiler.c'), os.path.join(self.package_name, '_compiler.c')) ext = Extension(self.package_name + '._compiler', [os.path.join(self.package_name, '_compiler.c')]) extensions.insert(0, ext) super(build_ext, self).finalize_options() # Generate if self.uses_cython: try: from Cython import __version__ as cython_version except ImportError: # This shouldn't happen if we made it this far cython_version = None if (cython_version is not None and cython_version != self.uses_cython): self.force_rebuild = True # Update the used cython version self.uses_cython = cython_version # Regardless of the value of the '--force' option, force a rebuild # if the debug flag changed from the last build if self.force_rebuild: self.force = True def run(self): # For extensions that require 'numpy' in their include dirs, # replace 'numpy' with the actual paths np_include = get_numpy_include_path() for extension in self.extensions: if 'numpy' in extension.include_dirs: idx = extension.include_dirs.index('numpy') extension.include_dirs.insert(idx, np_include) extension.include_dirs.remove('numpy') self._check_cython_sources(extension) super(build_ext, self).run() # Update cython_version.py if building with Cython try: cython_version = get_pkg_version_module( packagename, fromlist=['cython_version'])[0] except (AttributeError, ImportError): cython_version = 'unknown' if self.uses_cython and self.uses_cython != cython_version: package_dir = os.path.relpath(packagename) cython_py = os.path.join(package_dir, 'cython_version.py') with open(cython_py, 'w') as f: f.write('# Generated file; do not modify\n') f.write('cython_version = {0!r}\n'.format(self.uses_cython)) if os.path.isdir(self.build_lib): # The build/lib directory may not exist if the build_py # command was not previously run, which may sometimes be # the case self.copy_file(cython_py, os.path.join(self.build_lib, cython_py), preserve_mode=False) invalidate_caches() def _adjust_compiler(self): """ This function detects broken compilers and switches to another. If the environment variable CC is explicitly set, or a compiler is specified on the commandline, no override is performed -- the purpose here is to only override a default compiler. 
The specific compilers with problems are: * The default compiler in XCode-4.2, llvm-gcc-4.2, segfaults when compiling wcslib. The set of broken compilers can be updated by changing the _broken_compiler_mapping variable. It is a list of 2-tuples where the first in the pair is a regular expression matching the version of the broken compiler, and the second is the compiler to change to. """ if 'CC' in os.environ: # Check that CC is not set to llvm-gcc-4.2 c_compiler = os.environ['CC'] try: version = get_compiler_version(c_compiler) except OSError: msg = textwrap.dedent( """ The C compiler set by the CC environment variable: {compiler:s} cannot be found or executed. """.format(compiler=c_compiler)) log.warn(msg) sys.exit(1) for broken, fixed in self._broken_compiler_mapping: if re.match(broken, version): msg = textwrap.dedent( """Compiler specified by CC environment variable ({compiler:s}:{version:s}) will fail to compile {pkg:s}. Please set CC={fixed:s} and try again. You can do this, for example, by running: CC={fixed:s} python setup.py <command> where <command> is the command you ran. """.format(compiler=c_compiler, version=version, pkg=self.package_name, fixed=fixed)) log.warn(msg) sys.exit(1) # If C compiler is set via CC, and isn't broken, we are good to go. We # should definitely not try accessing the compiler specified by # ``sysconfig.get_config_var('CC')`` lower down, because this may fail # if the compiler used to compile Python is missing (and maybe this is # why the user is setting CC). For example, the official Python 2.7.3 # MacOS X binary was compiled with gcc-4.2, which is no longer available # in XCode 4. return if self.compiler is not None: # At this point, self.compiler will be set only if a compiler # was specified in the command-line or via setup.cfg, in which # case we don't do anything return compiler_type = ccompiler.get_default_compiler() if compiler_type == 'unix': # We have to get the compiler this way, as this is the one that is # used if os.environ['CC'] is not set. It is actually read in from # the Python Makefile. Note that this is not necessarily the same # compiler as returned by ccompiler.new_compiler() c_compiler = sysconfig.get_config_var('CC') try: version = get_compiler_version(c_compiler) except OSError: msg = textwrap.dedent( """ The C compiler used to compile Python {compiler:s}, and which is normally used to compile C extensions, is not available. You can explicitly specify which compiler to use by setting the CC environment variable, for example: CC=gcc python setup.py <command> or if you are using MacOS X, you can try: CC=clang python setup.py <command> """.format(compiler=c_compiler)) log.warn(msg) sys.exit(1) for broken, fixed in self._broken_compiler_mapping: if re.match(broken, version): os.environ['CC'] = fixed break def _check_cython_sources(self, extension): """ Where relevant, make sure that the .c files associated with .pyx modules are present (if building without Cython installed).
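        For example (module names purely illustrative), an extension declared
        with ``sources=['mypkg/fast.pyx']`` is left pointing at the .pyx file
        when building with Cython, but is rewritten to use ``mypkg/fast.c``
        (or ``mypkg/fast.cpp``) when Cython is not being used; if neither
        generated file exists, an IOError is raised stating that Cython must
        be installed to build from a git checkout.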
""" # Determine the compiler we'll be using if self.compiler is None: compiler = get_default_compiler() else: compiler = self.compiler # Replace .pyx with C-equivalents, unless c files are missing for jdx, src in enumerate(extension.sources): base, ext = os.path.splitext(src) pyxfn = base + '.pyx' cfn = base + '.c' cppfn = base + '.cpp' if not os.path.isfile(pyxfn): continue if self.uses_cython: extension.sources[jdx] = pyxfn else: if os.path.isfile(cfn): extension.sources[jdx] = cfn elif os.path.isfile(cppfn): extension.sources[jdx] = cppfn else: msg = ( 'Could not find C/C++ file {0}.(c/cpp) for Cython ' 'file {1} when building extension {2}. Cython ' 'must be installed to build from a git ' 'checkout.'.format(base, pyxfn, extension.name)) raise IOError(errno.ENOENT, msg, cfn) # Current versions of Cython use deprecated Numpy API features # the use of which produces a few warnings when compiling. # These additional flags should squelch those warnings. # TODO: Feel free to remove this if/when a Cython update # removes use of the deprecated Numpy API if compiler == 'unix': extension.extra_compile_args.extend([ '-Wp,-w', '-Wno-unused-function']) return build_ext astropy-helpers-1.1.1/astropy_helpers/commands/build_py.py0000644001134200020070000000265612602613465025002 0ustar embrayscience00000000000000from setuptools.command.build_py import build_py as SetuptoolsBuildPy from ..utils import _get_platlib_dir class AstropyBuildPy(SetuptoolsBuildPy): user_options = SetuptoolsBuildPy.user_options[:] boolean_options = SetuptoolsBuildPy.boolean_options[:] def finalize_options(self): # Update build_lib settings from the build command to always put # build files in platform-specific subdirectories of build/, even # for projects with only pure-Python source (this is desirable # specifically for support of multiple Python version). build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) build_cmd.build_purelib = platlib_dir build_cmd.build_lib = platlib_dir self.build_lib = platlib_dir SetuptoolsBuildPy.finalize_options(self) def run_2to3(self, files, doctests=False): # Filter the files to exclude things that shouldn't be 2to3'd skip_2to3 = self.distribution.skip_2to3 filtered_files = [] for filename in files: for package in skip_2to3: if filename[len(self.build_lib) + 1:].startswith(package): break else: filtered_files.append(filename) SetuptoolsBuildPy.run_2to3(self, filtered_files, doctests) def run(self): # first run the normal build_py SetuptoolsBuildPy.run(self) astropy-helpers-1.1.1/astropy_helpers/commands/build_sphinx.py0000644001134200020070000002266712630421426025662 0ustar embrayscience00000000000000from __future__ import print_function import inspect import os import pkgutil import re import shutil import subprocess import sys import textwrap from distutils import log from distutils.cmd import DistutilsOptionError import sphinx from sphinx.setup_command import BuildDoc as SphinxBuildDoc from ..utils import minversion PY3 = sys.version_info[0] >= 3 class AstropyBuildSphinx(SphinxBuildDoc): """ A version of the ``build_sphinx`` command that uses the version of Astropy that is built by the setup ``build`` command, rather than whatever is installed on the system. To build docs against the installed version, run ``make html`` in the ``astropy/docs`` directory. This also automatically creates the docs/_static directories--this is needed because GitHub won't create the _static dir because it has no tracked files. 
""" description = 'Build Sphinx documentation for Astropy environment' user_options = SphinxBuildDoc.user_options[:] user_options.append(('warnings-returncode', 'w', 'Parses the sphinx output and sets the return code to 1 if there ' 'are any warnings. Note that this will cause the sphinx log to ' 'only update when it completes, rather than continuously as is ' 'normally the case.')) user_options.append(('clean-docs', 'l', 'Completely clean previous builds, including ' 'automodapi-generated files before building new ones')) user_options.append(('no-intersphinx', 'n', 'Skip intersphinx, even if conf.py says to use it')) user_options.append(('open-docs-in-browser', 'o', 'Open the docs in a browser (using the webbrowser module) if the ' 'build finishes successfully.')) boolean_options = SphinxBuildDoc.boolean_options[:] boolean_options.append('warnings-returncode') boolean_options.append('clean-docs') boolean_options.append('no-intersphinx') boolean_options.append('open-docs-in-browser') _self_iden_rex = re.compile(r"self\.([^\d\W][\w]+)", re.UNICODE) def initialize_options(self): SphinxBuildDoc.initialize_options(self) self.clean_docs = False self.no_intersphinx = False self.open_docs_in_browser = False self.warnings_returncode = False def finalize_options(self): #Clear out previous sphinx builds, if requested if self.clean_docs: dirstorm = [os.path.join(self.source_dir, 'api')] if self.build_dir is None: dirstorm.append('docs/_build') else: dirstorm.append(self.build_dir) for d in dirstorm: if os.path.isdir(d): log.info('Cleaning directory ' + d) shutil.rmtree(d) else: log.info('Not cleaning directory ' + d + ' because ' 'not present or not a directory') SphinxBuildDoc.finalize_options(self) def run(self): # TODO: Break this method up into a few more subroutines and # document them better import webbrowser if PY3: from urllib.request import pathname2url else: from urllib import pathname2url # This is used at the very end of `run` to decide if sys.exit should # be called. If it's None, it won't be. retcode = None # If possible, create the _static dir if self.build_dir is not None: # the _static dir should be in the same place as the _build dir # for Astropy basedir, subdir = os.path.split(self.build_dir) if subdir == '': # the path has a trailing /... basedir, subdir = os.path.split(basedir) staticdir = os.path.join(basedir, '_static') if os.path.isfile(staticdir): raise DistutilsOptionError( 'Attempted to build_sphinx in a location where' + staticdir + 'is a file. Must be a directory.') self.mkpath(staticdir) # Now make sure Astropy is built and determine where it was built build_cmd = self.reinitialize_command('build') build_cmd.inplace = 0 self.run_command('build') build_cmd = self.get_finalized_command('build') build_cmd_path = os.path.abspath(build_cmd.build_lib) ah_importer = pkgutil.get_importer('astropy_helpers') ah_path = os.path.abspath(ah_importer.path) # Now generate the source for and spawn a new process that runs the # command. 
This is needed to get the correct imports for the built # version runlines, runlineno = inspect.getsourcelines(SphinxBuildDoc.run) subproccode = textwrap.dedent(""" from sphinx.setup_command import * os.chdir({srcdir!r}) sys.path.insert(0, {build_cmd_path!r}) sys.path.insert(0, {ah_path!r}) """).format(build_cmd_path=build_cmd_path, ah_path=ah_path, srcdir=self.source_dir) # runlines[1:] removes 'def run(self)' on the first line subproccode += textwrap.dedent(''.join(runlines[1:])) # All "self.foo" in the subprocess code needs to be replaced by the # values taken from the current self in *this* process subproccode = self._self_iden_rex.split(subproccode) for i in range(1, len(subproccode), 2): iden = subproccode[i] val = getattr(self, iden) if iden.endswith('_dir'): # Directories should be absolute, because the `chdir` call # in the new process moves to a different directory subproccode[i] = repr(os.path.abspath(val)) else: subproccode[i] = repr(val) subproccode = ''.join(subproccode) # This is a quick gross hack, but it ensures that the code grabbed from # SphinxBuildDoc.run will work in Python 2 if it uses the print # function if minversion(sphinx, '1.3'): subproccode = 'from __future__ import print_function' + subproccode if self.no_intersphinx: # the confoverrides variable in sphinx.setup_command.BuildDoc can # be used to override the conf.py ... but this could well break # if future versions of sphinx change the internals of BuildDoc, # so remain vigilant! subproccode = subproccode.replace('confoverrides = {}', 'confoverrides = {\'intersphinx_mapping\':{}}') log.debug('Starting subprocess of {0} with python code:\n{1}\n' '[CODE END])'.format(sys.executable, subproccode)) # To return the number of warnings, we need to capture stdout. This # prevents a continuous updating at the terminal, but there's no # apparent way around this. if self.warnings_returncode: proc = subprocess.Popen([sys.executable, '-c', subproccode], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) with proc.stdout: for line in iter(proc.stdout.readline, b''): line = line.strip(b'\n') print(line.decode('utf-8')) if 'build succeeded.' in str(line): retcode = 0 else: retcode = 1 # Poll to set proc.retcode proc.wait() if retcode != 0: if os.environ.get('TRAVIS', None) == 'true': #this means we are in the travis build, so customize #the message appropriately. msg = ('The build_sphinx travis build FAILED ' 'because sphinx issued documentation ' 'warnings (scroll up to see the warnings).') else: # standard failure message msg = ('build_sphinx returning a non-zero exit ' 'code because sphinx issued documentation ' 'warnings.') log.warn(msg) else: proc = subprocess.Popen([sys.executable], stdin=subprocess.PIPE) proc.communicate(subproccode.encode('utf-8')) if proc.returncode == 0: if self.open_docs_in_browser: if self.builder == 'html': absdir = os.path.abspath(self.builder_target_dir) index_path = os.path.join(absdir, 'index.html') fileurl = 'file://' + pathname2url(index_path) webbrowser.open(fileurl) else: log.warn('open-docs-in-browser option was given, but ' 'the builder is not html! Ignoring.') else: log.warn('Sphinx Documentation subprocess failed with return ' 'code ' + str(proc.returncode)) retcode = proc.returncode if retcode is not None: # this is potentially dangerous in that there might be something # after the call to `setup` in `setup.py`, and exiting here will # prevent that from running. But there's no other apparent way # to signal what the return code should be. 
sys.exit(retcode) class AstropyBuildDocs(AstropyBuildSphinx): description = 'alias to the build_sphinx command' astropy-helpers-1.1.1/astropy_helpers/commands/setup_package.py0000644001134200020070000000016712602613465026001 0ustar embrayscience00000000000000from os.path import join def get_package_data(): return {'astropy_helpers.commands': [join('src', 'compiler.c')]} astropy-helpers-1.1.1/astropy_helpers/commands/_test_compat.py0000644001134200020070000002666212630421426025652 0ustar embrayscience00000000000000""" Old implementation of ``./setup.py test`` command. This has been moved to astropy.tests as of Astropy v1.1.0, but a copy of the implementation is kept here for backwards compatibility. """ from __future__ import absolute_import, unicode_literals import inspect import os import shutil import subprocess import sys import tempfile from setuptools import Command from ..compat import _fix_user_options PY3 = sys.version_info[0] == 3 class AstropyTest(Command, object): description = 'Run the tests for this package' user_options = [ ('package=', 'P', "The name of a specific package to test, e.g. 'io.fits' or 'utils'. " "If nothing is specified, all default tests are run."), ('test-path=', 't', 'Specify a test location by path. If a relative path to a .py file, ' 'it is relative to the built package, so e.g., a leading "astropy/" ' 'is necessary. If a relative path to a .rst file, it is relative to ' 'the directory *below* the --docs-path directory, so a leading ' '"docs/" is usually necessary. May also be an absolute path.'), ('verbose-results', 'V', 'Turn on verbose output from pytest.'), ('plugins=', 'p', 'Plugins to enable when running pytest.'), ('pastebin=', 'b', "Enable pytest pastebin output. Either 'all' or 'failed'."), ('args=', 'a', 'Additional arguments to be passed to pytest.'), ('remote-data', 'R', 'Run tests that download remote data.'), ('pep8', '8', 'Enable PEP8 checking and disable regular tests. ' 'Requires the pytest-pep8 plugin.'), ('pdb', 'd', 'Start the interactive Python debugger on errors.'), ('coverage', 'c', 'Create a coverage report. Requires the coverage package.'), ('open-files', 'o', 'Fail if any tests leave files open. Requires the ' 'psutil package.'), ('parallel=', 'j', 'Run the tests in parallel on the specified number of ' 'CPUs. If negative, all the cores on the machine will be ' 'used. Requires the pytest-xdist plugin.'), ('docs-path=', None, 'The path to the documentation .rst files. If not provided, and ' 'the current directory contains a directory called "docs", that ' 'will be used.'), ('skip-docs', None, "Don't test the documentation .rst files."), ('repeat=', None, 'How many times to repeat each test (can be used to check for ' 'sporadic failures).'), ('temp-root=', None, 'The root directory in which to create the temporary testing files. ' 'If unspecified the system default is used (e.g. 
/tmp) as explained ' 'in the documentation for tempfile.mkstemp.') ] user_options = _fix_user_options(user_options) package_name = '' def initialize_options(self): self.package = None self.test_path = None self.verbose_results = False self.plugins = None self.pastebin = None self.args = None self.remote_data = False self.pep8 = False self.pdb = False self.coverage = False self.open_files = False self.parallel = 0 self.docs_path = None self.skip_docs = False self.repeat = None self.temp_root = None def finalize_options(self): # Normally we would validate the options here, but that's handled in # run_tests pass # Most of the test runner arguments have the same name as attributes on # this command class, with one exception (for now) _test_runner_arg_attr_map = { 'verbose': 'verbose_results' } def generate_testing_command(self): """ Build a Python script to run the tests. """ cmd_pre = '' # Commands to run before the test function cmd_post = '' # Commands to run after the test function if self.coverage: pre, post = self._generate_coverage_commands() cmd_pre += pre cmd_post += post def get_attr(arg): attr = self._test_runner_arg_attr_map.get(arg, arg) return getattr(self, attr) test_args = filter(lambda arg: hasattr(self, arg), self._get_test_runner_args()) test_args = ', '.join('{0}={1!r}'.format(arg, get_attr(arg)) for arg in test_args) if PY3: set_flag = "import builtins; builtins._ASTROPY_TEST_ = True" else: set_flag = "import __builtin__; __builtin__._ASTROPY_TEST_ = True" cmd = ('{cmd_pre}{0}; import {1.package_name}, sys; result = ' '{1.package_name}.test({test_args}); {cmd_post}' 'sys.exit(result)') return cmd.format(set_flag, self, cmd_pre=cmd_pre, cmd_post=cmd_post, test_args=test_args) def _validate_required_deps(self): """ This method checks that any required modules are installed before running the tests. """ try: import astropy except ImportError: raise ImportError( "The 'test' command requires the astropy package to be " "installed and importable.") def run(self): """ Run the tests! """ # Ensure there is a doc path if self.docs_path is None: if os.path.exists('docs'): self.docs_path = os.path.abspath('docs') # Build a testing install of the package self._build_temp_install() # Ensure all required packages are installed self._validate_required_deps() # Run everything in a try: finally: so that the tmp dir gets deleted. try: # Construct this modules testing command cmd = self.generate_testing_command() # Run the tests in a subprocess--this is necessary since # new extension modules may have appeared, and this is the # easiest way to set up a new environment # On Python 3.x prior to 3.3, the creation of .pyc files # is not atomic. py.test jumps through some hoops to make # this work by parsing import statements and carefully # importing files atomically. However, it can't detect # when __import__ is used, so its carefulness still fails. # The solution here (admittedly a bit of a hack), is to # turn off the generation of .pyc files altogether by # passing the `-B` switch to `python`. This does mean # that each core will have to compile .py file to bytecode # itself, rather than getting lucky and borrowing the work # already done by another core. Compilation is an # insignificant fraction of total testing time, though, so # it's probably not worth worrying about. 
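# For illustration only (hypothetical package name and options): the
# command string built by generate_testing_command() above resembles
#
#   import builtins; builtins._ASTROPY_TEST_ = True; \
#   import mypackage, sys; result = mypackage.test(package=None, \
#   verbose=False); sys.exit(result)
#
# and is executed below via ``python -B -c`` from the temporary copy of
# the built package.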
retcode = subprocess.call([sys.executable, '-B', '-c', cmd], cwd=self.testing_path, close_fds=False) finally: # Remove temporary directory shutil.rmtree(self.tmp_dir) raise SystemExit(retcode) def _build_temp_install(self): """ Build the package and copy the build to a temporary directory for the purposes of testing this avoids creating pyc and __pycache__ directories inside the build directory """ self.reinitialize_command('build', inplace=True) self.run_command('build') build_cmd = self.get_finalized_command('build') new_path = os.path.abspath(build_cmd.build_lib) # On OSX the default path for temp files is under /var, but in most # cases on OSX /var is actually a symlink to /private/var; ensure we # dereference that link, because py.test is very sensitive to relative # paths... tmp_dir = tempfile.mkdtemp(prefix=self.package_name + '-test-', dir=self.temp_root) self.tmp_dir = os.path.realpath(tmp_dir) self.testing_path = os.path.join(self.tmp_dir, os.path.basename(new_path)) shutil.copytree(new_path, self.testing_path) new_docs_path = os.path.join(self.tmp_dir, os.path.basename(self.docs_path)) shutil.copytree(self.docs_path, new_docs_path) self.docs_path = new_docs_path shutil.copy('setup.cfg', self.tmp_dir) def _generate_coverage_commands(self): """ This method creates the post and pre commands if coverage is to be generated """ if self.parallel != 0: raise ValueError( "--coverage can not be used with --parallel") try: import coverage except ImportError: raise ImportError( "--coverage requires that the coverage package is " "installed.") # Don't use get_pkg_data_filename here, because it # requires importing astropy.config and thus screwing # up coverage results for those packages. coveragerc = os.path.join( self.testing_path, self.package_name, 'tests', 'coveragerc') # We create a coveragerc that is specific to the version # of Python we're running, so that we can mark branches # as being specifically for Python 2 or Python 3 with open(coveragerc, 'r') as fd: coveragerc_content = fd.read() if PY3: ignore_python_version = '2' else: ignore_python_version = '3' coveragerc_content = coveragerc_content.replace( "{ignore_python_version}", ignore_python_version).replace( "{packagename}", self.package_name) tmp_coveragerc = os.path.join(self.tmp_dir, 'coveragerc') with open(tmp_coveragerc, 'wb') as tmp: tmp.write(coveragerc_content.encode('utf-8')) cmd_pre = ( 'import coverage; ' 'cov = coverage.coverage(data_file="{0}", config_file="{1}"); ' 'cov.start();'.format( os.path.abspath(".coverage"), tmp_coveragerc)) cmd_post = ( 'cov.stop(); ' 'from astropy.tests.helper import _save_coverage; ' '_save_coverage(cov, result, "{0}", "{1}");'.format( os.path.abspath('.'), self.testing_path)) return cmd_pre, cmd_post def _get_test_runner_args(self): """ A hack to determine what arguments are supported by the package's test() function. In the future there should be a more straightforward API to determine this (really it should be determined by the ``TestRunner`` class for whatever version of Astropy is in use). 
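For example, given a hypothetical signature such as::

    def test(package=None, test_path=None, verbose=False):
        ...

this returns ``['package', 'test_path', 'verbose']``, so that
``generate_testing_command`` only passes options the installed test
runner actually accepts.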
""" if PY3: import builtins builtins._ASTROPY_TEST_ = True else: import __builtin__ __builtin__._ASTROPY_TEST_ = True try: pkg = __import__(self.package_name) if not hasattr(pkg, 'test'): raise ImportError( 'package {0} does not have a {0}.test() function as ' 'required by the Astropy test runner'.format(package_name)) argspec = inspect.getargspec(pkg.test) return argspec.args finally: if PY3: del builtins._ASTROPY_TEST_ else: del __builtin__._ASTROPY_TEST_ astropy-helpers-1.1.1/astropy_helpers/commands/register.py0000644001134200020070000000454712602613465025020 0ustar embrayscience00000000000000from setuptools.command.register import register as SetuptoolsRegister class AstropyRegister(SetuptoolsRegister): """Extends the built in 'register' command to support a ``--hidden`` option to make the registered version hidden on PyPI by default. The result of this is that when a version is registered as "hidden" it can still be downloaded from PyPI, but it does not show up in the list of actively supported versions under http://pypi.python.org/pypi/astropy, and is not set as the most recent version. Although this can always be set through the web interface it may be more convenient to be able to specify via the 'register' command. Hidden may also be considered a safer default when running the 'register' command, though this command uses distutils' normal behavior if the ``--hidden`` option is omitted. """ user_options = SetuptoolsRegister.user_options + [ ('hidden', None, 'mark this release as hidden on PyPI by default') ] boolean_options = SetuptoolsRegister.boolean_options + ['hidden'] def initialize_options(self): SetuptoolsRegister.initialize_options(self) self.hidden = False def build_post_data(self, action): data = SetuptoolsRegister.build_post_data(self, action) if action == 'submit' and self.hidden: data['_pypi_hidden'] = '1' return data def _set_config(self): # The original register command is buggy--if you use .pypirc with a # server-login section *at all* the repository you specify with the -r # option will be overwritten with either the repository in .pypirc or # with the default, # If you do not have a .pypirc using the -r option will just crash. 
# Way to go distutils # If we don't set self.repository back to a default value _set_config # can crash if there was a user-supplied value for this option; don't # worry, we'll get the real value back afterwards self.repository = 'pypi' SetuptoolsRegister._set_config(self) options = self.distribution.get_option_dict('register') if 'repository' in options: source, value = options['repository'] # Really anything that came from setup.cfg or the command line # should override whatever was in .pypirc self.repository = value astropy-helpers-1.1.1/astropy_helpers/commands/install.py0000644001134200020070000000074612602613465024637 0ustar embrayscience00000000000000from setuptools.command.install import install as SetuptoolsInstall from ..utils import _get_platlib_dir class AstropyInstall(SetuptoolsInstall): user_options = SetuptoolsInstall.user_options[:] boolean_options = SetuptoolsInstall.boolean_options[:] def finalize_options(self): build_cmd = self.get_finalized_command('build') platlib_dir = _get_platlib_dir(build_cmd) self.build_lib = platlib_dir SetuptoolsInstall.finalize_options(self) astropy-helpers-1.1.1/astropy_helpers/commands/__init__.py0000644001134200020070000000000012602613465024707 0ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/commands/test.py0000644001134200020070000000251412630421426024136 0ustar embrayscience00000000000000""" Different implementations of the ``./setup.py test`` command depending on what's locally available. If Astropy v1.1.0.dev or later is available it should be possible to import AstropyTest from ``astropy.tests.command``. If ``astropy`` can be imported but not ``astropy.tests.command`` (i.e. an older version of Astropy), we can use the backwards-compat implementation of the command. If Astropy can't be imported at all then there is a skeleton implementation that allows users to at least discover the ``./setup.py test`` command and learn that they need Astropy to run it. """ # Previously these except statements caught only ImportErrors, but there are # some other obscure exceptional conditions that can occur when importing # astropy.tests (at least on older versions) that can cause these imports to # fail try: import astropy try: from astropy.tests.command import AstropyTest except Exception: from ._test_compat import AstropyTest except Exception: # No astropy at all--provide the dummy implementation from ._dummy import _DummyCommand class AstropyTest(_DummyCommand): command_name = 'test' description = 'Run the tests for this package' error_msg = ( "The 'test' command requires the astropy package to be " "installed and importable.") astropy-helpers-1.1.1/astropy_helpers/compat/0000755001134200020070000000000012637041310022262 5ustar embrayscience00000000000000astropy-helpers-1.1.1/astropy_helpers/compat/subprocess.py0000644001134200020070000000105012602613465025030 0ustar embrayscience00000000000000""" A replacement wrapper around the subprocess module that adds check_output (which was only added to Python in 2.7. Instead of importing subprocess, other modules should use this as follows:: from astropy.utils.compat import subprocess This module is safe to import from anywhere within astropy. 
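A minimal usage sketch (assuming the wrapped module is imported from
``astropy_helpers.compat`` and that a ``git`` executable is on the path)::

    from astropy_helpers.compat import subprocess

    # check_output is available here even on Python 2.6, where the
    # standard library does not provide it
    output = subprocess.check_output(['git', '--version'])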
""" from __future__ import absolute_import, print_function import subprocess # python2.7 and later provide a check_output method if not hasattr(subprocess, 'check_output'): from ._subprocess_py2 import check_output from subprocess import * astropy-helpers-1.1.1/astropy_helpers/compat/__init__.py0000644001134200020070000000056012602613465024404 0ustar embrayscience00000000000000def _fix_user_options(options): """ This is for Python 2.x and 3.x compatibility. distutils expects Command options to all be byte strings on Python 2 and Unicode strings on Python 3. """ def to_str_or_none(x): if x is None: return None return str(x) return [tuple(to_str_or_none(x) for x in y) for y in options] astropy-helpers-1.1.1/astropy_helpers/__init__.py0000644001134200020070000000345412630421426023121 0ustar embrayscience00000000000000try: from .version import version as __version__ from .version import githash as __githash__ except ImportError: __version__ = '' __githash__ = '' # If we've made it as far as importing astropy_helpers, we don't need # ah_bootstrap in sys.modules anymore. Getting rid of it is actually necessary # if the package we're installing has a setup_requires of another package that # uses astropy_helpers (and possibly a different version at that) # See https://github.com/astropy/astropy/issues/3541 import sys if 'ah_bootstrap' in sys.modules: del sys.modules['ah_bootstrap'] # Note, this is repeated from ah_bootstrap.py, but is here too in case this # astropy-helpers was upgraded to from an older version that did not have this # check in its ah_bootstrap. # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass import os # Ensure that all module-level code in astropy or other packages know that # we're in setup mode: if ('__main__' in sys.modules and hasattr(sys.modules['__main__'], '__file__')): filename = os.path.basename(sys.modules['__main__'].__file__) if filename.rstrip('co') == 'setup.py': if sys.version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = True del filename astropy-helpers-1.1.1/astropy_helpers/git_helpers.py0000644001134200020070000001372712602613465023700 0ustar embrayscience00000000000000# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Utilities for retrieving revision information from a project's git repository. """ # Do not remove the following comment; it is used by # astropy_helpers.version_helpers to determine the beginning of the code in # this module # BEGIN import locale import os import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return text def update_git_devstr(version, path=None): """ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. 
""" try: # Quick way to determine if we're in git or not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not devstr: # Probably not in git so just pass silently return version if 'dev' in version: # update to the current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr else: #otherwise it's already the true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None): """ Determines the number of revisions in this repository. Parameters ---------- sha : bool If True, the full SHA1 hash will be returned. Otherwise, the total count of commits in the repository will be used as a "revision number". show_warning : bool If True, issue a warning if git returns an error code, otherwise errors pass silently. path : str or None If a string, specifies the directory to look in to find the git repository. If `None`, the current working directory is used, and must be the root of the git repository. If given a filename it uses the directory containing that file. Returns ------- devversion : str Either a string with the revision number (if `sha` is False), the SHA1 hash of the current commit (if `sha` is True), or an empty string if git version info could not be identified. """ if path is None: path = os.getcwd() if not _get_repo_path(path, levels=0): return '' if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just the hash of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as e: if show_warning: warnings.warn('Error running git: ' + str(e)) return (None, b'', b'') if p.returncode == 128: if show_warning: warnings.warn('No git repository present at {0!r}! Using ' 'default dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode == 129: if show_warning: warnings.warn('Your git looks old (does it support {0}?); ' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd) if not sha and returncode == 129: # git returns 129 if a command option failed to parse; in # particular this could happen in git versions older than 1.7.2 # where the --count option is not supported # Also use --abbrev-commit and --abbrev=0 to display the minimum # number of characters needed per-commit (rather than the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall back on the old method of getting all revisions and counting # the lines if returncode == 0: return str(stdout.count(b'\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() def _get_repo_path(pathname, levels=None): """ Given a file or directory name, determine the root of the git repository this path is under. If given, this won't look any higher than ``levels`` (that is, if ``levels=0`` then the given path must be the root of the git repository and is returned if so. 
Returns `None` if the given path could not be determined to belong to a git repo. """ if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level = 0 while levels is None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir) return None
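# A minimal usage sketch (not part of the original module; values shown are
# hypothetical).  This is roughly how version_helpers-style code might call
# the helpers above from the root of a git checkout:
if __name__ == '__main__':
    # Total number of commits, used as the ".dev" suffix for dev versions
    print('revision count:', get_git_devstr(sha=False, show_warning=False))
    # Full SHA1 of HEAD
    print('HEAD sha:', get_git_devstr(sha=True, show_warning=False))
    # '1.1.dev' becomes e.g. '1.1.dev456' when run inside a git working copy
    print('dev version:', update_git_devstr('1.1.dev'))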