pyvo-0.6.1+dfsg.orig/0000755000175000017500000000000013125165225013723 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/.gitignore0000644000175000017500000000125513125165225015716 0ustar noahfxnoahfx# Compiled files *.py[cod] *.a *.o *.so __pycache__ # Ignore .c files by default to avoid including generated code. If you want to # add a non-generated .c extension, use `git add -f filename.c`. *.c # Other generated files */version.py */cython_version.py htmlcov .coverage MANIFEST .ipynb_checkpoints # Sphinx docs/api docs/_build # Eclipse editor project files .project .pydevproject .settings # Pycharm editor project files .idea # Packages/installer info *.egg *.egg-info dist build eggs parts bin var sdist develop-eggs .installed.cfg distribute-*.tar.gz # Other .cache .tox .*.sw[op] *~ .project .pydevproject .settings # Mac OSX .DS_Store # ipython .ipynb_checkpoints pyvo-0.6.1+dfsg.orig/pyvo/0000755000175000017500000000000013125165225014720 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/tools/0000755000175000017500000000000013125165225016060 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/tools/vosi.py0000644000175000017500000002215213125165225017414 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import unicode_literals from . 
import plainxml import datetime from collections import OrderedDict from astropy.table import Table, Column from astropy.io.votable.converters import ( get_converter, converter_mapping, Int, Float, FloatComplex, Boolean, Char, Short, Long, Double ) import numpy as np # dirty hack converter_mapping.update({ "integer": Int, "real": Float, "complex": FloatComplex, "boolean": Boolean, "string": Char, "BOOLEAN": Boolean, "SMALLINT": Short, "INTEGER": Int, "BIGINT": Long, "REAL": Float, "DOUBLE": Double, "TIMESTAMP": Char, "CHAR": Char, "VARCHAR": Char, "BINARY": Char, "VARBINARY": Char, "POINT": Char, "REGION": Char, "CLOB": Char, "BLOB": Char, "UNKNOWN": Char }) class _CapabilitiesParser(plainxml.StartEndHandler): # VOSI; each capability is a dict with at least a key interfaces. # each interface is a dict with key type (namespace prefix not expanded; # change that?), accessURL, and use. def __init__(self): plainxml.StartEndHandler.__init__(self) self.capabilities = [] def _start_capability(self, name, attrs): self.curCap = { "interfaces": [], "dataModels": [], "languages": [], "outputFormats": [], "uploadMethods": [], "retentionPeriod": {}, "executionDuration": {} } self.curCap["standardID"] = attrs.get("standardID") def _end_capability(self, name, attrs, content): self.capabilities.append(self.curCap) self.curCap = None def _start_interface(self, name, attrs): attrs = plainxml._pruneAttrNS(attrs) self.curInterface = {"type": attrs.get("type"), "role": attrs.get("role")} def _end_interface(self,name, attrs, content): self.curCap["interfaces"].append(self.curInterface) self.curInterface = None def _start_dataModel(self, name, attrs): pass def _end_dataModel(self, name, attrs, content): self.curCap["dataModels"].append(content) def _start_language(self, name, attrs): self.curLang = { "name": "", "version": "", "description": "", "languageFeatures": [] } def _end_language(self, name, attrs, content): self.curCap["languages"].append(self.curLang) self.curLang = None def 
_start_languageFeatures(self, name, attrs): attrs = plainxml._pruneAttrNS(attrs) self.curLanguageFeatures = { "type": attrs["type"], "features": [] } def _end_languageFeatures(self, name, attrs, content): self.curLang["languageFeatures"] = self.curLang.get( "languageFeatures", []) + [self.curLanguageFeatures] self.curLanguageFeatures = None def _start_feature(self, name, attrs): self.curFeature = { "form": "", "description": "" } def _end_feature(self, name, attrs, content): self.curLanguageFeatures["features"].append(self.curFeature) self.curFeature = None def _start_outputFormat(self, name, attrs): self.curOutputFormat = { "mime": "", "alias": "" } def _end_outputFormat(self, name, attrs, content): self.curCap["outputFormats"].append(self.curOutputFormat) self.curOutputFormat = None def _start_uploadMethod(self, name, attrs): attrs = plainxml._pruneAttrNS(attrs) self.curCap["uploadMethods"].append(attrs["ivo-id"]) def _end_uploadMethod(self, name, attrs, content): pass def _start_retentionPeriod(self, name, attrs): self.curRetentionPeriod = { "default": "" } def _end_retentionPeriod(self, name, attrs, content): self.curCap["retentionPeriod"] = self.curRetentionPeriod self.curRetentionPeriod = None def _start_executionDuration(self, name, content): self.curExecutionDuration = { "default": 0 } def _end_executionDuration(self, name, attrs, content): self.curCap["executionDuration"] = self.curExecutionDuration self.curExecutionDuration = None def _start_outputLimit(self, name, attrs): self.curOutputLimit = { "default": {}, "hard": {} } def _end_outputLimit(self, name, attrs, content): self.curCap["outputLimit"] = self.curOutputLimit self.curOutputLimit = None def _start_uploadLimit(self, name, attrs): self.curUploadLimit = { "hard": {} } def _end_uploadLimit(self, name, attrs, content): self.curCap["uploadLimit"] = self.curUploadLimit self.curUploadLimit = None def _end_accessURL(self, name, attrs, content): self.curInterface["accessURL"] = content.strip() 
self.curInterface["use"] = attrs.get("use") def _end_name(self, name, attrs, content): if self.curLang is not None: self.curLang["name"] = content.strip() def _end_version(self, name, attrs, content): if self.curLang is not None: self.curLang["version"] = content.strip() def _end_description(self, name, attrs, content): if getattr(self, "curFeature", None) is not None: self.curFeature["description"] = content.strip() elif self.curLang is not None: self.curLang["description"] = content.strip() def _end_form(self, name, attrs, content): if self.curFeature is not None: self.curFeature["form"] = content.strip() def _end_mime(self, name, attrs, content): if self.curOutputFormat is not None: self.curOutputFormat["mime"] = content.strip() def _end_alias(self, name, attrs, content): if self.curOutputFormat is not None: self.curOutputFormat["alias"] = content.strip() def _end_default(self, name, attrs, content): if getattr(self, "curRetentionPeriod", None) is not None: self.curRetentionPeriod["default"] = int(content.strip()) elif getattr(self, "curExecutionDuration", None) is not None: self.curExecutionDuration["default"] = int(content.strip()) elif getattr(self, "curOutputLimit", None) is not None: self.curOutputLimit["default"] = { "unit": attrs["unit"], "value": int(content.strip()) } def _end_hard(self, name, attrs, content): if getattr(self, "curOutputLimit", None) is not None: self.curOutputLimit["hard"] = { "unit": attrs["unit"], "value": int(content.strip()) } elif getattr(self, "curUploadLimit", None) is not None: self.curUploadLimit["hard"] = { "unit": attrs["unit"], "value": int(content.strip()) } def getResult(self): return self.capabilities def parse_capabilities(stream): parser = _CapabilitiesParser() parser.parse(stream) return parser.getResult() class _TablesParser(plainxml.StartEndHandler): def __init__(self): plainxml.StartEndHandler.__init__(self) self.tables = OrderedDict() def _start_schema(self, name, attrs): self.curSchema = "" def _end_schema(self, 
name, attrs, content): self.curSchema = None def _start_table(self, name, attrs): self.curTable = Table() def _end_table(self, name, attrs, content): table_name = self.curTable.meta["name"] self.tables[table_name] = self.curTable self.curTable = None def _start_column(self, name, attrs): self.inColumn = True self.curUCD = None self.curUnit = None self.curDescription = None def _end_column(self, name, attrs, content): column = Column( name = self.curColumn, dtype = self.curDtype, description = self.curDescription, unit = self.curUnit) if self.curUCD is not None: column.meta["ucd"] = self.curUCD column.meta["datatype"] = self.curDatatype column.meta["arraysize"] = self.curArraysize self.curTable[self.curColumn] = column self.inColumn = False def _end_name(self, name, attrs, content): content = content.strip() if getattr(self, "inColumn", False): self.curColumn = content elif getattr(self, "curTable", None) is not None: self.curTable.meta["name"] = content def _end_description(self, name, attrs, content): content = content.strip() if getattr(self, "inColumn", False): self.curDescription = content def _end_ucd(self, name, attrs, content): content = content.strip() if getattr(self, "inColumn", False): self.curUCD = content def _end_dataType(self, name, attrs, content): content = content.strip() class _values(object): null = None class _field(object): datatype = content if datatype in ( key for key, value in converter_mapping.items() if value == Char ): datatype = "char" arraysize = ( attrs.get("arraysize") or attrs.get("size")) if datatype in ( "char", "unicodeChar") else None precision = None width = None values = _values() converter = get_converter(_field()) self.curDtype = np.dtype(converter.format) self.curDatatype = content self.curArraysize = attrs.get("arraysize") def _end_unit(self, name, attrs, content): content = content.strip() if getattr(self, "inColumn", False): self.curUnit = content def getResult(self): return self.tables def parse_tables(stream): parser 
= _TablesParser() parser.parse(stream) return parser.getResult() class _AvailabiliyParser(plainxml.StartEndHandler): def __init__(self): plainxml.StartEndHandler.__init__(self) self.available = False self.up_since = None def _end_available(self, name, attrs, content): content = content.strip() self.available = content == "true" def _end_upSince(self, name, attrs, content): content = content.strip() self.up_since = datetime.datetime.strptime( content, "%Y-%m-%dT%H:%M:%SZ") def getResult(self): return (self.available, self.up_since) def parse_availability(stream): parser = _AvailabiliyParser() parser.parse(stream) return parser.getResult() pyvo-0.6.1+dfsg.orig/pyvo/tools/plainxml.py0000644000175000017500000001651613125165225020267 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Some XML hacks. StartEndHandler simplifies the creation of SAX parsers, intended for client code or non-DC XML parsing. iterparse is an elementtree-inspired thin expat layer; both VOTable and base.structure parsing builds on it. """ import collections import weakref import xml.sax from xml.parsers import expat from xml.sax.handler import ContentHandler from astropy.extern import six class ErrorPosition(object): """A wrapper for an error position. Construct it with file name, line number, and column. Use None for missing or unknown values. """ fName = None def __init__(self, fName, line, column): self.line = line or '?' self.col = column if self.col is None: self.col = '?' self.fName = fName def __str__(self): if self.fName: return "%s, (%s, %s)"%(self.fName, self.line, self.col) else: return "(%s, %s)"%(self.line, self.col) class iterparse(object): """iterates over start, data, and end events in source. To keep things simple downstream, we swallow all namespace prefixes, if present. iterparse is constructed with a source (anything that can read(source)) and optionally a custom error class. This error class needs to have the message as the first argument. 
Since expat error messages usually contain line number and column in them, no extra pos attribute is supported. Since the parser typically is far ahead of the events seen, we do our own bookkeeping by storing the parser position with each event. The *end* of the construct that caused an event can be retrieved using pos. """ chunkSize = 2**20 "The number of bytes handed to expat from iterparse at one go." def __init__(self, source, parseErrorClass=ValueError): self.source = source self.parseErrorClass = parseErrorClass if hasattr(source, "name"): self.inputName = source.name elif hasattr(source, "getvalue"): self.inputName = repr(source.getvalue())[1:-1] else: self.inputName = repr(source)[:34] self.parser = expat.ParserCreate() self.parser.buffer_text = True self.lastLine, self.lastColumn = 1, 0 # We want ordered attributes for forcing attribute names to be # byte strings. self.parser.returns_unicode = True self.evBuf = collections.deque() self.parser.StartElementHandler = self._startElement self.parser.EndElementHandler = self._endElement self.parser.CharacterDataHandler = self._characters def __iter__(self): return self def _startElement(self, name, attrs): self.evBuf.append( (("start", name.split(":")[-1], attrs), (self.parser.CurrentLineNumber, self.parser.CurrentColumnNumber))) def _endElement(self, name): self.evBuf.append((("end", name.split(":")[-1], None), (self.parser.CurrentLineNumber, self.parser.CurrentColumnNumber))) def _characters(self, data): self.evBuf.append((("data", None, data), None)) def pushBack(self, type, name, payload): self.evBuf.appendleft(((type, name, payload), None)) def next(self): while not self.evBuf: try: nextChunk = self.source.read(self.chunkSize) if nextChunk: self.parser.Parse(nextChunk) else: self.close() break except expat.ExpatError as ex: newEx = self.parseErrorClass(str(ex)) newEx.posInMsg = True # see base.xmlstruct newEx.inFile = getattr(self.source, "name", "(internal source)") raise ex if not self.evBuf: raise 
StopIteration("End of Input") event, pos = self.evBuf.popleft() if pos is not None: self.lastLine, self.lastColumn = pos return event def close(self): self.parser.Parse("", True) self.parser.StartElementHandler =\ self.parser.EndElementHandler = \ self.parser.CharacterDataHandler = None @property def pos(self): return ErrorPosition(self.inputName, self.lastLine, self.lastColumn) def getParseError(self, msg): res = self.parseErrorClass("At %s: %s"%(self.pos, msg)) res.posInMsg = True # see base.xmlstruct return res class StartEndHandler(ContentHandler): """This class provides startElement, endElement and characters methods that translate events into method calls. When an opening tag is seen, we look of a _start_ method and, if present, call it with the name and the attributes. When a closing tag is seen, we try to call _end_ with name, attributes and contents. If the _end_xxx method returns a string (or similar), this value will be added to the content of the enclosing element. Rather than overriding __init__, you probably want to override the _initialize() method to create the data structures you want to fill from XML. StartEndHandlers clean element names from namespace prefixes, and they ignore them in every other way. If you need namespaces, use a different interface. 
""" def __init__(self): ContentHandler.__init__(self) self.realHandler = weakref.proxy(self) self.elementStack = [] self.contentsStack = [[]] self._initialize() def _initialize(self): pass def processingInstruction(self, target, data): self.contentsStack[-1].append(data) def cleanupName(self, name): return name.split(":")[-1].replace("-", "_") def startElementNS(self, namePair, qName, attrs): newAttrs = {} for ns, name in attrs.keys(): if ns is None: newAttrs[name] = attrs[(ns, name)] else: newAttrs["{%s}%s"%(ns, name)] = attrs[(ns, name)] self.startElement(namePair[1], newAttrs) def startElement(self, name, attrs): self.contentsStack.append([]) name = self.cleanupName(name) self.elementStack.append((name, attrs)) if hasattr(self.realHandler, "_start_%s"%name): getattr(self.realHandler, "_start_%s"%name)(name, attrs) elif hasattr(self, "_defaultStart"): self._defaultStart(name, attrs) def endElementNS(self, namePair, qName): self.endElement(namePair[1]) def endElement(self, name, suppress=False): contents = "".join(self.contentsStack.pop()) name = self.cleanupName(name) _, attrs = self.elementStack.pop() res = None if hasattr(self.realHandler, "_end_%s"%name): res = getattr(self.realHandler, "_end_%s"%name)(name, attrs, contents) elif hasattr(self, "_defaultEnd"): res = self._defaultEnd(name, attrs, contents) if type(res) in six.string_types and not suppress: self.contentsStack[-1].append(res) def characters(self, chars): self.contentsStack[-1].append(chars) def getResult(self): return self.contentsStack[0][0] def getParentTag(self, depth=1): """Returns the name of the parent element. This only works as written here in end handlers. In start handlers, you have to path depth=2 (since their tag already is on the stack. 
""" if self.elementStack: return self.elementStack[-depth][0] def parse(self, stream): xml.sax.parse(stream, self) return self def parseString(self, string): xml.sax.parseString(string, self) return self def getAttrsAsDict(self, attrs): """returns attrs as received from SAX as a dictionary. The main selling point is that any namespace prefixes are removed from the attribute names. Any prefixes on attrs remain, though. """ return dict((k.split(":")[-1], v) for k, v in attrs.items()) def setDocumentLocator(self, locator): self.locator = locator def traverseETree(eTree): """iterates the elements of an elementTree in postorder. """ for child in eTree: for gc in traverseETree(child): yield gc yield eTree def _pruneAttrNS(attrs): return dict((k.split(":")[-1], v) for k,v in attrs.items()) pyvo-0.6.1+dfsg.orig/pyvo/tools/uws.py0000644000175000017500000000307713125165225017257 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst from astropy.time import Time from . 
import plainxml class _JobParser(plainxml.StartEndHandler): def __init__(self): plainxml.StartEndHandler.__init__(self) self.job = { "version": "1.0" } def _start_job(self, name, attrs): if "version" in attrs: self.job["version"] = attrs["version"] def _end_jobId(self, name, attrs, content): self.job["jobId"] = content.strip() def _end_ownerId(self, name, attrs, content): self.job["ownerId"] = content.strip() def _end_phase(self, name, attrs, content): self.job["phase"] = content.strip() def _end_quote(self, name, attrs, content): self.job["quote"] = content.strip() def _end_startTime(self, name, attrs, content): self.job["startTime"] = content.strip() def _end_endTime(self, name, attrs, content): self.job["endTime"] = content.strip() def _end_executionDuration(self, name, attrs, content): self.job["executionDuration"] = content.strip() def _end_destruction(self, name, attrs, content): time = Time( content.strip(), format="isot") self.job["destruction"] = time.datetime def _end_message(self, name, attrs, content): self.job["message"] = content.strip() def _start_results(self, name, attrs): if not self.job.get("results", None): self.job["results"] = dict() def _start_result(self, name, attrs): id = attrs["id"] href = attrs["xlink:href"] self.job["results"][id] = href def getResult(self): return self.job def parse_job(stream): parser = _JobParser() parser.parse(stream) return parser.getResult() pyvo-0.6.1+dfsg.orig/pyvo/tools/__init__.py0000644000175000017500000000013013125165225020163 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst from . 
import uws, vosi pyvo-0.6.1+dfsg.orig/pyvo/dal/0000755000175000017500000000000013125165225015460 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/dal/mixin.py0000644000175000017500000000455713125165225017171 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Various mixins """ from __future__ import ( absolute_import, division, print_function, unicode_literals) from functools import partial import requests from .query import DALServiceError from ..tools import vosi class AvailabilityMixin(object): """ Mixing for VOSI availability """ _availability = (None, None) @property def availability(self): """ returns availability as a tuple in the following form: Returns ------- [0] : bool whether the service is available or not [1] : datetime the time since the server is running """ if self._availability == (None, None): avail_url = '{0}/availability'.format(self.baseurl) response = requests.get(avail_url, stream=True) try: response.raise_for_status() except requests.RequestException as ex: raise DALServiceError.from_except(ex, avail_url) # requests doesn't decode the content by default response.raw.read = partial(response.raw.read, decode_content=True) self._availability = vosi.parse_availability(response.raw) return self._availability @property def available(self): """ True if the service is available, False otherwise """ return self.availability[0] @property def up_since(self): """ datetime the service was started """ return self.availability[1] class CapabilityMixin(object): """ Mixing for VOSI capability """ _capabilities = None @property def capabilities(self): """ Returns capabilities as a nested dictionary """ if self._capabilities is None: capa_url = '{0}/capabilities'.format(self.baseurl) response = requests.get(capa_url, stream=True) try: response.raise_for_status() except requests.RequestException as ex: raise DALServiceError.from_except(ex, capa_url) # requests doesn't decode the content by default response.raw.read = 
partial(response.raw.read, decode_content=True) self._capabilities = vosi.parse_capabilities(response.raw) return self._capabilities pyvo-0.6.1+dfsg.orig/pyvo/dal/sla.py0000644000175000017500000003316213125165225016616 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ A module for searching for spectral line metadata in a remote database. A Simple Line Access (SLA) service allows a client to search for metadata describing atomic and molecular transitions that can result in spectral line emission and absorption. The service responds to a search query with a table in which each row represents a transition that matches the query constraints. The columns provide the metadata describing the transition. This module provides an interface for accessing an SLA service. It is implemented as a specialization of the DAL Query interface. The ``search()`` function support the simplest and most common types of queries, returning an SLAResults instance as its results which represents the matching imagess from the archive. The SLAResults supports access to and iterations over the individual records; these are provided as SLARecord instances, which give easy access to key metadata in the response, such as the transition title. The SLAService class can represent a specific service available at a URL endpoint. """ from __future__ import ( absolute_import, division, print_function, unicode_literals) from astropy.units import Quantity, Unit from astropy.units import spectral as spectral_equivalencies from .query import (DALResults, DALQuery, DALService, Record) __all__ = ["search", "SLAService", "SLAQuery", "SLAResults", "SLARecord"] def search(baseurl, wavelength, **keywords): """ submit a simple SLA query that requests spectral lines within a wavelength range Parameters ---------- baseurl : str the base URL for the SLA service wavelength : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. 
assuming meters if unit is not specified. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SLAResults a container holding a table of matching spectral lines Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error. """ service = SLAService(baseurl) return service.search(wavelength, **keywords) class SLAService(DALService): """ a representation of an spectral line catalog (SLA) service """ def __init__(self, baseurl): """ instantiate an SLA service Parameters ---------- baseurl : str the base URL for submitting search queries to the service. """ super(SLAService, self).__init__(baseurl,) def search(self, wavelength, **keywords): """ submit a simple SLA query to this service with the given constraints. This method is provided for a simple but typical SLA queries. For more complex queries, one should create an SLAQuery object via create_query() Parameters ---------- wavelength : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SLAResults a container holding a table of matching spectral lines Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error. 
See Also -------- SLAResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ return self.create_query(wavelength, **keywords).execute() def create_query(self, wavelength=None, request="queryData", **keywords): """ create a query object that constraints can be added to and then executed. The input arguments will initialize the query with the given values. Parameters ---------- wavelength : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SLAQuery the query instance See Also -------- SLAQuery """ return SLAQuery(self.baseurl, wavelength, request, **keywords) class SLAQuery(DALQuery): """ a class for preparing an query to an SLA service. Query constraints are added via its service type-specific methods. The various execute() functions will submit the query and return the results. The base URL for the query, which controls where the query will be sent when one of the execute functions is called, is typically set at construction time; however, it can be updated later via the :py:attr:`~pyvo.dal.query.DALQuery.baseurl` to send a configured query to another service. In addition to the search constraint attributes described below, search parameters can be set generically by name via the dict semantics. The typical function for submitting the query is ``execute()``; however, alternate execute functions provide the response in different forms, allowing the caller to take greater control of the result processing. 
""" def __init__( self, baseurl, wavelength=None, request="queryData", **keywords): """ initialize the query object with a baseurl and the given parameters Parameters ---------- baseurl : str the base URL for the SLA service wavelength : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. """ super(SLAQuery, self).__init__(baseurl) if wavelength: self.wavelength = wavelength self.request = request self.update({key.upper(): value for key, value in keywords.items()}) @property def wavelength(self): """ the bandwidth range the observations belong to. """ return getattr(self, "_wavelength", None) @wavelength.setter def wavelength(self, val): setattr(self, "_wavelength", val) if not isinstance(val, Quantity): # assume meters val = val * Unit("meter") try: if len(val) != 2: raise ValueError( "wavelength must be specified with exactly two values") except TypeError: raise ValueError( "wavelength must be specified with exactly two values") # transform to meters val = val.to( Unit("m"), equivalencies=spectral_equivalencies()) # frequency is counter-proportional to wavelength, so we just sort it to # have the right order again val.sort() self["WAVELENGTH"] = "{start}/{end}".format( start=val.value[0], end=val.value[1]) @wavelength.deleter def wavelength(self): delattr(self, "_wavelength") del self["WAVELENGTH"] @property def request(self): """ the type of service operation which is being performed """ return getattr(self, "_request", None) @request.setter def request(self, val): setattr(self, "_request", val) self["REQUEST"] = val @request.deleter def request(self): delattr(self, "_request") del self["REQUEST"] def execute(self): """ submit the query and return the 
results as a SLAResults instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ return SLAResults(self.execute_votable(), self.queryurl) class SLAResults(DALResults): """ The list of matching spectral lines resulting from a spectal line catalog (SLA) query. Each record contains a set of metadata that describes a source or observation within the requested circular region (i.e. a "cone"). The number of records in the results is available via the :py:attr:`nrecs attribute or by passing it to the Python built-in ``len()`` function. This class supports iterable semantics; thus, individual records (in the form of :py:class:`~pyvo.dal.sia.SLARecord` instances) are typically accessed by iterating over an ``SLAResults`` instance. >>> results = pyvo.linesearch(url, wavelength=(0.0265,0.0280)) >>> for spl in results: ... print("{0}: {1}".format(spl.species_name, spl.wavelength)) Alternatively, records can be accessed randomly via :py:meth:`getrecord` or through a Python Database API (v2) Cursor (via :py:meth:`~pyvo.dal.query.DALResults.cursor`). Column-based data access is possible via the :py:meth:`~pyvo.dal.query.DALResults.getcolumn` method. ``SLAResults`` is essentially a wrapper around an Astropy :py:mod:`~astropy.io.votable` :py:class:`~astropy.io.votable.tree.Table` instance where the columns contain the various metadata describing the images. One can access that VOTable directly via the :py:attr:`~pyvo.dal.query.DALResults.votable` attribute. Thus, when one retrieves a whole column via :py:meth:`~pyvo.dal.query.DALResults.getcolumn`, the result is a Numpy array. 
Alternatively, one can manipulate the results as an Astropy :py:class:`~astropy.table.table.Table` via the following conversion: >>> table = results.votable.to_table() ``SLAResults`` supports the array item operator ``[...]`` in a read-only context. When the argument is numerical, the result is an :py:class:`~pyvo.dal.sla.SLARecord` instance, representing the record at the position given by the numerical index. If the argument is a string, it is interpreted as the name of a column, and the data from the column matching that name is returned as a Numpy array. """ def getrecord(self, index): """ return a representation of a sla result record that follows dictionary semantics. The keys of the dictionary are those returned by this instance's fieldnames attribute. The returned record has additional image-specific properties Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- SLARecord a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. See Also -------- Record """ return SLARecord(self, index) class SLARecord(Record): """ a dictionary-like container for data in a record from the results of an spectral line (SLA) query, describing a spectral line transition. The commonly accessed metadata which are stadardized by the SLA protocol are available as attributes. All metadata, particularly non-standard metadata, are acessible via the ``get(`` *key* ``)`` function (or the [*key*] operator) where *key* is table column name. """ @property def title(self): """ a title/small description of the line transition """ return self.getbyutype("ssldm:Line.title", decode=True) @property def wavelength(self): """ the vacuum wavelength of the line in meters. 
""" return self.getbyutype("ssldm:Line.wavelength.value") * Unit("m") @property def species_name(self): """ the name of the chemical species that produces the transition. """ return self.getbyutype("ssldm:Line.species.name") @property def status(self): """ the name of the chemical species that produces the transition. """ return self.getbyutype("ssldm:Line.identificationStatus") @property def initial_level(self): """ a description of the initial (higher energy) quantum level """ return self.getbyutype("ssldm:Line.initialLevel.name", decode=True) @property def final_level(self): """ a description of the final (higher energy) quantum level """ return self.getbyutype("ssldm:Line.finalLevel.name") pyvo-0.6.1+dfsg.orig/pyvo/dal/ssa.py0000644000175000017500000005553613125165225016636 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ A module for searching for spectra in a remote archive. A Simple Spectral Access (SSA) service allows a client to search for spectra in an archive whose field of view overlaps with a given cone on the sky. The service responds to a search query with a table in which each row represents an image that is available for download. The columns provide metadata describing each image and one column in particular provides the image's download URL (also called the *access reference*, or *acref*). Some SSA services can create spectra on-the-fly from underlying data (e.g. image cubes); in this case, the query result is a table of images whose aperture matches the requested cone and which will be created when accessed via the download URL. This module provides an interface for accessing an SSA service. It is implemented as a specialization of the DAL Query interface. The ``search()`` function support the simplest and most common types of queries, returning an SSAResults instance as its results which represents the matching imagess from the archive. 
The SSAResults supports access to and iterations over the individual records; these are provided as SSARecord instances, which give easy access to key metadata in the response, such as the position of the spectrum's aperture, the spectrum format, its frequency range, and its download URL. The SSAService class can represent a specific service available at a URL endpoint. """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import re from astropy.extern import six from astropy.coordinates import SkyCoord from astropy.time import Time from astropy.units import Quantity, Unit from astropy.units import spectral as spectral_equivalencies from .query import DALResults, DALQuery, DALService, Record, mime2extension from .datalink import DatalinkMixin __all__ = ["search", "SSAService", "SSAQuery", "SSAResults", "SSARecord"] def search(baseurl, pos, diameter=None, band=None, time=None, format='all', **keywords): """ submit a simple SSA query that requests spectra overlapping a given region Parameters ---------- baseurl : str the base URL for the SSA service pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the circular search region. assuming icrs decimal degrees if unit is not specified. diameter : `~astropy.units.Quantity` class or scalar float the diameter of the circular region around pos in which to search. assuming icrs decimal degrees if unit is not specified. band : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. time : `~astropy.time.Time` class or sequence of two strings the datetime range the observations were made in. assuming iso 8601 if format is not specified. format : str the image format(s) of interest. "all" indicates all available formats; "graphic" indicates graphical images (e.g. 
jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SSAResults a container holding a table of matching spectrum records Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error. See Also -------- SSAResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ return SSAService(baseurl).search(pos, diameter, band, time, format, **keywords) class SSAService(DALService): """ a representation of an SSA service """ def __init__(self, baseurl): """ instantiate an SSA service Parameters ---------- baseurl : str the base URL for submitting search queries to the service. """ super(SSAService, self).__init__(baseurl) def search(self, pos, diameter, band=None, time=None, format='all', **keywords): """ submit a simple SSA query to this service with the given constraints. This method is provided for a simple but typical SSA queries. For more complex queries, one should create an SSAQuery object via create_query() Parameters ---------- pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the circular search region. assuming icrs decimal degrees if unit is not specified. diameter : `~astropy.units.Quantity` class or scalar float the diameter of the circular region around pos in which to search. assuming icrs decimal degrees if unit is not specified. band : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. 
time : `~astropy.time.Time` class or sequence of two strings the datetime range the observations were made in. assuming iso 8601 if format is not specified. format : str the image format(s) of interest. "all" indicates all available formats; "graphic" indicates graphical images (e.g. jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SSAResults a container holding a table of matching catalog records Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error See Also -------- SSAResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ return self.create_query( pos, diameter, band, time, format, **keywords).execute() def create_query( self, pos=None, diameter=None, band=None, time=None, format=None, request="queryData", **keywords): """ create a query object that constraints can be added to and then executed. The input arguments will initialize the query with the given values. Parameters ---------- pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the circular search region. assuming icrs decimal degrees if unit is not specified. diameter : `~astropy.units.Quantity` class or scalar float the diameter of the circular region around pos in which to search. assuming icrs decimal degrees if unit is not specified. band : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. time : `~astropy.time.Time` class or sequence of two strings the datetime range the observations were made in. 
assuming iso 8601 if format is not specified. format : str the image format(s) of interest. "all" indicates all available formats; "graphic" indicates graphical images (e.g. jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SSAQuery the query instance See Also -------- SSAQuery """ return SSAQuery( self.baseurl, pos, diameter, band, time, format, request, **keywords) class SSAQuery(DALQuery): """ a class for preparing an query to an SSA service. Query constraints are added via its service type-specific properties and methods. Once all the constraints are set, one of the various execute() functions can be called to submit the query and return the results. The base URL for the query, which controls where the query will be sent when one of the execute functions is called, is typically set at construction time; however, it can be updated later via the :py:attr:`~pyvo.dal.query.DALQuery.baseurl` to send a configured query to another service. The typical function for submitting the query is ``execute()``; however, alternate execute functions provide the response in different forms, allowing the caller to take greater control of the result processing. """ def __init__( self, baseurl, pos=None, diameter=None, band=None, time=None, format=None, request="queryData", **keywords): """ initialize the query object with a baseurl and the given parameters Parameters ---------- baseurl : str the base URL for the SSA service pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the circular search region. assuming icrs decimal degrees if unit is not specified. 
diameter : `~astropy.units.Quantity` class or scalar float the diameter of the circular region around pos in which to search. assuming icrs decimal degrees if unit is not specified. band : `~astropy.units.Quantity` class or sequence of two floats the bandwidth range the observations belong to. assuming meters if unit is not specified. time : `~astropy.time.Time` class or sequence of two strings the datetime range the observations were made in. assuming iso 8601 if format is not specified. format : str the image format(s) of interest. "all" indicates all available formats; "graphic" indicates graphical images (e.g. jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. """ super(SSAQuery, self).__init__(baseurl) if pos: self.pos = pos if diameter: self.diameter = diameter if band: self.band = band if time: self.time = time if format: self.format = format self.request = request self.update({key.upper(): value for key, value in keywords.items()}) @property def pos(self): """ the position of the center of the circular search region as a `~astropy.coordinates.SkyCoord` instance. """ return getattr(self, "_pos", None) @pos.setter def pos(self, val): setattr(self, "_pos", val) # use the astropy, luke if not isinstance(val, SkyCoord): pos_ra, pos_dec = val # assume ICRS degrees val = SkyCoord(ra=pos_ra, dec=pos_dec, unit="deg", frame="icrs") self["POS"] = "{ra},{dec}".format( ra=val.icrs.ra.deg, dec=val.icrs.dec.deg) @pos.deleter def pos(self): delattr(self, "_pos") del self["POS"] @property def diameter(self): """ the diameter of the circular region around pos as a `~astropy.units.Quantity` instance. 
""" return getattr(self, "_diameter", None) @diameter.setter def diameter(self, val): setattr(self, "_diameter", val) if not isinstance(val, Quantity): # assume degrees val = val * Unit("deg") try: if len(val): raise ValueError( "diameter may be specified using exactly one value") except TypeError: # len 1 pass self["SIZE"] = val.to(Unit("deg")).value @diameter.deleter def diameter(self): delattr(self, "_diameter") del self["SIZE"] @property def band(self): """ the bandwidth range the observations belong to. """ return getattr(self, "_band", None) @band.setter def band(self, val): setattr(self, "_band", val) if not isinstance(val, Quantity): # assume meters val = val * Unit("meter") try: if len(val) != 2: raise ValueError( "band must be specified with exactly two values") except TypeError: raise ValueError( "band must be specified with exactly two values") # transform to meters val = val.to(Unit("m"), equivalencies=spectral_equivalencies()) # frequency is counter-proportional to wavelength, so we just sort # it to have the right order again val.sort() self["BAND"] = "{start}/{end}".format( start=val.value[0], end=val.value[1]) @band.deleter def band(self): delattr(self, "_band") del self["BAND"] @property def time(self): """ the datetime range the observations were made in. """ return getattr(self, "_time", None) @time.setter def time(self, val): setattr(self, "_time", val) if not isinstance(val, Time): # assume iso8601 val = Time(val, format="isot") try: if len(val) != 2: raise ValueError( "time must be specified with exactly two values") except TypeError: raise ValueError( "time must be specified with exactly two values") self["TIME"] = "{start}/{end}".format( start=val.isot[0], end=val.isot[1]) @time.deleter def time(self): delattr(self, "_time") del self["TIME"] @property def format(self): """ the image format(s) of interest. "all" indicates all available formats; "graphic" indicates graphical images (e.g. 
jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata. """ return getattr(self, "_format", None) @format.setter def format(self, val): setattr(self, "_format", val) if type(val) in (six.text_type, six.binary_type): val = [val] self["FORMAT"] = ",".join(val) @format.deleter def format(self): delattr(self, "_format") del self["FORMAT"] @property def request(self): """ the type of service operation which is being performed """ return getattr(self, "_request", None) @request.setter def request(self, val): setattr(self, "_request", val) self["REQUEST"] = val @request.deleter def request(self): delattr(self, "_request") del self["REQUEST"] def execute(self): """ submit the query and return the results as a SSAResults instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ return SSAResults(self.execute_votable(), url=self.queryurl) class SSAResults(DALResults, DatalinkMixin): """ The list of matching images resulting from a spectrum (SSA) query. Each record contains a set of metadata that describes an available spectrum matching the query constraints. The number of records in the results is by passing it to the Python built-in ``len()`` function. This class supports iterable semantics; thus, individual records (in the form of :py:class:`~pyvo.dal.ssa.SSARecord` instances) are typically accessed by iterating over an ``SSAResults`` instance. >>> results = pyvo.spectrumsearch(url, pos=[12.24, -13.1], diameter=0.2) >>> for spec in results: ... print("{0}: {1}".format(spec.title, spec.getdataurl())) Alternatively, records can be accessed randomly via :py:meth:`getrecord` or through a Python Database API (v2) Cursor (via :py:meth:`~pyvo.dal.query.DALResults.cursor`). 
Column-based data access is possible via the :py:meth:`~pyvo.dal.query.DALResults.getcolumn` method. ``SSAResults`` is essentially a wrapper around an Astropy :py:mod:`~astropy.io.votable` :py:class:`~astropy.io.votable.tree.Table` instance where the columns contain the various metadata describing the spectra. One can access that VOTable directly via the :py:attr:`~pyvo.dal.query.DALResults.votable` attribute. Thus, when one retrieves a whole column via :py:meth:`~pyvo.dal.query.DALResults.getcolumn`, the result is a Numpy array. Alternatively, one can manipulate the results as an Astropy :py:class:`~astropy.table.table.Table` via the following conversion: >>> table = results.votable.to_table() ``SSAResults`` supports the array item operator ``[...]`` in a read-only context. When the argument is numerical, the result is an :py:class:`~pyvo.dal.ssa.SSARecord` instance, representing the record at the position given by the numerical index. If the argument is a string, it is interpreted as the name of a column, and the data from the column matching that name is returned as a Numpy array. """ def __init__(self, votable, **kwargs): """ Initialize datalinks """ super(SSAResults, self).__init__(votable, **kwargs) self._init_datalinks(votable) def getrecord(self, index): """ return a representation of a sia result record that follows dictionary semantics. The keys of the dictionary are those returned by this instance's fieldnames attribute. The returned record has additional image-specific properties Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- SIARecord a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. 
    See Also
        --------
        Record
        """
        return SSARecord(self, index)


class SSARecord(Record):
    """
    a dictionary-like container for data in a record from the results of an
    SSA query, describing an available spectrum.

    The commonly accessed metadata which are standardized by the SSA
    protocol are available as attributes.  If the metadatum accessible via
    an attribute is not available, the value of that attribute will be None.
    All metadata, including non-standard metadata, are accessible via the
    ``get(`` *key* ``)`` function (or the [*key*] operator) where *key* is
    table column name.
    """

    @property
    def ra(self):
        """
        return the right ascension of the center of the spectrum
        """
        return self.getbyutype("ssa:Target.Pos")[0]

    @property
    def dec(self):
        """
        return the declination of the center of the spectrum
        """
        return self.getbyutype("ssa:Target.Pos")[1]

    @property
    def title(self):
        """
        return the title of the spectrum
        """
        return self.getbyutype("ssa:DataID.Title", decode=True)

    @property
    def format(self):
        """
        return the file format that the spectrum is stored in
        """
        return self.getbyutype("ssa:Access.Format", decode=True)

    @property
    def dateobs(self):
        """
        return the modified Julian date (MJD) of the mid-point of
        the observational data that went into the spectrum

        NOTE(review): the Time object is constructed with format="iso",
        not "mjd", so the underlying column presumably holds an ISO date
        string rather than an MJD number -- confirm against the SSA
        response schema.
        """
        dateobs = self.getbyutype("ssa:DataID.Date", decode=True)
        if dateobs:
            return Time(dateobs, format="iso")
        else:
            return None

    @property
    def instr(self):
        """
        return the name of the instrument (or instruments) that produced the
        data that went into this spectrum.
        """
        return self.getbyutype("ssa:DataID.Instrument", decode=True)

    @property
    def acref(self):
        """
        return the URL that can be used to retrieve the spectrum.
        """
        return self.getbyutype("ssa:Access.Reference", decode=True)

    def getdataurl(self):
        """
        return the URL contained in the access URL column which can be used
        to retrieve the dataset described by this record.  None is returned
        if no such column exists.
""" return self.acref def suggest_dataset_basename(self): """ return a default base filename that the dataset available via ``getdataset()`` can be saved as. This function is specialized for a particular service type this record originates from so that it can be used by ``cachedataset()`` via ``make_dataset_filename()``. """ out = self.title if type(out) == six.binary_type: out = out.decode('utf-8') if not out: out = "spectrum" else: out = re.sub(r'\s+', '_', out.strip()) return out def suggest_extension(self, default=None): """ returns a recommended filename extension for the dataset described by this record. Typically, this would look at the column describing the format and choose an extension accordingly. """ return mime2extension(self.format, default) pyvo-0.6.1+dfsg.orig/pyvo/dal/query.py0000644000175000017500000012527213125165225017210 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ A module for walking through the query response of VO data access layer (DAL) queries and general VOTable-based datasets. Most data queries in the VO return a table as a result, usually formatted as a VOTable. Each row of the table describes a single physical or virtual dataset which can be retrieved. For uniformity, datasets are described via standard metadata defined by a data model specific to the type of data being queried. The fields of the data model are identified most generally by their VOClient alias as defined in this interface, or at a lower level by the Utype or UCD of the specific standard and version of the standard being queried. While the data model differs depending upon the type of data being queried, the form of the query response is the same for all classes of data, allowing a common query response interface to be used. An exception to this occurs when querying an astronomical catalog or other externally defined table. In this case there is no VO defined standard data model. 
Usually the field names are used to uniquely identify table columns. """ from __future__ import ( absolute_import, division, print_function, unicode_literals) __all__ = ["DALAccessError", "DALProtocolError", "DALFormatError", "DALServiceError", "DALQueryError", "DALService", "DALQuery", "DALResults", "Record"] import sys import os import re import warnings import textwrap import requests import functools from astropy.extern import six from astropy.table.table import Table from astropy.io.votable import parse as votableparse from astropy.utils.exceptions import AstropyUserWarning if six.PY3: _mimetype_re = re.compile(b'^\w[\w\-]+/\w[\w\-]+(\+\w[\w\-]*)?(;[\w\-]+(\=[\w\-]+))*$') else: _mimetype_re = re.compile(r'^\w[\w\-]+/\w[\w\-]+(\+\w[\w\-]*)?(;[\w\-]+(\=[\w\-]+))*$') def is_mime_type(val): if type(val) == six.text_type: val = val.encode('utf-8') return bool(_mimetype_re.match(val)) class DALService(object): """ an abstract base class representing a DAL service located a particular endpoint. """ def __init__(self, baseurl): """ instantiate the service connecting it to a base URL Parameters ---------- baseurl : str the base URL that should be used for forming queries to the service. """ self._baseurl = baseurl @property def baseurl(self): """ the base URL identifying the location of the service and where queries are submitted (read-only) """ return self._baseurl def search(self, **keywords): """ send a search query to this service. This implementation has no knowledge of the type of service being queried. The query parameters are given as arbitrary keywords which will be assumed to be understood by the service (i.e. there is no argument checking). The response is a generic DALResults object. 
Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ q = self.create_query(**keywords) return q.execute() def create_query(self, **keywords): """ create a query object that constraints can be added to and then executed. Returns ------- DALQuery a generic query object """ q = DALQuery(self.baseurl, **keywords) return q # TODO: move to pyvo.registry def describe(self, verbose=False, width=78, file=None): """ Print a summary description of this service. At a minimum, this will include the service protocol and base URL. If there is metadata associated with this service, the summary will include other information, such as the service title and description. Parameters ---------- verbose : bool If false (default), only user-oriented information is printed; if true, additional information will be printed as well. width : int Format the description with given character-width. file : writable file-like object If provided, write information to this output stream. Otherwise, it is written to standard out. 
""" if not file: file = sys.stdout print("{0} v{1} Service".format(self.protocol.upper(), self.version)) if self.info.get("title"): print(para_format_desc(self.info["title"]), file=file) if self.info.get("shortName"): print("Short Name: " + self.info["shortName"], file=file) if self.info.get("publisher"): print(para_format_desc("Publisher: " + self.info["publisher"]), file=file) if self.info.get("identifier"): print("IVOA Identifier: " + self.info["identifier"], file=file) print("Base URL: " + self.baseurl, file=file) if self.info.get("description"): print(file=file) print(para_format_desc(self.info["description"]), file=file) print(file=file) if self.info.get("subjects"): val = self.info.get("subjects") if not hasattr(val, "__getitem__"): val = [val] val = (str(v) for v in val) print(para_format_desc("Subjects: " + ", ".join(val)), file=file) if self.info.get("waveband"): val = self.info.get("waveband") if not hasattr(val, "__getitem__"): val = [val] val = (str(v) for v in val) print(para_format_desc("Waveband Coverage: " + ", ".join(val)), file=file) if verbose: if self.info.get("capabilityStandardID"): print("StandardID: " + self.info["capabilityStandardID"], file=file) if self.info.get("referenceURL"): print("More info: " + self.info["referenceURL"], file=file) class DALQuery(dict): """ a class for preparing a query to a particular service. Query constraints are added via its service type-specific methods. The various execute() functions will submit the query and return the results. The base URL for the query can be changed via the baseurl property. """ _ex = None def __init__(self, baseurl, **keywords): """ initialize the query object with a baseurl """ if type(baseurl) == six.binary_type: baseurl = baseurl.decode("utf-8") self._baseurl = baseurl.rstrip("?") self.update({key.upper(): value for key, value in keywords.items()}) @property def baseurl(self): """ the base URL that this query will be sent to when one of the execute functions is called. 
        """
        return self._baseurl

    def execute(self):
        """
        submit the query and return the results as a Results subclass instance

        Raises
        ------
        DALServiceError
           for errors connecting to or communicating with the service
        DALQueryError
           for errors either in the input query syntax or other user errors
           detected by the service
        DALFormatError
           for errors parsing the VOTable response
        """
        return DALResults(self.execute_votable(), self.queryurl)

    def execute_raw(self):
        """
        submit the query and return the raw VOTable XML as a string.

        Raises
        ------
        DALServiceError
           for errors connecting to or communicating with the service
        DALQueryError
           for errors in the input query syntax
        """
        f = self.execute_stream()
        out = None
        try:
            # drain the whole response; the stream is closed regardless of
            # whether the read succeeds
            out = f.read()
        finally:
            f.close()
        return out

    def execute_stream(self):
        """
        Submit the query and return the raw VOTable XML as a file stream.
        No exceptions are raised here because non-2xx responses might still
        contain payload.
        """
        r = self.submit()

        try:
            r.raise_for_status()
        except requests.RequestException as ex:
            # save for later use
            self._ex = ex
        finally:
            # deliberate: returning from finally suppresses any in-flight
            # exception so callers can still parse an error payload; the
            # saved self._ex is surfaced later by execute_votable()
            return r.raw

    def submit(self):
        """
        does the actual request
        """
        url = self.queryurl
        # copy this dict subclass's items into a plain mapping of
        # query parameters for requests
        params = {k: v for k, v in self.items()}

        r = requests.get(url, params = params, stream = True)
        # make reads transparently decompress (e.g. gzip) the raw stream
        r.raw.read = functools.partial(r.raw.read, decode_content=True)
        return r

    def execute_votable(self):
        """
        Submit the query and return the results as an AstroPy votable instance.
        As this is the level where qualified error messages are available,
        they are raised here instead of in the underlying execute_stream.
Returns ------- astropy.io.votable.tree.Table an Astropy votable Table instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALFormatError for errors parsing the VOTable response DALQueryError for errors in the input query syntax See Also -------- astropy.io.votable DALServiceError DALFormatError DALQueryError """ try: return self._votableparse(self.execute_stream().read) except DALAccessError: raise except Exception as e: if self._ex: e = self._ex raise DALServiceError( str(e), e.response.status_code, e, self.queryurl) else: raise DALServiceError.from_except(e, self.queryurl) @property def queryurl(self): """ The URL that encodes the current query. This is the URL that the execute functions will use if called next. """ return self.baseurl def _votableparse(self, fobj): """ takes a file like object and returns a VOTable instance override in subclasses for service specifica. """ return votableparse(fobj, _debug_python_based_parser=True) class DALResults(object): """ Results from a DAL query. It provides random access to records in the response. Alternatively, it can provide results via a Cursor (compliant with the Python Database API) or an iterable. """ def __init__(self, votable, url=None): """ initialize the cursor. This constructor is not typically called by directly applications; rather an instance is obtained from calling a DALQuery's execute(). Parameters ---------- votable : str the service response parsed into an astropy.io.votable.tree.VOTableFile instance. 
url : str the URL that produced the response Raises ------ DALFormatError if the response VOTable does not contain a response table See Also -------- DALFormatError """ self._url = url self._status = self._findstatus(votable) if self._status[0].upper() not in ("OK", "OVERFLOW"): raise DALQueryError(self._status[1], self._status[0], url) self.votable = self._findresultstable(votable) if not self.votable: raise DALFormatError( reason="VOTable response missing results table", url=url) self._fldnames = [field.name for field in self.votable.fields] if not self._fldnames: raise DALFormatError( reason="response table missing column descriptions.", url=url) self._infos = self._findinfos(votable) def _findresultstable(self, votable): # this can be overridden to specialize for a particular DAL protocol res = self._findresultsresource(votable) if not res or len(res.tables) < 1: return None return res.tables[0] def _findresultsresource(self, votable): # this can be overridden to specialize for a particular DAL protocol if len(votable.resources) < 1: return None for res in votable.resources: if res.type.lower() == "results": return res return votable.resources[0] def _findstatus(self, votable): # this can be overridden to specialize for a particular DAL protocol # look first in the result resource res = self._findresultsresource(votable) if res: # should be a RESOURCE/INFO info = self._findstatusinfo(res.infos) if info: return (info.value, info.content) # if not there, check inside first table if len(res.tables) > 0: info = self._findstatusinfo(res.tables[0].infos) if info: return (info.value, info.content) # otherwise, look just below the root element info = self._findstatusinfo(votable.infos) if info: return (info.value, info.content) # assume it's okay return ("OK", "QUERY_STATUS not specified") def _findstatusinfo(self, infos): # this can be overridden to specialize for a particular DAL protocol for info in infos: if info.name == "QUERY_STATUS": return info def 
_findinfos(self, votable): # this can be overridden to specialize for a particular DAL protocol infos = {} res = self._findresultsresource(votable) for info in res.infos: infos[info.name] = info.value for info in votable.infos: infos[info.name] = info.value return infos @property def queryurl(self): """ the URL query that produced these results. None is returned if unknown """ return self._url @property def table(self): """ the astropy table object """ return self.votable.to_table() def __len__(self): """ return the record count """ return len(self.table) def __getitem__(self, indx): """ if indx is a string, r[indx] will return the field with the name of indx; if indx is an integer, r[indx] will return the indx-th record. """ if isinstance(indx, int): return self.getrecord(indx) else: return self.getcolumn(indx) @property def fieldnames(self): """ return the names of the columns. These are the names that are used to access values from the dictionaries returned by getrecord(). They correspond to the column name. """ return self._fldnames[:] @property def fielddescs(self): """ return the full metadata the columns as a list of Field instances, a simple object with attributes corresponding the the VOTable FIELD attributes, namely: name, id, type, ucd, utype, arraysize, description """ return self.votable.fields def fieldname_with_ucd(self, ucd): """ return the field name that has a given UCD value or None if the UCD is not found. """ try: iterchain = ( self.getdesc(fieldname) for fieldname in self.fieldnames) iterchain = (field for field in iterchain if field.ucd == ucd) return next(iterchain).name except StopIteration: return None def fieldname_with_utype(self, utype): """ return the field name that has a given UType value or None if the UType is not found. 
""" try: iterchain = ( self.getdesc(fieldname) for fieldname in self.fieldnames) iterchain = (field for field in iterchain if field.utype == utype) return next(iterchain).name except StopIteration: return None def getcolumn(self, name): """ return a numpy array containing the values for the column with the given name """ if name not in self.fieldnames: raise KeyError("No such column name: " + name) return self.votable.array[name] def getrecord(self, index): """ return a representation of a result record that follows dictionary semantics. The keys of the dictionary are those returned by this instance's fieldnames attribute.The returned record may have additional accessor methods for getting at stardard DAL response metadata (e.g. ra, dec). Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- Record a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. See Also -------- Record """ return Record(self, index) def getvalue(self, name, index): """ return the value of a record attribute--a value from a column and row. Parameters ---------- name : str the name of the attribute (column) index : int the zero-based index of the record Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. 
KeyError if name is not a recognized column name """ return self.getrecord(index)[name] def getdesc(self, name): """ return the field description for the record attribute (column) with the given name Parameters ---------- name : str the name of the attribute (column), chosen from those in fieldnames() Returns ------- object with attributes (name, id, datatype, unit, ucd, utype, arraysize) which describe the column """ if name not in self._fldnames: raise KeyError(name) return self.votable.get_field_by_id_or_name(name) def __iter__(self): """ return a python iterable for stepping through the records in this result """ def _iter(res): pos = 0 while True: try: out = res.getrecord(pos) except IndexError: break yield out pos += 1 return _iter(self) def cursor(self): """ return a cursor that is compliant with the Python Database API's :class:`.Cursor` interface. See PEP 249 for details. """ from .dbapi2 import Cursor return Cursor(self) class Record(dict): """ one record from a DAL query result. The column values are accessible as dictionary items. It also provides special added functions for accessing the dataset the record corresponds to. Subclasses may provide additional functions for access to service type-specific data. """ def __init__(self, results, index): self._results = results super(Record, self).__init__() self.update(zip( results.fieldnames, results.votable.array.data[index] )) def get(self, key, default=None, decode=False): """ This method mimics the dict get method and adds a decode parameter to allow decoding of binary strings. """ out = super(Record, self).get(key, default) if decode and type(out) == six.binary_type: out = out.decode('utf-8') return out def getbyucd(self, ucd, default=None, decode=False): """ return the column with the given ucd. """ return self.get( self._results.fieldname_with_ucd(ucd), default, decode) def getbyutype(self, utype, default=None, decode=False): """ return the column with the given utype. 
def getdataurl(self):
    """
    Return the URL for retrieving the dataset described by this record,
    or None if no access-URL column exists.

    The access URL column is found by scanning the result's field
    descriptions for a field whose utype contains "Access.Reference",
    or whose ucd contains both "meta.dataset" and "meta.ref.url".
    """
    # BUG FIX: fieldnames is a list (see DALResults.fieldnames), not a
    # dict; calling .items() on it raised AttributeError and made this
    # method unusable.
    for fieldname in self._results.fieldnames:
        field = self._results.getdesc(fieldname)
        if (field.utype and "Access.Reference" in field.utype) or \
                (field.ucd and "meta.dataset" in field.ucd
                 and "meta.ref.url" in field.ucd):
            out = self[fieldname]
            # VOTable data may deliver the URL as bytes; normalize to str
            if isinstance(out, bytes):
                out = out.decode('utf-8')
            return out
    return None
_dsname_no = 0  # counter of generated "base-N.ext" names; used by make_dataset_filename

def make_dataset_filename(self, dir=".", base=None, ext=None):
    """
    create a viable pathname in a given directory for saving the dataset
    available via getdataset().  The pathname that is returned is
    guaranteed not to already exist (under single-threaded conditions).

    This implementation will first try combining the base name with the
    file extension (with a dot).  If this file already exists in the
    directory, a name that appends an integer suffix ("-#") to the base
    before joining with the extension will be tried.  The integer will
    be incremented until a non-existent filename is created.

    Parameters
    ----------
    dir : str
        the directory to save the dataset under.  This must already exist.
    base : str
        a basename to use as the base of the filename.  If None, the
        result of ``suggest_dataset_basename()`` will be used.
    ext : str
        the filename extension to use.  If None, the result of
        ``suggest_extension()`` will be used.

    Raises
    ------
    ValueError
        if ``dir`` is not provided or is not a directory
    IOError
        if ``dir`` does not exist
    """
    if not dir:
        raise ValueError("make_dataset_filename(): no dir parameter provided")
    if not os.path.exists(dir):
        raise IOError("{0}: directory not found".format(dir))
    if not os.path.isdir(dir):
        raise ValueError("{0}: not a directory".format(dir))

    if not base:
        base = self.suggest_dataset_basename()
    if not ext:
        ext = self.suggest_extension("dat")

    # be efficient when writing a bunch of files into the same directory
    # in succession
    n = self._dsname_no

    # BUG FIX: the original lambda ignored its argument and closed over n,
    # which only worked by accident because every call site happened to
    # pass the current value of n.
    def mkpath(i):
        return os.path.join(dir, "{0}-{1}.{2}".format(base, i, ext))

    if n > 0:
        # find the last file written of the form, base-n.ext
        while n > 0 and not os.path.exists(mkpath(n)):
            n -= 1
        if n > 0:
            n += 1
    if n == 0:
        # never wrote a file of form, base-n.ext; try base.ext
        path = os.path.join(dir, "{0}.{1}".format(base, ext))
        if not os.path.exists(path):
            return path
        n += 1
    # find next available name
    while os.path.exists(mkpath(n)):
        n += 1
    self._dsname_no = n
    return mkpath(n)
class Iter(object):
    """iterator over the records of a result set, in index order."""

    def __init__(self, res):
        self.resultset = res
        self.pos = 0

    def __iter__(self):
        return self

    def __next__(self):
        try:
            out = self.resultset.getrecord(self.pos)
            self.pos += 1
            return out
        except IndexError:
            # past the last record
            raise StopIteration()

    next = __next__  # python 2 compatibility


class Upload(object):
    """
    This class represents a DALI Upload as described in
    http://www.ivoa.net/documents/DALI/20161101/PR-DALI-1.1-20161101.html#tth_sEc3.4.5
    """

    def __init__(self, name, content):
        """
        Initialise the Upload object with the given parameters

        Parameters
        ----------
        name : str
            Tablename for use in queries
        content : object
            If its a file-like object, a string pointing to a local file,
            a `DALResults` object or a astropy table, `is_inline` will be
            true and it will expose a file-like object under `fileobj`

            Otherwise it exposes a URI under `uri`
        """
        try:
            self._is_file = os.path.isfile(content)
        except Exception:
            # content is not path-like (e.g. a file object or a table)
            self._is_file = False
        self._is_fileobj = hasattr(content, "read")
        self._is_table = isinstance(content, Table)
        self._is_resultset = isinstance(content, DALResults)

        self._inline = any((
            self._is_file,
            self._is_fileobj,
            self._is_table,
            self._is_resultset,
        ))

        self._name = name
        self._content = content

    @property
    def is_inline(self):
        """
        True if the upload can be inlined
        """
        return self._inline

    @property
    def name(self):
        return self._name

    def fileobj(self):
        """
        A file-like object for a local resource

        Raises
        ------
        ValueError
            if theres no valid local resource
        """
        if not self.is_inline:
            raise ValueError(
                "Upload {name} doesn't refer to a local resource".format(
                    name=self.name))

        # serialize tables / result sets to an in-memory votable
        if isinstance(self._content, Table):
            from io import BytesIO
            fileobj = BytesIO()
            self._content.write(output=fileobj, format="votable")
            fileobj.seek(0)
            return fileobj
        elif isinstance(self._content, DALResults):
            from io import BytesIO
            fileobj = BytesIO()
            table = self._content.table
            table.write(output=fileobj, format="votable")
            fileobj.seek(0)
            return fileobj

        # BUG FIX: the original did `try: fileobj = open(...)
        # finally: return fileobj`, where the return inside finally
        # silently swallowed every exception raised by open().  Branch
        # explicitly instead: open local files, pass file-like objects
        # through unchanged.
        if self._is_file:
            return open(self._content)
        return self._content

    def uri(self):
        """
        The URI pointing to the result
        """
        # TODO: use a async job base class instead of hasattr for inspection
        if hasattr(self._content, "result_uri"):
            self._content.raise_if_error()
            uri = self._content.result_uri
        else:
            uri = six.text_type(self._content)

        return uri

    def query_part(self):
        """
        The query part for use in DALI requests
        """
        if self.is_inline:
            value = "{name},param:{name}"
        else:
            value = "{name},{uri}"

        return value.format(name=self.name, uri=self.uri())


class UploadList(list):
    """
    This class extends the native python list with utility functions
    for upload handling
    """

    @classmethod
    def fromdict(cls, dct):
        """
        Constructs a upload list from a dictionary with
        table_name: content
        """
        return cls(Upload(key, value) for key, value in dct.items())

    def param(self):
        """
        Returns a string suitable for use in UPLOAD parameters
        """
        return ";".join(upload.query_part() for upload in self)
Returns ------- str the recommended extension without a preceding '.', or the value of ``default`` if no recommendation could be made. """ if not mimetype: return default if type(mimetype) == six.text_type: mimetype = mimetype.encode('utf-8') if mimetype.endswith(b"/fits") or mimetype.endswith(b'/x-fits'): return "fits" if mimetype == b"image/jpeg": return "jpg" m = _votable_mt_re.match(mimetype) # r'^(\w+)/(x-)?votable(\+\w+)' if m: return "xml" m = _image_mt_re.match(mimetype) # r'^image/(\w+)' if m: out = m.group(1).lower() if six.PY3: out = out.decode('utf-8') return out m = _text_mt_re.match(mimetype) # r'^text/(\w+)' if m: if m.group(1) == b'html' or m.group(1) == b'xml': out = m.group(1) if six.PY3: out = out.decode('utf-8') return out return "txt" return default class DALAccessError(Exception): """ a base class for failures while accessing a DAL service """ _defreason = "Unknown service access error" def __init__(self, reason=None, url=None): """ initialize the exception with an error message Parameters ---------- reason : str a message describing the cause of the error url : str the query URL that produced the error """ if not reason: reason = self._defreason super(DALAccessError, self).__init__(reason) self._reason = reason self._url = url @classmethod def _typeName(cls, exc): return re.sub(r"'>$", '', re.sub(r"<(type|class) '(.*\.)?", '', str(type(exc)))) def __str__(self): return self._reason def __repr__(self): return "{0}: {1}".format(self._typeName(self), self._reason) @property def reason(self): """ a string description of what went wrong """ return self._reason @property def url(self): """ the URL that produced the error. If None, the URL is unknown or unset """ return self._url class DALProtocolError(DALAccessError): """ a base exception indicating that a DAL service responded in an erroneous way. This can be either an HTTP protocol error or a response format error; both of these are handled by separate subclasses. 
This base class captures an underlying exception clause. """ _defreason = "Unknown DAL Protocol Error" def __init__(self, reason=None, cause=None, url=None): """ initialize with a string message and an optional HTTP response code Parameters ---------- reason : str a message describing the cause of the error code : int the HTTP error code (as an integer) cause : str an exception issued as the underlying cause. A value of None indicates that no underlying exception was caught. url : str the query URL that produced the error """ super(DALProtocolError, self).__init__(reason, url) self._cause = cause @property def cause(self): """ a string description of what went wrong """ return self._cause class DALFormatError(DALProtocolError): """ an exception indicating that a DAL response contains fatal format errors. This would include XML or VOTable format errors. """ _defreason = "Unknown VOTable Format Error" def __init__(self, cause=None, url=None, reason=None): """ create the exception Parameters ---------- cause : str an exception issued as the underlying cause. A value of None indicates that no underlying exception was caught. url the query URL that produced the error reason a message describing the cause of the error """ if cause and not reason: reason = "{0}: {0}".format(DALAccessError._typeName(cause), str(cause)) super(DALFormatError, self).__init__(reason, cause, url) class DALServiceError(DALProtocolError): """ an exception indicating a failure communicating with a DAL service. Most typically, this is used to report DAL queries that result in an HTTP error. """ _defreason = "Unknown service error" def __init__(self, reason=None, code=None, cause=None, url=None): """ initialize with a string message and an optional HTTP response code Parameters ---------- reason : str a message describing the cause of the error code : int the HTTP error code (as an integer) cause : str an exception issued as the underlying cause. 
@classmethod
def from_except(cls, exc, url=None):
    """
    create and return DALServiceError exception appropriate
    for the given exception that represents the underlying cause.
    """
    # reject non-exceptions up front
    if not isinstance(exc, Exception):
        raise TypeError("from_except: expected Exception")

    if isinstance(exc, requests.exceptions.RequestException):
        # HTTP-level failure: carry the status code along when available
        try:
            status = exc.response.status_code
        except AttributeError:
            status = 0
        return DALServiceError(str(exc), status, exc, url)

    # any other exception: label the message with its type name
    label = "{0}: {1}".format(cls._typeName(exc), str(exc))
    return DALServiceError(label, cause=exc, url=url)
class PyvoUserWarning(AstropyUserWarning):
    pass


# routines used by DALService describe to format metadata
_parasp = re.compile(r"(?:[ \t\r\f\v]*\n){2,}[ \t\r\f\v]*")
# NOTE(review): the HTML "<p ...>" alternative of this pattern was
# destroyed when HTML entities in this file were decoded, leaving a
# broken empty group; restored here -- confirm against upstream pyvo.
_ptag = re.compile(r"\s*(?:<p\s*/?>)|(?:\\para(?:\\ )*)\s*")


def para_format_desc(text, width=78):
    """
    format description text into paragraphs suitable for display in the
    shell.  That is, the output will be one or more plain text paragraphs
    of the prescribed width (78 characters, the default).  The text will
    be split into separate paragraphs where there occurs (1) two or more
    consecutive carriage returns, (2) an HTML paragraph tag, or (3) a
    LaTeX paragraph control sequence.  It will attempt other substitutions
    of HTML and LaTeX markup that sometimes find their way into resource
    descriptions.
    """
    paras = _parasp.split(text)
    for i in range(len(paras)):
        # rotate: pop each raw paragraph off the front and push the
        # cleaned-up (possibly several) paragraphs onto the back
        para = paras.pop(0)
        for p in _ptag.split(para):
            if len(p) > 0:
                p = "\n".join(
                    (l.strip() for l in
                     (t for t in p.splitlines() if len(t) > 0))
                )
                paras.append(deref_markup(p))

    return "\n\n".join(
        (textwrap.fill(p, width) for p in paras)
    )

def escape(term):
    """
    escapes a term for use in ADQL
    """
    # ADQL string literals double any embedded single quote
    return str(term).replace("'", "''")


def search(url, query, language="ADQL", maxrec=None, uploads=None,
           **keywords):
    """
    submit a Table Access query that returns rows matching the criteria
    given.

    Parameters
    ----------
    url : str
        the base URL of the query service.
    query : str, dict
        The query string / parameters
    language : str
        specifies the query language, default ADQL.
        useful for services which allow to use the backend query language.
    maxrec : int
        specifies the maximum records to return. defaults to the service
        default
    uploads : dict
        a mapping from table names to file like objects containing a votable

    Returns
    -------
    TAPResults
        a container holding a table of matching catalog records

    Raises
    ------
    DALServiceError
        for errors connecting to or communicating with the service.
    DALQueryError
        if the service responds with an error, including a query syntax
        error.
    """
    # delegate to a one-shot service instance
    return TAPService(url).search(query, language, maxrec, uploads,
                                  **keywords)
Raises ------ DALServiceError if the property is not exposed by the service """ try: for capa in self.capabilities: if "outputLimit" in capa: return capa["outputLimit"]["hard"]["value"] except KeyError: pass raise DALServiceError("Hard limit not exposed by the service") @property def upload_methods(self): """ a list of upload methods in form of IVOA identifiers """ _upload_methods = [] for capa in self.capabilities: if "uploadMethods" in capa: _upload_methods += capa["uploadMethods"] return _upload_methods def run_sync( self, query, language="ADQL", maxrec=None, uploads=None, **keywords): """ runs sync query and returns its result Parameters ---------- query : str The query language : str specifies the query language, default ADQL. useful for services which allow to use the backend query language. maxrec : int specifies the maximum records to return. defaults to the service default uploads : dict a mapping from table names to file like objects containing a votable Returns ------- TAPResults the query result See Also -------- TAPResults """ return self.create_query( query, language=language, maxrec=maxrec, uploads=uploads, **keywords).execute() #alias for service discovery search = run_sync def run_async( self, query, language="ADQL", maxrec=None, uploads=None, **keywords): """ runs async query and returns its result Parameters ---------- query : str, dict the query string / parameters language : str specifies the query language, default ADQL. useful for services which allow to use the backend query language. maxrec : int specifies the maximum records to return. 
defaults to the service default uploads : dict a mapping from table names to file like objects containing a votable Returns ------- TAPResult the query instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response See Also -------- AsyncTAPJob """ job = AsyncTAPJob.create( self.baseurl, query, language, maxrec, uploads, **keywords) job = job.run().wait() job.raise_if_error() result = job.fetch_result() job.delete() return result def submit_job(self, query, language="ADQL", maxrec=None, uploads=None): """ submit a async query without starting it and returns a AsyncTAPJob object Parameters ---------- query : str the query string / parameters language : str specifies the query language, default ADQL. useful for services which allow to use the backend query language. maxrec : int specifies the maximum records to return. defaults to the service default uploads : dict a mapping from table names to file like objects containing a votable Returns ------- AsyncTAPJob the query instance See Also -------- AsyncTAPJob """ return AsyncTAPJob.create( self.baseurl, query, language, maxrec, uploads) def create_query( self, query=None, mode="sync", language="ADQL", maxrec=None, uploads=None, **keywords): """ create a query object that constraints can be added to and then executed. The input arguments will initialize the query with the given values. Parameters ---------- baseurl : str the base URL for the TAP service query : str the query string / parameters mode : str the query mode (sync | async). default "sync" language : str specifies the query language, default ADQL. useful for services which allow to use the backend query language. maxrec : int specifies the maximum records to return. defaults to the service default. 
uploads : dict a mapping from table names to objects containing a votable. """ return TAPQuery( self.baseurl, query, mode, language, maxrec, uploads, **keywords) class AsyncTAPJob(object): """ This class represents a UWS TAP Job. """ _job = {} @classmethod def create( cls, baseurl, query, language="ADQL", maxrec=None, uploads=None, **keywords): """ creates a async tap job on the server under `baseurl` Parameters ---------- baseurl : str the TAP baseurl query : str the query string language : str specifies the query language, default ADQL. useful for services which allow to use the backend query language. maxrec : int specifies the maximum records to return. defaults to the service default uploads : dict a mapping from table names to file like objects containing a votable """ query = TAPQuery( baseurl, query, mode="async", language=language, maxrec=maxrec, uploads=uploads, **keywords) response = query.submit() job = cls(response.url) return job def __init__(self, url): """ initialize the job object with the given url and fetch the remote values Parameters ---------- url : str the job url """ self._url = url self._update() def __enter__(self): """ Enters the context """ return self def __exit__(self, exc_type, exc_val, exc_tb): """ Exits the context. The job is silently deleted. 
""" try: self.delete() except Exception: pass def _update(self, wait_for_statechange=False): """ updates local job infos with remote values """ try: if wait_for_statechange: response = requests.get(self.url, stream=True, params={ "WAIT": "-1" }) else: response = requests.get(self.url, stream=True) response.raise_for_status() except requests.exceptions.RequestException as ex: raise DALServiceError.from_except(ex, self.url) # requests doesn't decode the content by default response.raw.read = partial(response.raw.read, decode_content=True) self._job.update(uws.parse_job(response.raw)) @property def job(self): """ all up-to-date uws job infos as dictionary """ #keep it up to date self._update() return self._job @property def url(self): """ the job url """ return self._url @property def job_id(self): """ the job id """ return self._job["jobId"] @property def phase(self): """ the current query phase """ self._update() return self._job["phase"] @property def execution_duration(self): """ maximum execution duration. read-write """ self._update() return self._job["executionDuration"] @execution_duration.setter def execution_duration(self, value): """ maximum execution duration. read-write Parameters ---------- value : int seconds after the query execution is aborted """ try: response = requests.post( "{}/executionduration".format(self.url), data={"EXECUTIONDURATION": str(value)}) response.raise_for_status() except requests.exceptions.RequestException as ex: raise DALServiceError.from_except(ex, self.url) self._job["executionDuration"] = value @property def destruction(self): """ datetime after which the job results are deleted automatically. read-write """ self._update() return self._job["destruction"] @destruction.setter def destruction(self, value): """ datetime after which the job results are deleted automatically. read-write Parameters ---------- value : datetime datetime after which the job results are deleted automatically """ try: #is string? 
easier to ask for forgiveness value = datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ") except ValueError: pass try: response = requests.post( "{}/destruction".format(self.url), data={"DESTRUCTION": value.strftime("%Y-%m-%dT%H:%M:%SZ")}) response.raise_for_status() except requests.exceptions.RequestException as ex: raise DALServiceError.from_except(ex, self.url) self._job["destruction"] = value @property def quote(self): """ estimated runtime """ self._update() return self._job["quote"] @property def owner(self): """ job owner (if applicable) """ self._update() return self._job["owner"] @property def result_uris(self): """ a list of the last result uri's """ return self._job["results"] @property def result_uri(self): """ the first result uri """ try: return next(iter(self.result_uris.values())) except StopIteration: return None @property def uws_version(self): self._update() return self._job["version"] def run(self): """ starts the job / change phase to RUN """ try: response = requests.post( '{}/phase'.format(self.url), data={"PHASE": "RUN"}) response.raise_for_status() except requests.exceptions.RequestException as ex: raise DALServiceError.from_except(ex, self.url) return self def abort(self): """ aborts the job / change phase to ABORT """ try: response = requests.post( '{}/phase'.format(self.url), data={"PHASE": "ABORT"}) response.raise_for_status() except requests.exceptions.RequestException as ex: raise DALServiceError.from_except(ex, self.url) return self def wait( self, phases=None): """ waits for the job to reach the given phases. 
Parameters ---------- phases : list phases to wait for Raises ------ DALServiceError if the job is in a state that won't lead to an result """ if not phases: phases = {"COMPLETED", "ABORTED", "ERROR"} interval = 1.0 increment = 1.2 active_phases = { "QUEUED", "EXECUTING", "RUN", "COMPLETED", "ERROR", "UNKNOWN"} while True: self._update(wait_for_statechange=True) # use the cached value cur_phase = self._job["phase"] if cur_phase not in active_phases: raise DALServiceError( "Cannot wait for job completion. Job is not active!") if cur_phase in phases: break # fallback for uws 1.0 if LooseVersion(self._job["version"]) < LooseVersion("1.1"): sleep(interval) interval = min(120, interval * increment) return self def delete(self): """ deletes the job. this object will become invalid. """ try: response = requests.post(self.url, data={"ACTION": "DELETE"}) response.raise_for_status() except requests.exceptions.RequestException as ex: raise DALServiceError.from_except(ex, self.url) self._url = None def raise_if_error(self): """ raise a exception if theres an error Raises ------ DALQueryError if theres an error """ if self.phase in ["ERROR", "ABORTED"]: raise DALQueryError( self._job.get("message", "Unknown Query Error"), self.phase, self.url) def fetch_result(self): """ returns the result votable if query is finished """ try: response = requests.get(self.result_uri, stream=True) response.raise_for_status() except requests.RequestException as ex: self._update() # we propably got a 404 because query error. raise with error msg self.raise_if_error() raise DALServiceError.from_except(ex, self.url) response.raw.read = partial( response.raw.read, decode_content=True) return TAPResults(votableparse(response.raw.read), url=self.result_uri) class TAPQuery(DALQuery): """ a class for preparing an query to an TAP service. Query constraints are added via its service type-specific methods. The various execute() functions will submit the query and return the results. 
The base URL for the query, which controls where the query will be sent when one of the execute functions is called, is typically set at construction time; however, it can be updated later via the :py:attr:`~pyvo.dal.query.DALQuery.baseurl` to send a configured query to another service. In addition to the search constraint attributes described below, search parameters can be set generically by name via dict semantics. The typical function for submitting the query is ``execute()``; however, alternate execute functions provide the response in different forms, allowing the caller to take greater control of the result processing. """ def __init__( self, baseurl, query, mode="sync", language="ADQL", maxrec=None, uploads=None, **keywords): """ initialize the query object with the given parameters Parameters ---------- baseurl : str the TAP baseurl query : str the query string mode : str the query mode (sync | async). default "sync" language : str the query language. defaults to ADQL maxrec : int the amount of records to fetch uploads : dict Files to upload. Uses table name as key and table content as value. """ baseurl = baseurl.rstrip("?") super(TAPQuery, self).__init__(baseurl, **keywords) self._mode = mode if mode in ("sync", "async") else "sync" self._uploads = UploadList.fromdict(uploads or {}) self["REQUEST"] = "doQuery" self["LANG"] = language if maxrec: self["MAXREC"] = maxrec self["QUERY"] = query if self._uploads: self["UPLOAD"] = self._uploads.param() @property def queryurl(self): return '{baseurl}/{mode}'.format(baseurl=self.baseurl, mode=self._mode) def execute_stream(self): """ submit the query and return the raw VOTable XML as a file stream Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors in the input query syntax """ # theres nothing to execute in non-sync queries if self._mode != "sync": raise DALServiceError( "Cannot execute a non-synchronous query. 
Use submit instead") return super(TAPQuery, self).execute_stream() def execute(self): """ submit the query and return the results as a TAPResults instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ return TAPResults(self.execute_votable(), url=self.queryurl) def submit(self): """ Does the request part of the TAP query. This function is separated from response parsing because async queries return no votable but behave like sync queries in terms of request. It returns the requests response. """ url = self.queryurl files = { upload.name: upload.fileobj() for upload in self._uploads if upload.is_inline } response = requests.post( url, data=self, stream=True, files=files) # requests doesn't decode the content by default response.raw.read = partial(response.raw.read, decode_content=True) return response class TAPResults(DALResults, DatalinkMixin): """ The list of matching images resulting from an image (SIA) query. Each record contains a set of metadata that describes an available image matching the query constraints. The number of records in the results is available via the :py:attr:`nrecs` attribute or by passing it to the Python built-in ``len()`` function. This class supports iterable semantics; thus, individual records (in the form of :py:class:`~pyvo.dal.query.Record` instances) are typically accessed by iterating over an ``TAPResults`` instance. Alternatively, records can be accessed randomly via :py:meth:`getrecord` or through a Python Database API (v2) Cursor (via :py:meth:`~pyvo.dal.query.DALResults.cursor`). Column-based data access is possible via the :py:meth:`~pyvo.dal.query.DALResults.getcolumn` method. 
``TAPResults`` is essentially a wrapper around an Astropy :py:mod:`~astropy.io.votable` :py:class:`~astropy.io.votable.tree.Table` instance where the columns contain the various metadata describing the images. One can access that VOTable directly via the :py:attr:`~pyvo.dal.query.DALResults.votable` attribute. Thus, when one retrieves a whole column via :py:meth:`~pyvo.dal.query.DALResults.getcolumn`, the result is a Numpy array. Alternatively, one can manipulate the results as an Astropy :py:class:`~astropy.table.table.Table` via the following conversion: >>> table = results.table ``SIAResults`` supports the array item operator ``[...]`` in a read-only context. When the argument is numerical, the result is an :py:class:`~pyvo.dal.query.Record` instance, representing the record at the position given by the numerical index. If the argument is a string, it is interpreted as the name of a column, and the data from the column matching that name is returned as a Numpy array. """ def __init__(self, votable, **kwargs): """ Initialize datalinks """ super(TAPResults, self).__init__(votable, **kwargs) self._init_datalinks(votable) @property def infos(self): """ return the info element as dictionary """ return getattr(self, "_infos", {}) @property def query_status(self): """ return the query status """ return getattr(self, "_infos", {}).get("QUERY_STATUS", None) def getrecord(self, index): """ return a representation of a tap result record that follows dictionary semantics. The keys of the dictionary are those returned by this instance's fieldnames attribute. The returned record has additional image-specific properties Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- REc a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. 
See Also -------- Record """ return Record(self, index) pyvo-0.6.1+dfsg.orig/pyvo/dal/scs.py0000644000175000017500000004120613125165225016625 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ A module for searching remote source and observation catalogs A Simple Cone Search (SCS) service allows a client to search for records in a source or observation catalog whose positions are within some minimum distance of a search position (i.e. within a specified "cone" on the sky). This module provides an interface for accessing such services. It is implemented as a specialization of the DAL Query interface. The ``search()`` function provides a simple interface to a service, returning an SCSResults instance as its results which represents the matching records from the catalog. The SCSResults supports access to and iterations over the individual records; these are provided as SCSRecord instances, which give easy access to key metadata in the response, including the ICRS position of the matched source or observation. This module also features the SCSQuery class that provides an interface for building up and remembering a query. The SCSService class can represent a specific service available at a URL endpoint. """ from __future__ import ( absolute_import, division, print_function, unicode_literals) from astropy.coordinates import SkyCoord from astropy.units import Unit, Quantity from .query import DALResults, DALQuery, DALService, Record from .datalink import DatalinkMixin __all__ = ["search", "SCSService", "SCSQuery", "SCSResults", "SCSRecord"] def search(url, pos, radius=1.0, verbosity=2, **keywords): """ submit a simple Cone Search query that requests objects or observations whose positions fall within some distance from a search position. Parameters ---------- url : str the base URL of the query service. pos : astropy.coordinates.SkyCoord a SkyCoord instance defining the position of the center of the circular search region. 
converted if it's a iterable containing scalars, assuming icrs degrees. radius : `~astropy.units.Quantity` or float a Quantity instance defining the radius of the circular search region, in degrees. converted if it is another unit. verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columsn, and 3 means as many columns as are available. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SCSResults a container holding a table of matching catalog records Raises ------ DALServiceError for errors connecting to or communicating with the service. DALQueryError if the service responds with an error, including a query syntax error. See Also -------- SCSResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ return SCSService(url).search(pos, radius, verbosity, **keywords) class SCSService(DALService): """ a representation of a Cone Search service """ def __init__(self, baseurl): """ instantiate a Cone Search service Parameters ---------- baseurl : str the base URL for submitting search queries to the service. """ super(SCSService, self).__init__(baseurl) def search(self, pos, radius=1.0, verbosity=2, **keywords): """ submit a simple Cone Search query that requests objects or observations whose positions fall within some distance from a search position. Parameters ---------- pos : astropy.coordinates.SkyCoord a SkyCoord instance defining the position of the center of the circular search region. converted if it's a iterable containing scalars, assuming icrs degrees. radius : `~astropy.units.Quantity` or float a Quantity instance defining the radius of the circular search region, in degrees. converted if it is another unit. 
verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columns, 3 means as many columns as are available. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SCSResults a container holding a table of matching catalog records Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error. See Also -------- SCSResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ return self.create_query(pos, radius, verbosity, **keywords).execute() def create_query(self, pos=None, radius=None, verbosity=None, **keywords): """ create a query object that constraints can be added to and then executed. The input arguments will initialize the query with the given values. Parameters ---------- pos : astropy.coordinates.SkyCoord a SkyCoord instance defining the position of the center of the circular search region. converted if it's a iterable containing scalars, assuming icrs degrees. radius : `~astropy.units.Quantity` or float a Quantity instance defining the radius of the circular search region, in degrees. converted if it is another unit. verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columns, 3 means as many columns as are available. **keywords : additional case insensitive parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. 
Returns ------- SCSQuery the query instance See Also -------- SCSQuery """ return SCSQuery(self.baseurl, pos, radius, verbosity, **keywords) class SCSQuery(DALQuery): """ a class for preparing an query to a Cone Search service. Query constraints are added via its service type-specific methods. The various execute() functions will submit the query and return the results. The base URL for the query, which controls where the query will be sent when one of the execute functions is called, is typically set at construction time; however, it can be updated later via the :py:attr:`~pyvo.dal.query.DALQuery.baseurl` to send a configured query to another service. In addition to the search constraint attributes described below, search parameters can be set generically by name via dict semantics. The typical function for submitting the query is ``execute()``; however, alternate execute functions provide the response in different forms, allowing the caller to take greater control of the result processing. """ def __init__( self, baseurl, pos=None, radius=None, verbosity=None, **keywords): """ initialize the query object with a baseurl and the given parameters Parameters ---------- pos : astropy.coordinates.SkyCoord a SkyCoord instance defining the position of the center of the circular search region. converted if it's a iterable containing scalars, assuming icrs degrees. radius : `~astropy.units.Quantity` or float a Quantity instance defining the radius of the circular search region, in degrees. converted if it is another unit. verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columns, 3 means as many columns as are available. 
""" super(SCSQuery, self).__init__(baseurl) if pos: self.pos = pos if radius: self.radius = radius if verbosity: self.verbosity = verbosity self.update({key.upper(): value for key, value in keywords.items()}) @property def pos(self): """ the position of the center of the circular search region as a `~astropy.coordinates.SkyCoord` instance. """ return getattr(self, "_pos", None) @pos.setter def pos(self, val): setattr(self, "_pos", val) # use the astropy, luke if not isinstance(val, SkyCoord): pos_ra, pos_dec = val # assume ICRS degrees val = SkyCoord(ra=pos_ra, dec=pos_dec, unit="deg", frame="icrs") self["RA"] = val.icrs.ra.deg self["DEC"] = val.icrs.dec.deg @pos.deleter def pos(self): delattr(self, "_pos") del self["RA"] del self["DEC"] @property def radius(self): """ the radius of the circular region around pos as a `~astropy.units.Quantity` instance. """ return getattr(self, "_radius", None) @radius.setter def radius(self, val): setattr(self, "_radius", val) if not isinstance(val, Quantity): # assume degrees val = val * Unit("deg") try: if len(val): raise ValueError( "radius may be specified using exactly one value") except TypeError: # len 1 pass self["SR"] = val.to(Unit("deg")).value @radius.deleter def radius(self): delattr(self, "_radius") del self["SR"] @property def verbosity(self): """ an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columsn, 3 means as many columns as are available. 
""" return getattr(self, "_verbosity", None) @verbosity.setter def verbosity(self, val): setattr(self, "_verbosity", val) self["VERB"] = val @verbosity.deleter def verbosity(self): delattr(self, "_verbosity") del self["VERB"] def execute(self): """ submit the query and return the results as a SCSResults instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ return SCSResults(self.execute_votable(), url=self.queryurl) class SCSResults(DALResults, DatalinkMixin): """ The list of matching catalog records resulting from a catalog (SCS) query. Each record contains a set of metadata that describes a source or observation within the requested circular region (i.e. a "cone"). The number of records in the results is available via the :py:attr:`nrecs attribute or by passing it to the Python built-in ``len()`` function. This class supports iterable semantics; thus, individual records (in the form of :py:class:`~pyvo.dal.scs.SCSRecord` instances) are typically accessed by iterating over an ``SCSResults`` instance. >>> results = pyvo.conesearch(url, pos=[12.24, -13.1], radius=0.1) >>> for src in results: ... print("{0}: {1} {2}".format(src.id, src.ra, src.dec)) Alternatively, records can be accessed randomly via :py:meth:`getrecord` or through a Python Database API (v2) Cursor (via :py:meth:`~pyvo.dal.query.DALResults.cursor`). Column-based data access is possible via the :py:meth:`~pyvo.dal.query.DALResults.getcolumn` method. ``SCSResults`` is essentially a wrapper around an Astropy :py:mod:`~astropy.io.votable` :py:class:`~astropy.io.votable.tree.Table` instance where the columns contain the various metadata describing the images. One can access that VOTable directly via the :py:attr:`~pyvo.dal.query.DALResults.votable` attribute. 
Thus, when one retrieves a whole column via :py:meth:`~pyvo.dal.query.DALResults.getcolumn`, the result is a Numpy array. Alternatively, one can manipulate the results as an Astropy :py:class:`~astropy.table.table.Table` via the following conversion: >>> table = results.votable.to_table() ``SCSResults`` supports the array item operator ``[...]`` in a read-only context. When the argument is numerical, the result is an :py:class:`~pyvo.dal.scs.SCSRecord` instance, representing the record at the position given by the numerical index. If the argument is a string, it is interpreted as the name of a column, and the data from the column matching that name is returned as a Numpy array. """ def __init__(self, votable, **kwargs): """ Initialize datalinks """ super(SCSResults, self).__init__(votable, **kwargs) self._init_datalinks(votable) def _findresultsresource(self, votable): if len(votable.resources) < 1: return None return votable.resources[0] def _findstatus(self, votable): # this is specialized according to the Conesearch standard # look first in the preferred location: just below the root VOTABLE info = self._findstatusinfo(votable.infos) if info: return (info.name, info.value) # look next in the result resource res = self._findresultsresource(votable) if res: # look for RESOURCE/INFO info = self._findstatusinfo(res.infos) if info: return (info.name, info.value) # if not there, check for a PARAM info = self._findstatusinfo(res.params) if info: return (info.name, info.value) # last resort: VOTABLE/DEFINITIONS/PARAM # NOT SUPPORTED BY astropy; parser has been configured to # raise W22 as exception instead. # assume it's okay return ("OK", "Successful Response") def _findstatusinfo(self, infos): # this can be overridden to specialize for a particular DAL protocol for info in infos: if info.name == "Error": return info def getrecord(self, index): """ return a representation of a conesearch result record that follows dictionary semantics. 
The keys of the dictionary are those returned by this instance's fieldnames attribute. The returned record has the following additional properties: id, ra, dec Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- SCSRecord a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. See Also -------- Record """ return SCSRecord(self, index) class SCSRecord(Record): """ a dictionary-like container for data in a record from the results of an Cone Search (SCS) query, describing a matching source or observation. The commonly accessed metadata which are stadardized by the SCS protocol are available as attributes. All metadata, particularly non-standard metadata, are acessible via the ``get(`` *key* ``)`` function (or the [*key*] operator) where *key* is table column name. """ @property def pos(self): """ the position of the object or observation described by this record. """ return SkyCoord( ra=self.getbyucd("POS_EQ_RA_MAIN"), dec=self.getbyucd("POS_EQ_DEC_MAIN"), unit="deg", frame="icrs") @property def id(self): """ return the identifying name of the object or observation described by this record. """ return self.getbyucd("ID_MAIN") pyvo-0.6.1+dfsg.orig/pyvo/dal/dbapi2.py0000644000175000017500000001616713125165225017206 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ An implementation of the Database API v2.0 interface to DAL VOTable responses. This only supports read-only access. 
""" from __future__ import print_function, division from .query import Iter from astropy.extern import six if six.PY3: StandardError = Exception apilevel = "2.0" threadsafety = 2 paramstyle = "n/a" __all__ = "STRING BINARY NUMBER DATETIME ROWID".split() class Error(StandardError): """ DB-API base exception """ pass class Warning(StandardError): """ DB-API warning """ pass class InterfaceError(Error): """ DB-API exception indicating an error related to the database interface rather than the database itself. """ pass class DatabaseError(Error): """ DB-API exception indicating an error related to the database. """ pass class DataError(DatabaseError): """ DB-API exception indicating an error while processing data (e.g. divide by zero, numeric value out-of-range, etc.) """ pass class OperationalError(DatabaseError): """ DB-API exception indicating an error related to the database's operation and not necessarily under the control of the programmer. """ pass class IntegrityError(DatabaseError): """ DB-API exception indicating an inconsistancy in the database integrity. """ pass class InternalError(DatabaseError): """ DB-API exception indicating an internal error that might indicate that a connection or cursor is no longer valid. """ pass class ProgrammingError(DatabaseError): """ DB-API exception indicating an erroneous request (e.g. 
column not found) """ pass class NotSupportedError(DatabaseError): """ DB-API exception indicating a request is not supported """ pass class TypeObject(object): def __init__(self,*values): self._values = values @property def id(self): return self._values[0] def __eq__(self, other): if not isinstance(other, TypeObject): return False if other.id in self._values: return True return self.id in other._values def __ne__(self, other): return not self.__eq__(other) STRING = TypeObject(0) BINARY = TypeObject(1) NUMBER = TypeObject(2) DATETIME = TypeObject(3, STRING.id) ROWID = TypeObject(4, NUMBER.id) def connect(source): raise NotSupportedError("Connection objects not supported") class Cursor(Iter): """ A class used to walk through a query response table row by row, accessing the contents of each record (row) of the table. This class implements the Python Database API. """ def __init__(self, results): """Create a cursor instance. The constructor is not typically called by directly applications; rather an instance is obtained from calling a DalQuery's execute(). """ super(Cursor, self).__init__(results) self._description = self._mkdesc() self._rowcount = len(self.resultset) self._arraysize = 1 def _mkdesc(self): flds = self.resultset.fieldnames out = [] for name in flds: fld = self.resultset.getdesc(name) typ = STRING if fld.datatype in \ "short int long float double floatComplex doubleComplex boolean".split(): typ = NUMBER elif fld.datatype in "char unicodeChar unsignedByte".split(): typ = STRING out.append( (name, typ) ) return tuple(out) @property def description(self): """ a read-only sequence of 2-item seqences. Each seqence describes a column in the results, giving its name and type_code. """ return self._description @property def rowcount(self): """ the number of rows in the result (read-only) """ return self._rowcount @property def arraysize(self): """ the number of rows that will be returned by returned by a call to fetchmany(). 
This defaults to 1, but can be changed. """ return self._arraysize @arraysize.setter def arraysize(self, value): if not value: value = 1 self._arraysize = value def infos(self): """Return any INFO elements in the VOTable as a dictionary. Returns ------- dict : A dictionary with each element corresponding to a single INFO, representing the INFO as a name:value pair. """ return self.resultset._infos def fetchone(self): """Return the next row of the query response table. Returns ------- tuple : The response is a tuple wherein each element is the value of the corresponding table field. """ try: rec = self.next() out = [] for name in self.resultset.fieldnames: out.append(rec[name]) return out except StopIteration: return None def fetchmany(self, size=None): """Fetch the next block of rows from the query result. Parameters ---------- size : int The number of rows to return (default: cursor.arraysize). Returns ------- list of tuples : A list of tuples, one per row. An empty sequence is returned when no more rows are available. If a DictCursor is used then the output consists of a list of dictionaries, one per row. """ if not size: size = self.arraysize out = [] for _ in range(size): out.append(self.fetchone()) return out def fetchall(self): """Fetch all remaining rows from the result set. Returns ------- list of tuples : A list of tuples, one per row. An empty sequence is returned when no more rows are available. If a DictCursor is used then the output consists of a list of dictionaries, one per row. """ out = [] for _ in range(self._rowcount - self.pos): out.append(self.fetchone()) return out def scroll(self, value, mode="relative"): """Move the row cursor. Parameters ---------- value : str The number of rows to skip or the row number to position to. mode : str Either "relative" for a relative skip (default), or "absolute" to position to a row by its absolute index within the result set (zero-indexed). 
""" if mode == "absolute": if value > 0: self.pos = value else: raise DataError("row number not valid:" + str(value)) elif mode == "relative": self.pos += value def close(self): """Close the cursor object and free all resources. This implementation does nothing. It is provided for compliance with the Python Database API. """ # this can remain implemented as "pass" pass pyvo-0.6.1+dfsg.orig/pyvo/dal/sia.py0000644000175000017500000007645013125165225016622 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ A module for searching for images in a remote archive. A Simple Image Access (SIA) service allows a client to search for images in an archive whose field of view overlaps with a given rectangular region on the sky. The service responds to a search query with a table in which each row represents an image that is available for download. The columns provide metadata describing each image and one column in particular provides the image's download URL (also called the *access reference*, or *acref*). Some SIA services act as a cut-out service; in this case, the query result is a table of images whose field of view matches the requested region and which will be created when accessed via the download URL. This module provides an interface for accessing an SIA service. It is implemented as a specialization of the DAL Query interface. The ``search()`` function support the simplest and most common types of queries, returning an SIAResults instance as its results which represents the matching images from the archive. The SIAResults supports access to and iterations over the individual records; these are provided as SIARecord instances, which give easy access to key metadata in the response, such as the position of the image's center, the image format, the size and shape of the image, and its download URL. The ``SIAService`` class can represent a specific service available at a URL endpoint. 
""" from __future__ import ( absolute_import, division, print_function, unicode_literals) import re from astropy.extern import six from astropy.coordinates import SkyCoord from astropy.time import Time from astropy.units import Quantity, Unit from .query import DALResults, DALQuery, DALService, Record, mime2extension from .datalink import DatalinkMixin __all__ = ["search", "SIAService", "SIAQuery", "SIAResults", "SIARecord"] def search( url, pos, size=1.0, format='all', intersect="overlaps", verbosity=2, **keywords): """ submit a simple SIA query that requests images overlapping a given region Parameters ---------- url : str the base URL for the SIA service pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the rectangular search region. assuming icrs decimal degrees if unit is not specified. size : `~astropy.units.Quantity` class or 2 element sequence of float the full rectangular size of the search region along the RA and Dec directions. converted if it's a iterable containing scalars, assuming decimal degrees. size : `~astropy.units.Quantity` class or scalar float the size of the rectangular region around pos. assuming icrs decimal degrees if unit is not specified. format : str the image format(s) of interest. "all" (default) indicates all available formats; "graphic" indicates graphical images (e.g. jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata; "image/\\*" indicates a particular image format where * can have values like "fits", "jpeg", "png", etc. 
intersect : str a token indicating how the returned images should intersect with the search region; recognized values include: ========= ====================================================== COVERS select images that completely cover the search region ENCLOSED select images that are complete enclosed by the region OVERLAPS select any image that overlaps with the search region CENTER select images whose center is within the search region ========= ====================================================== verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columsn, 3 means as many columns as are available. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SIAResults a container holding a table of matching image records Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error. See Also -------- SIAResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ service = SIAService(url) return service.search(pos, size, format, intersect, verbosity, **keywords) class SIAService(DALService): """ a representation of an SIA service """ def __init__(self, baseurl): """ instantiate an SIA service Parameters ---------- baseurl : str the base URL for submitting search queries to the service. 
""" super(SIAService, self).__init__(baseurl) def _get_metadata(self): """ the metadata resource element """ if not hasattr(self, "_metadata"): query = self.create_query(format='metadata') metadata = query.execute_votable() setattr(self, "_metadata", metadata) try: setattr(self, "_metadata_resource", metadata.resources[0]) except IndexError: setattr(self, "_metadata_resource", None) @property def description(self): """ the service description. """ self._get_metadata() try: return getattr(self, "_metadata", None).description except AttributeError: return None @property def params(self): """ the service parameters. """ self._get_metadata() try: return getattr(self, "_metadata_resource", None).params except AttributeError: return None def search( self, pos, size=1.0, format='all', intersect="overlaps", verbosity=2, **keywords): """ submit a simple SIA query to this service with the given constraints. This method is provided for a simple but typical SIA queries. For more complex queries, one should create an SIAQuery object via create_query() Parameters ---------- pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the rectangular search region. assuming icrs decimal degrees if unit is not specified. size : `~astropy.units.Quantity` class or 2 element sequence of float the full rectangular size of the search region along the RA and Dec directions. converted if it's a iterable containing scalars, assuming decimal degrees. size : `~astropy.units.Quantity` class or scalar float the size of the rectangular region around pos. assuming icrs decimal degrees if unit is not specified. format : str the image format(s) of interest. "all" (default) indicates all available formats; "graphic" indicates graphical images (e.g. 
jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata; "image/\\*" indicates a particular image format where * can have values like "fits", "jpeg", "png", etc. intersect : str a token indicating how the returned images should intersect with the search region; recognized values include: ========= ====================================================== COVERS select images that completely cover the search region ENCLOSED select images that are complete enclosed by the region OVERLAPS select any image that overlaps with the search region CENTER select images whose center is within the search region ========= ====================================================== verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columsn, 3 means as many columns as are available. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SIAResults a container holding a table of matching catalog records Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError if the service responds with an error, including a query syntax error. See Also -------- SIAResults pyvo.dal.query.DALServiceError pyvo.dal.query.DALQueryError """ return self.create_query( pos, size, format, intersect, verbosity, **keywords).execute() def create_query( self, pos=None, size=None, format=None, intersect=None, verbosity=None, **keywords ): """ create a query object that constraints can be added to and then executed. The input arguments will initialize the query with the given values. Parameters ---------- pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats the position of the center of the rectangular search region. 
assuming icrs decimal degrees if unit is not specified. size : `~astropy.units.Quantity` class or 2 element sequence of float the full rectangular size of the search region along the RA and Dec directions. converted if it's a iterable containing scalars, assuming decimal degrees. size : `~astropy.units.Quantity` class or scalar float the size of the rectangular region around pos. assuming icrs decimal degrees if unit is not specified. format : str the image format(s) of interest. "all" (default) indicates all available formats; "graphic" indicates graphical images (e.g. jpeg, png, gif; not FITS); "metadata" indicates that no images should be returned--only an empty table with complete metadata; "image/\\*" indicates a particular image format where * can have values like "fits", "jpeg", "png", etc. intersect : str a token indicating how the returned images should intersect with the search region; recognized values include: ========= ====================================================== COVERS select images that completely cover the search region ENCLOSED select images that are complete enclosed by the region OVERLAPS select any image that overlaps with the search region CENTER select images whose center is within the search region ========= ====================================================== verbosity : int an integer value that indicates the volume of columns to return in the result table. 0 means the minimum set of columsn, 3 means as many columns as are available. **keywords : additional parameters can be given via arbitrary case insensitive keyword arguments. Where there is overlap with the parameters set by the other arguments to this function, these keywords will override. Returns ------- SIAQuery the query instance See Also -------- SIAQuery """ return SIAQuery( self.baseurl, pos, size, format, intersect, verbosity, **keywords) class SIAQuery(DALQuery): """ a class for preparing an query to an SIA service. 
class SIAQuery(DALQuery):
    """
    a class for preparing a query to an SIA service.  Query constraints
    are added via its service type-specific properties.  The various
    execute() functions will submit the query and return the results.

    The base URL for the query, which controls where the query will be sent
    when one of the execute functions is called, is typically set at
    construction time; however, it can be updated later via the
    :py:attr:`~pyvo.dal.query.DALQuery.baseurl` to send a configured query
    to another service.

    In addition to the search constraint attributes described below, search
    parameters can be set generically by name via dict semantics.

    The typical function for submitting the query is ``execute()``; however,
    alternate execute functions provide the response in different forms,
    allowing the caller to take greater control of the result processing.
    """

    def __init__(
            self, baseurl, pos=None, size=None, format=None, intersect=None,
            verbosity=None, **keywords
    ):
        """
        initialize the query object with a baseurl and the given parameters

        Parameters
        ----------
        baseurl : str
            the base URL for the SIA service
        pos : `~astropy.coordinates.SkyCoord` class or sequence of two floats
            the position of the center of the rectangular search region.
            assuming icrs decimal degrees if unit is not specified.
        size : `~astropy.units.Quantity` class, scalar float, or 2 element
            sequence of float
            the full rectangular size of the search region along the RA and
            Dec directions.  converted if it's an iterable containing
            scalars, assuming decimal degrees.
        format : str
            the image format(s) of interest.  "all" (default) indicates all
            available formats; "graphic" indicates graphical images (e.g.
            jpeg, png, gif; not FITS); "metadata" indicates that no images
            should be returned--only an empty table with complete metadata;
            "image/\\*" indicates a particular image format where * can have
            values like "fits", "jpeg", "png", etc.
        intersect : str
            a token indicating how the returned images should intersect with
            the search region; recognized values include:

            ========= ======================================================
            COVERS    select images that completely cover the search region
            ENCLOSED  select images that are complete enclosed by the region
            OVERLAPS  select any image that overlaps with the search region
            CENTER    select images whose center is within the search region
            ========= ======================================================

        verbosity : int
            an integer value that indicates the volume of columns to return
            in the result table.  0 means the minimum set of columns, 3
            means as many columns as are available.
        **keywords :
            additional parameters can be given via arbitrary case
            insensitive keyword arguments.  Where there is overlap with the
            parameters set by the other arguments to this function, these
            keywords will override.
        """
        super(SIAQuery, self).__init__(baseurl, **keywords)

        if pos:
            self.pos = pos

        if size:
            self.size = size

        if format:
            self.format = format

        if intersect:
            self.intersect = intersect

        if verbosity:
            self.verbosity = verbosity

    @property
    def pos(self):
        """
        the position of the center of the rectangular search region, as it
        was given to the setter (a `~astropy.coordinates.SkyCoord` or a
        two-element sequence of decimal degrees).
        """
        # NOTE(review): the raw value is stored before conversion, so this
        # returns whatever the caller passed (possibly a plain tuple), not
        # necessarily a SkyCoord; the sibling SSA tests rely on getting the
        # original value back.
        return getattr(self, "_pos", None)

    @pos.setter
    def pos(self, val):
        setattr(self, "_pos", val)

        # use the astropy, luke
        if not isinstance(val, SkyCoord):
            try:
                pos_ra, pos_dec = val
            except TypeError:
                raise ValueError(
                    "pos may be specified using exactly two values")

            # assume ICRS degrees
            val = SkyCoord(ra=pos_ra, dec=pos_dec, unit="deg", frame="icrs")

        # always serialize in ICRS decimal degrees
        self["POS"] = "{ra},{dec}".format(
            ra=val.icrs.ra.deg, dec=val.icrs.dec.deg)

    @pos.deleter
    def pos(self):
        delattr(self, "_pos")
        del self["POS"]

    @property
    def size(self):
        """
        the size of the rectangular region around pos as a
        `~astropy.units.Quantity` instance.
        """
        return getattr(self, "_size", None)

    @size.setter
    def size(self, val):
        setattr(self, "_size", val)

        if not isinstance(val, Quantity):
            # assume degrees
            val = val * Unit("deg")

        try:
            if len(val) > 2:
                raise ValueError(
                    "size may be specified using either one or two values")
        except TypeError:
            # scalar Quantity has no len(); a single value is fine
            pass

        try:
            self["SIZE"] = ",".join(
                str(deg) for deg in val.to(Unit("deg")).value)
        except TypeError:
            # scalar value is not iterable
            self["SIZE"] = str(val.to(Unit("deg")).value)

    @size.deleter
    def size(self):
        delattr(self, "_size")
        del self["SIZE"]

    @property
    def format(self):
        """
        the image format(s) of interest.  "all" (default) indicates all
        available formats; "graphic" indicates graphical images (e.g. jpeg,
        png, gif; not FITS); "metadata" indicates that no images should be
        returned--only an empty table with complete metadata; "image/\\*"
        indicates a particular image format where * can have values like
        "fits", "jpeg", "png", etc.
        """
        return getattr(self, "_format", None)

    @format.setter
    def format(self, val):
        setattr(self, "_format", val)

        # wrap a single (text or bytes) value in a list; isinstance also
        # covers str subclasses, which `type(val) in (...)` missed
        if isinstance(val, (six.text_type, six.binary_type)):
            val = [val]

        # decode any bytes elements first: joining bytes with a text ","
        # raises TypeError under Python 3
        self["FORMAT"] = ",".join(
            _.decode("utf-8").upper() if isinstance(_, six.binary_type)
            else _.upper()
            for _ in val)

    @format.deleter
    def format(self):
        delattr(self, "_format")
        del self["FORMAT"]

    @property
    def intersect(self):
        """
        a token indicating how the returned images should intersect with the
        search region; recognized values include:

        ========= ======================================================
        COVERS    select images that completely cover the search region
        ENCLOSED  select images that are complete enclosed by the region
        OVERLAPS  select any image that overlaps with the search region
        CENTER    select images whose center is within the search region
        ========= ======================================================
        """
        return getattr(self, "_intersect", None)

    @intersect.setter
    def intersect(self, val):
        setattr(self, "_intersect", val)
        self["INTERSECT"] = val.upper()

    @intersect.deleter
    def intersect(self):
        delattr(self, "_intersect")
        del self["INTERSECT"]

    @property
    def verbosity(self):
        """
        an integer value that indicates the volume of columns to return in
        the result table.  0 means the minimum set of columns, 3 means as
        many columns as are available.
        """
        return getattr(self, "_verbosity", None)

    @verbosity.setter
    def verbosity(self, val):
        setattr(self, "_verbosity", val)
        self["VERB"] = val

    @verbosity.deleter
    def verbosity(self):
        delattr(self, "_verbosity")
        del self["VERB"]

    def execute(self):
        """
        submit the query and return the results as a SIAResults instance

        Raises
        ------
        DALServiceError
            for errors connecting to or communicating with the service
        DALQueryError
            for errors either in the input query syntax or other user errors
            detected by the service
        DALFormatError
            for errors parsing the VOTable response
        """
        return SIAResults(self.execute_votable(), url=self.queryurl)
``SIAResults`` is essentially a wrapper around an Astropy :py:mod:`~astropy.io.votable` :py:class:`~astropy.io.votable.tree.Table` instance where the columns contain the various metadata describing the images. One can access that VOTable directly via the :py:attr:`~pyvo.dal.query.DALResults.votable` attribute. Thus, when one retrieves a whole column via :py:meth:`~pyvo.dal.query.DALResults.getcolumn`, the result is a Numpy array. Alternatively, one can manipulate the results as an Astropy :py:class:`~astropy.table.table.Table` via the following conversion: >>> table = results.votable.to_table() ``SIAResults`` supports the array item operator ``[...]`` in a read-only context. When the argument is numerical, the result is an :py:class:`~pyvo.dal.sia.SIARecord` instance, representing the record at the position given by the numerical index. If the argument is a string, it is interpreted as the name of a column, and the data from the column matching that name is returned as a Numpy array. """ def __init__(self, votable, **kwargs): """ Initialize datalinks """ super(SIAResults, self).__init__(votable, **kwargs) self._init_datalinks(votable) def getrecord(self, index): """ return a representation of a sia result record that follows dictionary semantics. The keys of the dictionary are those returned by this instance's fieldnames attribute. The returned record has additional image-specific properties Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- SIARecord a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. See Also -------- Record """ return SIARecord(self, index) class SIARecord(Record): """ a dictionary-like container for data in a record from the results of an image (SIA) search, describing an available image. 
class SIARecord(Record):
    """
    a dictionary-like container for data in a record from the results of an
    image (SIA) search, describing an available image.

    The commonly accessed metadata which are standardized by the SIA
    protocol are available as attributes.  If the metadatum accessible via
    an attribute is not available, the value of that attribute will be None.
    All metadata, including non-standard metadata, are accessible via the
    ``get(`` *key* ``)`` function (or the [*key*] operator) where *key* is
    table column name.
    """

    @property
    def pos(self):
        """
        the position of the object or observation described by this record,
        as an ICRS `~astropy.coordinates.SkyCoord`.
        """
        return SkyCoord(
            ra=self.getbyucd("POS_EQ_RA_MAIN"),
            dec=self.getbyucd("POS_EQ_DEC_MAIN"),
            unit="deg", frame="icrs")

    # Image Metadata
    @property
    def title(self):
        """
        the title of the image
        """
        return self.getbyucd("VOX:Image_Title", decode=True)

    @property
    def instr(self):
        """
        the name of the instrument (or instruments) that produced the data
        that went into this image.
        """
        return self.getbyucd("INST_ID", decode=True)

    @property
    def dateobs(self):
        """
        the modified Julian date (MJD) of the mid-point of the observational
        data that went into the image, as an astropy.time.Time instance, or
        None when the value is missing.
        """
        dateobs = self.getbyucd("VOX:Image_MJDateObs")
        # NOTE(review): a legitimate MJD of exactly 0 would also be treated
        # as missing by this truthiness test -- confirm that is intended.
        if dateobs:
            return Time(dateobs, format="mjd")
        else:
            return None

    @property
    def naxes(self):
        """
        the number of axes in this image.
        """
        return self.getbyucd("VOX:Image_Naxes")

    @property
    def naxis(self):
        """
        the lengths of the sides along each axis, in pix, as a astropy
        Quantity pix
        """
        return self.getbyucd("VOX:Image_Naxis") * Unit("pix")

    @property
    def scale(self):
        """
        the scale of the pixels in each image axis, in degrees/pixel, as a
        astropy Quantity deg / pix
        """
        return self.getbyucd("VOX:Image_Scale") * (Unit("deg") / Unit("pix"))

    @property
    def format(self):
        """
        the format of the image
        """
        return self.getbyucd("VOX:Image_Format", decode=True)

    # Coordinate System Metadata
    @property
    def coord_frame(self):
        """
        the coordinate system reference frame, one of the following:
        "ICRS", "FK5", "FK4", "ECL", "GAL", and "SGAL".
        """
        return self.getbyucd("VOX:STC_CoordRefFrame", decode=True)

    @property
    def coord_equinox(self):
        """
        the equinox of the used coordinate system
        """
        return self.getbyucd("VOX:STC_CoordEquinox")

    @property
    def coord_projection(self):
        """
        the celestial projection (TAN / ARC / SIN / etc.)
        """
        return self.getbyucd("VOX:WCS_CoordProjection", decode=True)

    @property
    def coord_refpixel(self):
        """
        the image pixel coordinates of the WCS reference pixel
        """
        return self.getbyucd("VOX:WCS_CoordRefPixel")

    @property
    def coord_refvalue(self):
        """
        the world coordinates of the WCS reference pixel.
        """
        return self.getbyucd("VOX:WCS_CoordRefValue")

    @property
    def cdmatrix(self):
        """
        the WCS CD matrix defining the scale and rotation (among other
        things) of the image.  ordered as CD[i,j] = [0,0], [0,1], [1,0],
        [1,1].
        """
        return self.getbyucd("VOX:WCS_CDMatrix").reshape((2, 2))

    # Spectral Bandpass Metadata
    @property
    def bandpass_id(self):
        """
        the bandpass by name (e.g., "V", "SDSS_U", "K", "K-Band", etc.)
        """
        return self.getbyucd("VOX:BandPass_ID", decode=True)

    @property
    def bandpass_unit(self):
        """
        the astropy unit used to represent spectral values.
        """
        sia = self.getbyucd("VOX:BandPass_Unit", decode=True)

        if sia:
            return Unit(sia)
        else:
            # dimensionless
            return Unit("")

    @property
    def bandpass_refvalue(self):
        """
        the characteristic (reference) wavelength, frequency or energy for
        the bandpass model, as an astropy Quantity of bandpass_unit
        """
        return Quantity(
            self.getbyucd("VOX:BandPass_RefValue"), self.bandpass_unit)

    @property
    def bandpass_hilimit(self):
        """
        the upper limit of the bandpass, as an astropy Quantity in
        bandpass_unit
        """
        return Quantity(
            self.getbyucd("VOX:BandPass_HiLimit"), self.bandpass_unit)

    @property
    def bandpass_lolimit(self):
        """
        the lower limit of the bandpass, as an astropy Quantity in
        bandpass_unit
        """
        return Quantity(
            self.getbyucd("VOX:BandPass_LoLimit"), self.bandpass_unit)

    # Processing Metadata
    @property
    def pixflags(self):
        """
        the type of processing done by the image service to produce an
        output image pixel

        a string of one or more of the following values:

        * C -- The image pixels were copied from a source image without
          change, as when an atlas image or cutout is returned.
        * F -- The image pixels were computed by resampling an existing
          image, e.g., to rescale or reproject the data, and were filtered
          by an interpolator.
        * X -- The image pixels were computed by the service directly from a
          primary data set hence were not filtered by an interpolator.
        * Z -- The image pixels contain valid flux (intensity) values, e.g.,
          if the pixels were resampled a flux-preserving interpolator was
          used.
        * V -- The image pixels contain some unspecified visualization of
          the data, hence are suitable for display but not for numerical
          analysis.
        """
        return self.getbyucd("VOX:Image_PixFlags", decode=True)

    # Access Metadata
    @property
    def acref(self):
        """
        the URL that can be used to retrieve the image
        """
        return self.getbyucd("VOX:Image_AccessReference", decode=True)

    @property
    def acref_ttl(self):
        """
        the minimum time to live in seconds of the access reference
        """
        return self.getbyucd("VOX:Image_AccessRefTTL")

    @property
    def filesize(self):
        """
        the (estimated) size of the image in bytes
        """
        return self.getbyucd("VOX:Image_FileSize")

    def getdataurl(self):
        """
        return the URL contained in the access URL column which can be used
        to retrieve the dataset described by this record.  None is returned
        if no such column exists.
        """
        return self.acref

    def suggest_dataset_basename(self):
        """
        return a default base filename that the dataset available via
        ``getdataset()`` can be saved as.  This function is
        specialized for a particular service type this record originates
        from so that it can be used by ``cachedataset()`` via
        ``make_dataset_filename()``.
        """
        out = self.title
        if isinstance(out, six.binary_type):
            out = out.decode('utf-8')

        if not out:
            out = "image"
        else:
            # collapse whitespace runs into single underscores
            out = re.sub(r'\s+', '_', out.strip())
        return out

    def suggest_extension(self, default=None):
        """
        returns a recommended filename extension for the dataset described
        by this record.  Typically, this would look at the column describing
        the format and choose an extension accordingly.
        """
        return mime2extension(self.format, default)
""" return mime2extension(self.format, default) pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/0000755000175000017500000000000013125165225016622 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_sia_net.py0000644000175000017500000000521713125165225021662 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.sia """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp import unittest, pdb import pyvo.dal.query as dalq import pyvo.dal.sia as sia # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable.tree import VOTableFile from astropy.tests.helper import pytest, remote_data neat = "http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=neat&" @remote_data class NeatSIAExecuteTest(unittest.TestCase): imfile = "testimg.fits" def setUp(self): self.tearDown() def tearDown(self): if os.path.exists(self.imfile): # pass os.remove(self.imfile) def testExecute(self): q = sia.SIAQuery(neat) q.pos = (0, 0) q.size = (1.0, 1.0) q.format = "all" q["NAXIS"] = ",".join(str(_) for _ in (75, 75)) results = q.execute() self.assertIsInstance(results, sia.SIAResults) self.assertEquals(len(results), 5) rec = results.getrecord(0) self.assertEquals(rec.naxis, (75, 75)) def testSearch(self): srv = sia.SIAService(neat) results = srv.search(pos=(0, 0), size=(1.0, 1.0)) self.assert_(isinstance(results, sia.SIAResults)) self.assertEquals(len(results), 5) def testSia(self): results = sia.search(neat, pos=(0, 0), size=(0.25, 0.25)) self.assert_(isinstance(results, sia.SIAResults)) self.assertEquals(len(results), 5) rec = results.getrecord(0) self.assertEquals(rec.pos.ra.value, 0.0) self.assertEquals(rec.pos.dec.value, 0.0) self.assertEquals(rec.title, b"neat") self.assertIsNone(rec.dateobs) self.assertEquals(rec.naxes, 2) self.assertEquals(rec.naxis, (300, 300)) self.assertIsNone(rec.instr) 
self.assertIsNone(rec.format) qurl = rec.getdataurl() self.assert_(qurl is not None and len(qurl) > 0) self.assert_(not os.path.exists(self.imfile)) rec.cachedataset(self.imfile) self.assert_(os.path.exists(self.imfile)) if rec.format == b"image/fits": with open(self.imfile) as fits: hdr = fits.read(20) self.assert_(hdr.startswith("SIMPLE ="), "Not a FITS image?") __all__ = "NeatSIAExecuteTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": unittest.main() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_ssa.py0000644000175000017500000003171113125165225021024 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.ssa """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, glob, tempfile, random, time import unittest, pdb import pyvo.dal.query as dalq import pyvo.dal.ssa as ssa import pyvo.dal.dbapi2 as daldbapi from astropy.time import Time # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from . 
import testserver testdir = os.path.dirname(sys.argv[0]) if not testdir: testdir = "tests" ssaresultfile = "data/jhu-ssa.xml" errresultfile = "data/error-ssa.xml" testserverport = 8084 testserverport += 40 testserverport += random.randint(0,9) class SSAServiceTest(unittest.TestCase): baseurl = "http://localhost/ssa" def testCtor(self): self.srv = ssa.SSAService(self.baseurl) def testProps(self): self.testCtor() self.assertEquals(self.srv.baseurl, self.baseurl) try: self.srv.baseurl = "goober" self.fail("baseurl not read-only") except AttributeError: pass def testCreateQuery(self): self.testCtor() q = self.srv.create_query() self.assert_(isinstance(q, ssa.SSAQuery)) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 1) def testCreateQueryWithArgs(self): self.testCtor() q = self.srv.create_query(pos=(0.0, 0.0), diameter=1.0, format="all") self.assert_(isinstance(q, ssa.SSAQuery)) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 4) self.assertEquals(q.pos, (0.0, 0.0)) self.assertEquals(q.diameter, 1.0) self.assertEquals(q.format, "all") self.assertEquals(q["REQUEST"], "queryData") self.assertAlmostEquals(q["POS"], "0.0,0.0") self.assertAlmostEquals(q["SIZE"], 1.0) self.assertEquals(q["FORMAT"], "all") def testCreateQueryWithKws(self): self.testCtor() q = self.srv.create_query(APERTURE=0.00028) self.assertAlmostEquals(0.00028, q["APERTURE"]) q.pos = (0.0, 0.0) q.diameter = 1.0 self.assertEquals(q.pos, (0.0, 0.0)) self.assertEquals(q.diameter, 1.0) self.assertEquals(len(q.keys()), 4) self.assertAlmostEquals(q['APERTURE'], 0.00028) self.assertEquals(q["REQUEST"], "queryData") self.assertAlmostEquals(q["POS"], "0.0,0.0") self.assertAlmostEquals(q["SIZE"], 1.0) q = self.srv.create_query( pos=(0.0, 0.0), diameter=1.0, format="all", APERTURE=0.00028) self.assert_(isinstance(q, ssa.SSAQuery)) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 5) self.assertEquals(q.pos, (0,0)) 
self.assertEquals(q.diameter, 1.0) self.assertEquals(q.format, "all") self.assertAlmostEquals(q['APERTURE'], 0.00028) self.assertEquals(q["REQUEST"], "queryData") self.assertAlmostEquals(q["POS"], "0.0,0.0") self.assertAlmostEquals(q["SIZE"], 1.0) self.assertEquals(q["FORMAT"], "all") class SSAQueryTest(unittest.TestCase): baseurl = "http://localhost/ssa" def testCtor(self): self.q = ssa.SSAQuery(self.baseurl) self.assertEquals(self.q.baseurl, self.baseurl) def testPos(self): self.testCtor() self.assertIsNone(self.q.pos) self.q.pos = (120.445, 30.0) self.assertEquals(self.q.pos, (120.445, 30.0)) del self.q.pos self.assert_(self.q.pos is None) self.q.pos = (180.2, -30.1) self.assertEquals(self.q.pos, (180.2, -30.1)) self.q.pos = [170.2, -20.1] self.assertEquals(self.q.pos, [170.2, -20.1]) def testSize(self): self.testCtor() self.assert_(self.q.diameter is None) self.q.diameter = 1.5 self.assertEquals(self.q.diameter, 1.5) del self.q.diameter self.assert_(self.q.diameter is None) self.q.diameter = 0.5 self.assertEquals(self.q.diameter, 0.5) def testProps(self): self.testCtor() self.assert_(self.q.format is None) self.q.format = "all" self.assertEquals(self.q.format, "all") del self.q.format self.assert_(self.q.format is None) def testFormat(self): self.testCtor() self.assert_(self.q.format is None) self.q.format = "all" self.assertEquals(self.q.format, "all") del self.q.format self.assert_(self.q.format is None) # check all special values for val in "compliant native graphic votable fits xml metadata".split(): self.q.format = val self.assertEquals(self.q.format, val) # make sure MIME-type value is accepted self.q.format = "image/jpeg" self.assertEquals(self.q.format, "image/jpeg") # check for list values self.q.format = "fits,image/jpeg" def testCreateURL(self): self.testCtor() self.q.pos = (102.5511, 24.312) qurl = self.q.queryurl self.assertEquals(qurl, self.baseurl) self.q.diameter = 1.0 qurl = self.q.queryurl self.assertAlmostEquals(self.q["POS"], 
"102.5511,24.312") self.assertAlmostEquals(self.q["SIZE"], 1.0) class SSAResultsTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(ssaresultfile) self.tbl = votableparse(resultfile) def testCtor(self): self.r = ssa.SSAResults(self.tbl) self.assertIsInstance(self.r._fldnames, list) self.assertIsNotNone(self.r.votable) self.assertEquals(len(self.r), 35) def testGetRecord(self): self.testCtor() rec = self.r.getrecord(0) self.assertIsInstance(rec, ssa.SSARecord) rec = self.r.getrecord(1) self.assertIsInstance(rec, ssa.SSARecord) class SSAResultsErrorTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(errresultfile) self.tbl = votableparse(resultfile) def testError(self): try: res = ssa.SSAResults(self.tbl) self.fail("Failed to detect error response") except dalq.DALQueryError as ex: self.assertEquals(ex.label, "ERROR") self.assertEquals(ex.reason, "Forced Fail") class SSARecordTest(unittest.TestCase): acref = "http://vaosa-vm1.aoc.nrao.edu/ivoa-dal/JhuSsapServlet?REQUEST=getData&FORMAT=votable&PubDID=ivo%3A%2F%2Fjhu%2Fsdss%2Fdr6%2Fspec%2F2.5%2380442261170552832" def setUp(self): resultfile = get_pkg_data_filename(ssaresultfile) self.tbl = votableparse(resultfile) self.result = ssa.SSAResults(self.tbl) self.rec = self.result.getrecord(0) def testAttr(self): self.assertEquals(self.rec.ra, 179.84916) self.assertEquals(self.rec.dec, 0.984768) self.assertEquals(self.rec.title, "SDSS J115923.80+005905.16 GALAXY") self.assertEquals( self.rec.dateobs, Time("2000-04-29 03:22:00Z", format="iso")) self.assertEquals(self.rec.instr, "SDSS 2.5-M SPEC2 v4_5") self.assertEquals(self.rec.acref, self.acref) self.assertEquals(self.rec.getdataurl(), self.acref) class SSAExecuteTest(unittest.TestCase): srvr = None @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() if cls.srvr.is_alive(): 
print("prob") def testExecute(self): q = ssa.SSAQuery("http://localhost:{0}/ssa".format(self.srvr.port)) q.pos = (0, 0) q.diameter = 1.0 q.format = "all" results = q.execute() self.assertIsInstance(results, ssa.SSAResults) self.assertEquals(len(results), 35) def testSearch(self): srv = ssa.SSAService("http://localhost:{0}/ssa".format(self.srvr.port)) results = srv.search(pos=(0.0, 0.0), diameter=1.0) self.assertIsInstance(results, ssa.SSAResults) self.assertEquals(len(results), 35) def testSsa(self): results = ssa.search( "http://localhost:{0}/ssa".format(self.srvr.port), pos=(0.0, 0.0), diameter=1.0) self.assertIsInstance(results, ssa.SSAResults) self.assertEquals(len(results), 35) def testError(self): srv = ssa.SSAService("http://localhost:{0}/err".format(self.srvr.port)) self.assertRaises(dalq.DALQueryError, srv.search, (0.0, 0.0), 1.0) class DatasetNameTest(unittest.TestCase): base = "testspec" def setUp(self): resultfile = get_pkg_data_filename(ssaresultfile) self.tbl = votableparse(resultfile) self.result = ssa.SSAResults(self.tbl) self.rec = self.result.getrecord(0) self.outdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.outdir) def cleanfiles(self, tmpdir=None): if not tmpdir: tmpdir = self.outdir if not os.path.isdir(tmpdir): return files = glob.glob(os.path.join(tmpdir, self.base+"*.*")) for f in files: os.remove(f) def testSuggest(self): self.assertEquals("SDSS_J115923.80+005905.16_GALAXY", self.rec.suggest_dataset_basename()) self.assertEquals("xml", self.rec.suggest_extension("DAT")) def testMakeDatasetName(self): self.assertTrue(os.path.isdir(self.outdir)) self.assertEquals("./SDSS_J115923.80+005905.16_GALAXY.xml", self.rec.make_dataset_filename()) self.assertEquals("./goober.xml", self.rec.make_dataset_filename(base="goober")) self.assertEquals("./SDSS_J115923.80+005905.16_GALAXY.jpg", self.rec.make_dataset_filename(ext="jpg")) self.assertEquals("./goober.jpg", self.rec.make_dataset_filename(base="goober", ext="jpg")) 
self.assertEquals(self.outdir+"/SDSS_J115923.80+005905.16_GALAXY.xml", self.rec.make_dataset_filename(self.outdir)) path = os.path.join(self.outdir,self.base+".xml") self.assertFalse(os.path.exists(path)) self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-1.xml") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-2.xml") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-3.xml") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) self.cleanfiles() open(os.path.join(self.outdir,self.base+".xml"),'w').close() path = os.path.join(self.outdir,self.base+"-1.xml") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(os.path.join(self.outdir,self.base+"-1.xml"),'w').close() open(os.path.join(self.outdir,self.base+"-2.xml"),'w').close() open(os.path.join(self.outdir,self.base+"-3.xml"),'w').close() path = os.path.join(self.outdir,self.base+"-4.xml") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) self.cleanfiles() self.assertEquals(os.path.join(self.outdir,self.base+".xml"), self.rec.make_dataset_filename(self.outdir, self.base)) __all__ = "SSAServiceTest SSAQueryTest SSAResultsTest SSARecordTest SSAExecuteTest DatasetNameTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": try: module = find_current_module(1, True) pkgdir = os.path.dirname(module.__file__) t = "aTestDALServer" mod = imp.find_module(t, [pkgdir]) testserve = imp.load_module(t, mod[0], mod[1], mod[2]) except 
ImportError as e: sys.stderr.write("Can't find test server: aTestDALServer.py:"+str(e)) srvr = testserver.TestServer(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_query.py0000644000175000017500000006365413125165225021416 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.query """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, glob, tempfile, random, time import unittest, pdb import six import pyvo.dal.query as dalq import pyvo.dal.dbapi2 as daldbapi # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from . import testserver siaresultfile = "data/neat-sia.xml" ssaresultfile = "data/jhu-ssa.xml" testserverport = 8084 testserverport += 10 testserverport += random.randint(0, 9) class DALAccessErrorTest(unittest.TestCase): reason = "nya-nya" url = "http://localhost" def testProperties2(self): e = dalq.DALAccessError(self.reason, self.url) self.assertEquals(self.reason, e.reason) self.assertEquals(self.url, e.url) def testProperties1(self): e = dalq.DALAccessError(self.reason) self.assertEquals(self.reason, e.reason) self.assert_(e.url is None) def testPropertiesDef(self): e = dalq.DALAccessError() self.assertEquals(dalq.DALAccessError._defreason, e.reason) self.assert_(e.url is None) class DALServiceErrorTest(unittest.TestCase): reason = "nya-nya" code = 404 cause = "poof" url = "http://localhost/" def testProperties4(self): e = dalq.DALServiceError(self.reason, self.code, self.cause, self.url) self.assertEquals(self.reason, e.reason) self.assertEquals(self.cause, e.cause) self.assertEquals(self.code, e.code) self.assertEquals(self.url, e.url) def testProperties3(self): e = 
dalq.DALServiceError(self.reason, self.code, self.cause) self.assertEquals(self.reason, e.reason) self.assertEquals(self.cause, e.cause) self.assertEquals(self.code, e.code) self.assert_(e.url is None) def testProperties2(self): e = dalq.DALServiceError(self.reason, self.code) self.assertEquals(self.reason, e.reason) self.assertEquals(self.code, e.code) self.assert_(e.cause is None) self.assert_(e.url is None) def testProperties1(self): e = dalq.DALServiceError(self.reason) self.assertEquals(self.reason, e.reason) self.assert_(e.code is None) self.assert_(e.cause is None) self.assert_(e.url is None) def testPropertiesDef(self): e = dalq.DALServiceError() self.assert_(e.reason and e.reason.startswith("Unknown service ")) self.assert_(e.code is None) self.assert_(e.cause is None) self.assert_(e.url is None) def testFromExcept(self): c = RuntimeError(self.reason) e = dalq.DALServiceError.from_except(c) self.assertEquals(e.reason, "RuntimeError: " + self.reason) self.assert_(e.cause is c) self.assert_(e.code is None) self.assert_(e.url is None) class DALQueryErrorTest(unittest.TestCase): reason = "nya-nya" label = "goofed" url = "http://localhost" def testProperties3(self): e = dalq.DALQueryError(self.reason, self.label, self.url) self.assertEquals(self.reason, e.reason) self.assertEquals(self.label, e.label) self.assertEquals(self.url, e.url) def testProperties2(self): e = dalq.DALQueryError(self.reason, self.label) self.assertEquals(self.reason, e.reason) self.assertEquals(self.label, e.label) self.assert_(e.url is None) def testProperties1(self): e = dalq.DALQueryError(self.reason) self.assertEquals(self.reason, e.reason) self.assert_(e.label is None) self.assert_(e.url is None) def testPropertiesDef(self): e = dalq.DALQueryError() self.assert_(e.reason and e.reason.startswith("Unknown DAL Query ")) self.assert_(e.label is None) self.assert_(e.label is None) self.assert_(e.url is None) class DALResultsTest(unittest.TestCase): def setUp(self): resultfile = 
get_pkg_data_filename(siaresultfile) self.votable = votableparse(resultfile) def testCtor(self): self.result = dalq.DALResults(self.votable) self.assert_(isinstance(self.result._fldnames, list)) self.assert_(self.result.votable is not None) def testProps(self): self.testCtor() self.assertEquals(len(self.result), 2) names = self.result.fieldnames self.assertIsInstance(names, list) self.assertEquals(len(names), 10) for i, name in enumerate(names): self.assert_( type(name) in (six.binary_type, six.text_type), "field name #{0} not a string: {1}".format(i, type(name))) self.assert_(name, "field name #{0} is empty".format(i)) fielddescs = self.result.fielddescs self.assert_(isinstance(fielddescs, list)) self.assertEquals(len(fielddescs), 10) for fielddesc in fielddescs: self.assert_(all(( hasattr(fielddesc, 'name'), hasattr(fielddesc, 'ID'), hasattr(fielddesc, 'ucd'), hasattr(fielddesc, 'datatype') ))) for i in range(len(names)): fielddesc = self.result.getdesc(names[i]) self.assert_(fielddesc is fielddescs[i]) field = self.result.getdesc("Format") self.assertEquals(field.name, "Format") self.assertEquals(field.ucd, "VOX:Image_Format") self.assertEquals(field.datatype, "char") self.assertEquals(field.arraysize, "*") self.assertIsNone(field.utype) def testValue(self): self.testCtor() self.assertEquals(self.result.getvalue("Format", 0), b"image/fits") self.assertEquals(self.result.getvalue("Format", 1), b"image/jpeg") self.assertEquals(self.result.getvalue("Dim", 0), 2) val = self.result.getvalue("Size", 0) self.assertEquals(len(val), 2) self.assertEquals(val[0], 300) self.assertEquals(val[1], 300) self.assertRaises(KeyError, self.result.getvalue, "Goober", 0) def testGetRecord(self): self.testCtor() rec = self.result.getrecord(0) self.assert_(rec is not None) self.assert_(isinstance(rec, dalq.Record)) rec = self.result.getrecord(1) self.assert_(rec is not None) self.assert_(isinstance(rec, dalq.Record)) self.assertRaises(IndexError, self.result.getrecord, 2) def 
testGetColumn(self): self.testCtor() col = self.result.getcolumn('Ra') shifted = col + 0.05 self.assertAlmostEquals(0.05, shifted[0]-col[0]) self.assertRaises(KeyError, self.result.getcolumn, 'goob') def testIter(self): self.testCtor() i = 0 for rec in self.result: self.assertIsInstance(rec, dalq.Record) i += 1 self.assertEquals(i, 2) def testCursor(self): self.testCtor() c = self.result.cursor() self.assertIsInstance(c, daldbapi.Cursor) def testFieldnameByUcd(self): self.testCtor() self.assertEquals( self.result.fieldname_with_ucd("POS_EQ_RA_MAIN"), "Ra") self.assertEquals( self.result.fieldname_with_ucd("VOX:Image_AccessReference"), "URL") def testByUcd(self): self.testCtor() self.assertAlmostEqual(0.0, self.result[0].getbyucd("POS_EQ_RA_MAIN")) self.assertEquals( b"http://skyview.gsfc.nasa.gov/cgi-bin/images?position=0.0%2C0.0&survey=neat&pixels=300%2C300&sampler=Clip&size=1.0%2C1.0&projection=Tan&coordinates=J2000.0&return=FITS", self.result[0].getbyucd("VOX:Image_AccessReference")) def testFieldnameByUtype(self): self.testCtor() self.assertAlmostEqual(0.0, self.result[0].getbyutype("na_ra")) self.assertEquals( b"http://skyview.gsfc.nasa.gov/cgi-bin/images?position=0.0%2C0.0&survey=neat&pixels=300%2C300&sampler=Clip&size=1.0%2C1.0&projection=Tan&coordinates=J2000.0&return=FITS", self.result[0].getbyutype("na_acref")) class RecordTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(siaresultfile) self.votable = votableparse(resultfile) self.result = dalq.DALResults(self.votable) self.rec = self.result.getrecord(0) def testFields(self): reckeys = self.rec.keys() for fieldname in self.result.fieldnames: self.assert_(fieldname in reckeys) def testValues(self): self.assertEquals(self.rec["Format"], b"image/fits") self.assertEquals(self.rec["Dim"], 2) val = self.rec["Size"] self.assertEquals(len(val), 2) self.assertEquals(val[0], 300) self.assertEquals(val[1], 300) try: self.rec["Goober"] self.fail("Failed to raise KeyError on bad key") except 
KeyError: pass def testSuggestExtension(self): self.assertEquals(self.rec.suggest_extension("goob"), "goob") self.assertIsNone(self.rec.suggest_extension()) def testHasKey(self): self.assertEquals(self.rec["Format"], b"image/fits") self.assertTrue("Format" in self.rec) self.assertFalse("Goober" in self.rec) class MimeCheckTestCase(unittest.TestCase): def testGood(self): self.assertTrue(dalq.is_mime_type("image/jpeg")) self.assertTrue(dalq.is_mime_type("application/fits")) self.assertTrue(dalq.is_mime_type("application/x-fits")) self.assertTrue(dalq.is_mime_type("application/fits")) self.assertTrue(dalq.is_mime_type("application/votable+xml")) self.assertTrue(dalq.is_mime_type("application/fits;convention=STScI-STIS")) def testBad(self): self.assertFalse(dalq.is_mime_type("image")) self.assertFalse(dalq.is_mime_type("image/votable/xml")) class DALServiceTest(unittest.TestCase): @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) def setUp(self): self.baseurl = "http://localhost:{0}/sia".format(self.srvr.port) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() if cls.srvr.is_alive(): print("prob") def testCtor(self): self.srv = dalq.DALService(self.baseurl) def testProps(self): self.testCtor() self.assertEquals(self.srv.baseurl, self.baseurl) def testCreateQuery(self): self.testCtor() q = self.srv.create_query() self.assertIsInstance(q, dalq.DALQuery) self.assertEquals(q.baseurl, self.baseurl) def testCreateQueryWithKws(self): self.testCtor() q = self.srv.create_query(RA=12.045, DEC=-13.08, SR=0.01) self.assertIsInstance(q, dalq.DALQuery) self.assertEquals(q.baseurl, self.baseurl) self.assertAlmostEquals(q.get('RA'), 12.045) self.assertAlmostEquals(q.get('DEC'), -13.08) self.assertAlmostEquals(q.get('SR'), 0.01) def testSearch(self): self.testCtor() res = self.srv.search(RA=12.045, DEC=-13.08, SR=0.01) self.assert_(len(res)) class DALQueryTest(unittest.TestCase): def 
setUp(self): self.baseurl = "http://localhost/sia" def testCtor(self): self.query = dalq.DALQuery(self.baseurl) self.assert_(self.query.get("FORMAT") is None) def testProps(self): self.testCtor() self.assertEquals(self.query.baseurl, self.baseurl) def testParam(self): self.testCtor() self.assertEquals( len(self.query.keys()), 0, "param set should be empty: {0}".format(self.query.keys())) self.assertIsNone(self.query.get("RA")) self.query["RA"] = 51.235 self.assertEquals(len(self.query.keys()), 1) self.assertEquals(self.query.get("RA"), 51.235) self.query["RA"] = 127.235 self.assertEquals(len(self.query.keys()), 1) self.assertEquals(self.query.get("RA"), 127.235) self.query["DEC"] = -13.49677 self.assertEquals(len(self.query.keys()), 2) self.assertEquals(self.query.get("DEC"), -13.49677) del self.query["RA"] self.assertEquals(len(self.query.keys()), 1) self.assertEquals(self.query.get("DEC"), -13.49677) self.assert_(self.query.get("RA") is None) class QueryExecuteTest(unittest.TestCase): srvr = None @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() if cls.srvr.is_alive(): print("prob") def testExecute(self): q = dalq.DALQuery("http://localhost:{0}/sia".format(self.srvr.port)) q["foo"] = "bar" results = q.execute() self.assertIsInstance(results, dalq.DALResults) self.assertEquals(len(results), 2) def testExecuteStream(self): q = dalq.DALQuery("http://localhost:{0}/sia".format(self.srvr.port)) q["foo"] = "bar" strm = q.execute_stream() self.assertIsNotNone(strm) self.assert_(hasattr(strm, "read")) results = strm.read() strm.close() self.assert_(results.startswith(b"= 3: self.assert_(isinstance(data, str) or isinstance(data, bytes)) else: self.assert_(isinstance(data, unicode) or isinstance(data, str)) self.assert_(data.startswith(b"= 3: self.assert_(isinstance(data, str) or isinstance(data, bytes)) else: 
self.assert_(isinstance(data, unicode) or isinstance(data, str)) self.assert_(data.startswith(b" 0) self.assertEquals(descr[1][0], 'AcRef') self.assert_(isinstance(descr[1][1], daldbapi.TypeObject)) def testInfos(self): self.testCtor() infos = self.cursor.infos() self.assertEquals(int(infos['TableRows']), 35) def testFetchOne(self): self.testCtor() pos = self.cursor.pos rec = self.cursor.fetchone() self.assertEquals(self.cursor.pos, pos + 1) rec2 = self.cursor.fetchone() # self.assert_(rec != rec2) self.assertEquals(self.cursor.pos, pos + 2) def testFetchMany(self): self.testCtor() pos = self.cursor.pos recs = self.cursor.fetchmany() self.assertEquals(len(recs), self.cursor.arraysize) recs = self.cursor.fetchmany(size = 5) self.assertEquals(len(recs), 5) recs = self.cursor.fetchmany(size = -5) def testFetchAll(self): self.testCtor() recs = self.cursor.fetchall() self.assertEquals(len(recs), 35) self.testCtor() self.cursor.fetchone() recs = self.cursor.fetchall() self.assertEquals(len(recs), 34) def testScroll(self): self.testCtor() pos = self.cursor.pos self.cursor.scroll(5) self.assertEquals(self.cursor.pos, pos + 5) self.cursor.scroll(5, mode = "absolute") self.assertEquals(self.cursor.pos, 5) try: self.cursor.scroll(-1, mode = "absolute") except daldbapi.DataError: pass self.cursor.scroll(-1) self.assertEquals(self.cursor.pos, 4) class DatasetNameTest(unittest.TestCase): base = "testds" def setUp(self): resultfile = get_pkg_data_filename(siaresultfile) self.votable = votableparse(resultfile) self.result = dalq.DALResults(self.votable) self.rec = self.result.getrecord(0) self.outdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.outdir) def cleanfiles(self, tmpdir=None): if not tmpdir: tmpdir = self.outdir if not os.path.isdir(tmpdir): return files = glob.glob(os.path.join(tmpdir, self.base+"*.*")) for f in files: os.remove(f) def testMime2Ext(self): self.assertEquals("fits", dalq.mime2extension("application/fits")) self.assertEquals("fits", 
dalq.mime2extension("image/fits")) self.assertEquals("fits", dalq.mime2extension("image/x-fits")) self.assertEquals("jpg", dalq.mime2extension("image/jpeg")) self.assertEquals("gif", dalq.mime2extension("image/gif")) self.assertEquals("png", dalq.mime2extension("image/png")) self.assertEquals("txt", dalq.mime2extension("text/plain")) self.assertEquals("html", dalq.mime2extension("text/html")) self.assertEquals("xml", dalq.mime2extension("text/xml")) self.assertEquals("xml", dalq.mime2extension("application/votable;convention=stsci")) self.assertEquals("xml", dalq.mime2extension("application/x-votable")) self.assertEquals("xml", dalq.mime2extension("application/votable")) self.assertEquals("xls", dalq.mime2extension("application/x-microsoft-spreadsheet", "xls")) def testSuggest(self): self.assertEquals("dataset", self.rec.suggest_dataset_basename()) self.assertEquals("DAT", self.rec.suggest_extension("DAT")) def testMakeDatasetName(self): self.assertTrue(os.path.isdir(self.outdir)) self.assertEquals("./dataset.dat", self.rec.make_dataset_filename()) self.assertEquals("./goober.dat", self.rec.make_dataset_filename(base="goober")) self.assertEquals("./dataset.fits", self.rec.make_dataset_filename(ext="fits")) self.assertEquals("./goober.fits", self.rec.make_dataset_filename(base="goober", ext="fits")) self.assertEquals(self.outdir+"/dataset.dat", self.rec.make_dataset_filename(self.outdir)) path = os.path.join(self.outdir,self.base+".dat") self.assertFalse(os.path.exists(path)) self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-1.dat") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-2.dat") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() 
self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-3.dat") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) self.cleanfiles() open(os.path.join(self.outdir,self.base+".dat"),'w').close() path = os.path.join(self.outdir,self.base+"-1.dat") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(os.path.join(self.outdir,self.base+"-1.dat"),'w').close() open(os.path.join(self.outdir,self.base+"-2.dat"),'w').close() open(os.path.join(self.outdir,self.base+"-3.dat"),'w').close() path = os.path.join(self.outdir,self.base+"-4.dat") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) self.cleanfiles() self.assertEquals(os.path.join(self.outdir,self.base+".dat"), self.rec.make_dataset_filename(self.outdir, self.base)) class FormatTest(unittest.TestCase): subtests = [ ("d < 10", "d < 10"), ("t<0", "t<0"), ("d > 10", "d > 10"), ("t>0", "t>0"), ("A&P", "A&P"), ("A & P", "A & P"), ("A &amp;P", "A &P"), ("-30° ", "-30 deg "), ("-30\deg ", "-30 deg "), ("I am.
", "I am. "), ("I am;
therefore", "I am; therefore"), ("I am;
therefore", "I am; therefore"), ("

I am.

That's", "

I am.

That's"), ("goes as $v ~ r$.", "goes as v ~ r."), ("where $r$ is", "where r is"), ("$-45\deg$", "-45 deg"), ("upwards of $1M per month ($10M per", "upwards of $1M per month ($10M per") ] splittests = [ "one\n\ntwo", "one

two", "one

two", "one\para two" ] def test_simple_sub(self): for orig, trx in self.subtests: self.assertEquals(dalq.deref_markup(orig), trx) def test_mixed_sub(self): inp = """At t>0, the drop goes as $1/r^2$.
This shows""" out = """At t>0, the drop goes as 1/r^2. This shows""" self.assertEquals(dalq.deref_markup(inp), out); def test_fill(self): inp = """ The quick brown fox jumped over the lazy dogs. """ out = """The quick brown fox jumped over the lazy dogs.""" self.assertEquals(dalq.para_format_desc(inp, 38), out) def test_para_detect(self): for t in self.splittests: self.assertEquals(dalq.para_format_desc(t), "one\n\ntwo") def test_para_format(self): inp = """This tests the paragraph formatting functionality that will be used to disply resource descriptions. This is not meant for display purpoes only. It must be able to handle to handle multiple paragrasphs. The normal delimiter is a pair of consecutive new-line charactere, but other markup will be recognized as well, e.g. "<p>".

Formatting must be succeesful for $n$ paragraphs where $n > 1$. It should even work on $n^2$ paragraphs.""" out = """This tests the paragraph formatting functionality that will be used to disply resource descriptions. This is not meant for display purpoes only. It must be able to handle to handle multiple paragrasphs. The normal delimiter is a pair of consecutive new- line charactere, but other markup will be recognized as well, e.g. "

". Formatting must be succeesful for n paragraphs where n > 1. It should even work on n^2 paragraphs.""" text = dalq.para_format_desc(inp, 40) if text != out: for i in range(len(text)): if text[i] != out[i]: print("format different from expected starting with: "+ text[i:]) break self.assertEquals(text, out, "Incorrect formatting.") __all__ = "DALAccessErrorTest DALServiceErrorTest DALQueryErrorTest RecordTest EnsureBaseURLTest DALServiceTest DALQueryTest QueryExecuteTest CursorTest DatasetNameTest FormatTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": srvr = testserver.get_server(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/setup_package.py0000644000175000017500000000033213125165225022005 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import import os def get_package_data(): paths = [os.path.join('data', '*.xml')] return {'pyvo.dal.tests':paths} pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/testserver.py0000644000175000017500000000570213125165225021406 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ a test DAL server for testing pyvo.dal """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os import multiprocessing import requests from flask import Flask, render_template, request, redirect try: from astropy.tests.disable_internet import (turn_on_internet, turn_off_internet, INTERNET_OFF) except: # for astropy ver < 0.4 def turn_on_internet(verbose=False): pass def turn_off_internet(verbose=False): pass INTERNET_OFF = False template_folder = os.path.join( os.path.dirname(os.path.realpath(__file__)), "data" ) app = Flask(__name__, template_folder=template_folder) host = "127.0.0.1" port = None 
@app.route("/err")
def send_err():
    """Serve a canned DAL error response."""
    return render_template("error-sia.xml")


@app.route("/sia")
def send_sia():
    """Serve a canned SIA query result."""
    return render_template("neat-sia.xml")


@app.route("/cs")
def send_scs():
    """Serve a canned cone-search result."""
    return render_template("twomass-cs.xml")


@app.route("/ssa")
def send_ssa():
    """Serve a canned SSA query result."""
    return render_template("jhu-ssa.xml")


@app.route("/sla")
def send_sla():
    """Serve a canned SLA query result."""
    return render_template("nrao-sla.xml")


@app.route("/tap/sync", methods=["POST"])
def send_tap():
    """Serve a canned synchronous TAP result."""
    return render_template("arihip-tap.xml")


@app.route("/tap/async", methods=["POST"])
def send_tap_async_redir():
    # UWS: job creation redirects to the (fixed, fake) job resource.
    return redirect("/tap/async/3bLj5O")


@app.route("/tap/async/3bLj5O", methods=["GET", "POST"])
def send_tap_async():
    # GET polls the job phase (COMPLETED); POST answers the RUN request
    # with the PENDING job document.
    if request.method == "GET":
        return render_template("arihip-tap-async-get.xml")
    elif request.method == "POST":
        return render_template("arihip-tap-async.xml")


@app.route("/tap/async/3bLj5O/results/result")
def send_tap_async_result():
    """Serve the canned result table of the fake async TAP job."""
    return render_template("arihip-tap-async-result.xml")


@app.route("/tap/async/3bLj5O/phase", methods=["POST"])
def send_tap_async_phase():
    # Phase changes just bounce back to the job resource.
    return redirect("/tap/async/3bLj5O")


@app.route("/datalink_ssa")
def send_datalink_ssa():
    # The template embeds absolute URLs back to this server, so it needs
    # the host/port the server actually bound to (module globals).
    return render_template(
        "datalink-ssa.xml", addr="http://{}:{}".format(host, port))


@app.route("/dlmeta")
def send_dlmeta():
    """Serve a canned datalink metadata document."""
    return render_template("datalink.xml")


class PortProcess(multiprocessing.Process):
    """A server process that remembers which TCP port it listens on."""
    port = None


def server_running(port=8081):
    """Return True if something already answers HTTP on *port*.

    A connection refusal means the port is free; any HTTP response
    (including 404 for the probe path) or a slow-but-listening socket
    means it is taken.
    """
    url = "http://localhost:{0}/path".format(port)
    try:
        # timeout guards against a half-open listener stalling test
        # startup forever; the response content is irrelevant.
        requests.get(url, timeout=5)
        return True
    except requests.exceptions.ConnectionError:
        return False
    except requests.exceptions.Timeout:
        # Something is bound to the port, even if it is unresponsive.
        return True


def find_available_port(baseport=8081, limit=8181, step=1):
    """Return the first free port in [baseport, limit), probing by *step*.

    Raises RuntimeError if every candidate port is occupied.
    """
    port = baseport
    while port < limit:
        if not server_running(port):
            return port
        port += step
    raise RuntimeError("No port available!")


def get_server(baseport=8081, limit=8181, step=1):
    """Create (but do not start) a test-server process on a free port.

    The chosen port is exposed both as ``process.port`` (for callers
    building request URLs) and as the module-global ``port`` (read by
    the datalink templates when rendering absolute URLs).
    """
    port = find_available_port(baseport, limit, step)
    # Use PortProcess rather than patching an attribute onto a plain
    # Process, so the port-carrying contract is part of the type.
    process = PortProcess(target=app.run, kwargs={"port": port})
    process.port = port
    globals()["port"] = port
    return process


if __name__ == "__main__":
    app.run()
pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/0000755000175000017500000000000013240100417017521 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/datalink-ssa.xml0000644000175000017500000005156713125165225022646 0ustar noahfxnoahfx Spectra from the Flash and Heros Echelle spectrographs developed at Landessternwarte Heidelberg and mounted at La Silla and various other observatories. The data mostly contains spectra of OB stars. Heros was the name of the instrument after Flash got a second channel in 1995. This resource contains data associated with the publication 1996A&A...312..539S. For advice on how to cite the resource(s) that contributed to this result, see http://dc.zah.uni-heidelberg.de/tableinfo/flashheros.data SSAP Exactly 30 rows were returned. This means your query probably reached the match limit. Increase MAXREC. A measure of how closely the record matches your query. Higher numbers mean better matches. Observed position RA and Dec as a 2-real array as required by SSAP. Target RA and Dec as a 2-real array as required by SSAP. URL of a preview for the dataset, where available. Access key for the data MIME type of the file served Size of the data in bytes Title or the dataset (usually, spectrum) Dataset identifier assigned by the creator Dataset identifier assigned by the publisher Processing/Creation date Date last published. Bandpass (i.e., rough spectral location) of this dataset; use something generic like 'Optical' here. Creator assigned version for this dataset (will be incremented when this particular item is changed). Common name of object observed. Object class (star, QSO,...; use Simbad object classification http://simbad.u-strasbg.fr/simbad/sim-display?data=otypes if at all possible) Redshift of target object Equatorial (ICRS) position of the target object. 
Signal-to-noise ratio estimated for this dataset ICRS location of aperture center Angular diameter of aperture Midpoint of exposure Exposure duration Midpoint of region covered in this dataset Width of the spectrum Lower value of spectral coordinate Upper value of spectral coordinate Number of points in the spectrum Local observation key (used to connect to single orders). Type of flux calibration Data model name and version System RA and Dec are given in Time conversion factor in Osuna-Salgado convention. Spectral conversion factor in Osuna-Salgado convention UCD of the spectral column Unit of the spectral column Flux/magnitude conversion factor in Osuna-Salgado convention UCD of the flux column Unit of the flux column Type of data (spectrum, time series, etc) Publisher of the datasets included here. Creator of the datasets included here. IOVA id of the originating data collection Instrument or code used to produce these datasets Method of generation for the data. Process used to produce the data URL or bibcode of a publication describing this data. Statistical error in flux Systematic error in flux Type of flux calibration Bin size in wavelength Statistical error in wavelength Systematic error in wavelength Type of wavelength calibration Resolution on the spectral axis Statistical error in position Type of calibration in spatial coordinates Spatial resolution of data
0.0 304.446675349555 38.0329313131891 304.446675349555 38.0329313131891 NaN NaN http://dc.zah.uni-heidelberg.de/getproduct/flashheros/data/ca90/f0011.mt?preview=True http://dc.zah.uni-heidelberg.de/getproduct/flashheros/data/ca90/f0011.mt image/fits 100800 Flash/Heros p Cyg 1990-10-11 09:28:07.500003 ivo://org.gavo.dc/~?flashheros/data/ca90/f0011.mt 1993-05-18T00:00:00 2017-02-22T13:19:40.966036 p Cyg star NaN NaN NaN NaN 304.446675349555 38.0329313131891 17.6 48175.39453125 NaN 5.40127e-07 2.78147e-07 4.01053e-07 6.792e-07 23181 ca90/f0011.mt NORMALIZED
The publisher DID of the dataset of interest Recalibrate the spectrum. Right now, the only recalibration supported is max(flux)=1 ('RELATIVE'). Spectral cutout interval MIME type of the output format
pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/datalink.xml0000644000175000017500000001336513125165225022054 0ustar noahfxnoahfx Data links for data sets. Publisher data set id; this is an identifier for the dataset in question and can be used to retrieve the data. URL to retrieve the data or access the service. Identifier for the type of service if accessURL refers to a service. If accessURL is empty, this column gives the reason why. More information on this link What kind of data is linked here? Standard identifiers here include science, calibration, preview, info, auxiliary MIME type for the data returned. Size of the resource at access_url
ivo://org.gavo.dc/~?flashheros/data/ca90/f0011.mt http://dc.zah.uni-heidelberg.de/flashheros/q/echdl/dlmeta?ID=ivo%3A%2F%2Forg.gavo.dc%2F%7E%3Fflashheros%2Fdata_raw%2Fca90%2Fn0011.mt Split Echelle Orders #progenitor application/x-votable+xml;content=datalink -1
ivo://org.gavo.dc/~?flashheros/data/ca90/f0011.mt ndndtdihpgea #proc -1
ivo://org.gavo.dc/~?flashheros/data/ca90/f0011.mt http://dc.zah.uni-heidelberg.de/getproduct/flashheros/data/ca90/f0011.mt The full dataset. #this image/fits 100800
ivo://org.gavo.dc/~?flashheros/data/ca90/f0011.mt http://dc.zah.uni-heidelberg.de/getproduct/flashheros/data/ca90/f0011.mt?preview=True A preview for the dataset. #preview image/png -1
The publisher DID of the dataset of interest Recalibrate the spectrum. Right now, the only recalibration supported is max(flux)=1 ('RELATIVE'). Spectral cutout interval MIME type of the output format
pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/arihip-tap.xml0000644000175000017500000000304313125165225022313 0ustar noahfxnoahfx The catalogue ARIHIP has been constructed by selecting the 'best data' for a given star from combinations of HIPPARCOS data with Boss' GC and/or the Tycho-2 catalogue as well as the FK6. It provides 'best data' for 90 842 stars with a typical mean error of 0.89 mas/year (about a factor of 1.3 better than Hipparcos for this sample of stars). Query successful -- *TAINTED*: the value was operated on in a way that unit and ucd may be severely wrong AAI=
pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/arihip-tap-async.xml0000644000175000017500000000201613125165225023425 0ustar noahfxnoahfx 3bLj5O PENDING 2016-05-03T10:54:03Z 3600 2016-05-05T10:54:03Z None ADQL None SELECT TOP 1 1+1 AS result FROM arihip.main doQuery pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/arihip-tap-async-result.xml0000644000175000017500000000304313125165225024742 0ustar noahfxnoahfx The catalogue ARIHIP has been constructed by selecting the 'best data' for a given star from combinations of HIPPARCOS data with Boss' GC and/or the Tycho-2 catalogue as well as the FK6. It provides 'best data' for 90 842 stars with a typical mean error of 0.89 mas/year (about a factor of 1.3 better than Hipparcos for this sample of stars). Query successful -- *TAINTED*: the value was operated on in a way that unit and ucd may be severely wrong AAI=
pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/data/arihip-tap-async-get.xml0000644000175000017500000000220113125165225024176 0ustar noahfxnoahfx 3bLj5O COMPLETED 2016-05-03T10:54:03Z 3600 2016-05-05T10:54:03Z None ADQL None SELECT TOP 1 1+1 AS result FROM arihip.main doQuery pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_tap.py0000644000175000017500000000712613125165225021025 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.tap """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, glob, tempfile, random, time import unittest, pdb import os.path import pyvo.dal.query as dalq import pyvo.dal.tap as tap import pyvo.dal.dbapi2 as daldbapi # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from . import testserver tapresultfile = os.path.join(os.path.dirname(__file__), "data/arihip-tap.xml") errresultfile = "data/error-tap.xml" uploadfile = "data/upload-tap.xml" testserverport = 8084 class TAPServiceTest(unittest.TestCase): baseurl = "http://localhost/tap" def testCtor(self): self.srv = tap.TAPService(self.baseurl) def testProps(self): self.testCtor() self.assertEquals(self.srv.baseurl, self.baseurl) try: self.srv.baseurl = "towel" self.fail("baseurl not read-only") except AttributeError: pass class TAPRunTest(unittest.TestCase): @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() def testRunSync(self): query = "SELECT TOP 1 1+1 AS result FROM arihip.main" s = tap.TAPService("http://localhost:{0}/tap".format(self.srvr.port)) r = s.run_sync(query) self.assertIsInstance(r, tap.TAPResults) self.assertEquals(r.query_status, "OVERFLOW") 
self.assert_(len(r) == 1) def testRunAsync(self): query = "SELECT TOP 1 1+1 AS result FROM arihip.main" s = tap.TAPService("http://localhost:{0}/tap".format(self.srvr.port)) r = s.run_async(query) self.assertIsInstance(r, tap.TAPResults) self.assertEquals(r.query_status, "OVERFLOW") self.assert_(len(r) == 1) def testRunSyncUpload(self): query = "SELECT * FROM TAP_UPLOAD.t1" s = tap.TAPService("http://localhost:{0}/tap".format(self.srvr.port)) r = s.run_sync(query, uploads = {'t1': open(tapresultfile)}) self.assertIsInstance(r, tap.TAPResults) self.assertEquals(r.query_status, "OVERFLOW") self.assert_(len(r) == 1) def testRunAsyncUpload(self): query = "SELECT * FROM TAP_UPLOAD.t1" s = tap.TAPService("http://localhost:{0}/tap".format(self.srvr.port)) r = s.run_async(query, uploads = {'t1': open(tapresultfile)}) self.assertIsInstance(r, tap.TAPResults) self.assertEquals(r.query_status, "OVERFLOW") self.assert_(len(r) == 1) __all__ = "TAPServiceTest TAPRunTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": try: module = find_current_module(1, True) pkgdir = os.path.dirname(module.__file__) t = "testserver" mod = imp.find_module(t, [pkgdir]) testserve = imp.load_module(t, mod[0], mod[1], mod[2]) except ImportError as e: sys.stderr.write("Can't find test server: testserver.py:" + str(e)) srvr = testserver.TestServer(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_sla.py0000644000175000017500000001446113125165225021020 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.sla """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, random, time import unittest, pdb import pyvo.dal.query as dalq import pyvo.dal.sla as sla import 
pyvo.dal.dbapi2 as daldbapi # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from . import testserver slaresultfile = "data/nrao-sla.xml" errresultfile = "data/error-sla.xml" testserverport = 8084 testserverport += 30 testserverport += random.randint(0,9) class SLAServiceTest(unittest.TestCase): baseurl = "http://localhost/sla" def testCtor(self): self.srv = sla.SLAService(self.baseurl) def testProps(self): self.testCtor() self.assertEquals(self.srv.baseurl, self.baseurl) try: self.srv.baseurl = "goober" self.fail("baseurl not read-only") except AttributeError: pass def testCreateQuery(self): self.testCtor() q = self.srv.create_query() self.assert_(isinstance(q, sla.SLAQuery)) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 1) def testCreateQueryWithArgs(self): self.testCtor() q = self.srv.create_query(wavelength=(7.6e-6,1.e-5)) self.assertIsInstance(q, sla.SLAQuery) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 2) self.assertEquals(q.wavelength, (7.6e-6,1.e-5)) qurl = q.queryurl self.assertEquals(q["REQUEST"], "queryData") self.assertEquals(q["WAVELENGTH"], "7.6e-06/1e-05") class SLAQueryTest(unittest.TestCase): baseurl = "http://localhost/sla" def testCtor(self): self.q = sla.SLAQuery(self.baseurl) self.assertEquals(self.q.baseurl, self.baseurl) def testWavelength(self): self.testCtor() self.assert_(self.q.wavelength is None) self.q.wavelength = (7.6e-6,1.e-5) self.assertEquals(self.q.wavelength, (7.6e-6,1.e-5)) del self.q.wavelength self.assert_(self.q.wavelength is None) def testCreateURL(self): self.testCtor() self.q.wavelength = (7.6e-6, 1.e-5) qurl = self.q.queryurl self.assertEquals(qurl, self.baseurl) class SLAResultsTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(slaresultfile) self.tbl = votableparse(resultfile) def 
testCtor(self): self.r = sla.SLAResults(self.tbl) self.assertIsInstance(self.r._fldnames, list) self.assertIsNotNone(self.r.votable) self.assertEquals(len(self.r), 21) def testGetRecord(self): self.testCtor() rec = self.r.getrecord(0) self.assertIsInstance(rec, sla.SLARecord) rec = self.r.getrecord(1) self.assertIsInstance(rec, sla.SLARecord) class SLAResultsErrorTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(errresultfile) self.tbl = votableparse(resultfile) def testError(self): try: res = sla.SLAResults(self.tbl) self.fail("Failed to detect error response") except dalq.DALQueryError as ex: self.assertEquals(ex.label, "ERROR") self.assertEquals(ex.reason, "Forced Fail") class SLARecordTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(slaresultfile) self.tbl = votableparse(resultfile) self.result = sla.SLAResults(self.tbl) self.rec = self.result.getrecord(0) def testAttr(self): self.assertEquals(self.rec.title, "JPL: CH2OHCOCH2OH v29=1 65(10,55)-65( 9,56)") self.assertAlmostEquals(self.rec.wavelength.value, 0.0026007993198247656) self.assertEquals(self.rec.species_name, b"Dihydroxyacetone") self.assertTrue(self.rec.status is None) self.assertTrue(self.rec.initial_level is None) self.assertTrue(self.rec.final_level is None) class SLAExecuteTest(unittest.TestCase): srvr = None @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() if cls.srvr.is_alive(): print("prob") def testExecute(self): q = sla.SLAQuery("http://localhost:{0}/sla".format(self.srvr.port)) q.wavelength = (0.00260075, 0.00260080) results = q.execute() self.assert_(isinstance(results, sla.SLAResults)) self.assertEquals(len(results), 21) def testSearch(self): srv = sla.SLAService("http://localhost:{0}/sla".format(self.srvr.port)) results = srv.search(wavelength=(0.00260075, 0.00260080)) 
self.assert_(isinstance(results, sla.SLAResults)) self.assertEquals(len(results), 21) def testSla(self): results = sla.search("http://localhost:{0}/sla".format(self.srvr.port), wavelength=(0.00260075, 0.00260080)) self.assert_(isinstance(results, sla.SLAResults)) self.assertEquals(len(results), 21) def testError(self): srv = sla.SLAService("http://localhost:{0}/err".format(self.srvr.port)) self.assertRaises(dalq.DALQueryError, srv.search, (0.00260075, 0.00260080)) __all__ = "SLAServiceTest SLAQueryTest SLAResultsTest SLARecordTest SLAExecuteTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": try: module = find_current_module(1, True) pkgdir = os.path.dirname(module.__file__) t = "aTestDALServer" mod = imp.find_module(t, [pkgdir]) testserve = imp.load_module(t, mod[0], mod[1], mod[2]) except ImportError as e: sys.stderr.write("Can't find test server: aTestDALServer.py:"+str(e)) srvr = testserver.TestServer(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_datalink.py0000644000175000017500000000464013125165225022026 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.datalink """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, glob, tempfile, random, time import unittest, pdb import os.path import pyvo as vo import pyvo.dal.datalink as datalink # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from . 
import testserver tapresultfile = os.path.join(os.path.dirname(__file__), "data/arihip-tap.xml") errresultfile = "data/error-tap.xml" uploadfile = "data/upload-tap.xml" testserverport = 8084 class DatalinkRunTest(unittest.TestCase): ssa_baseurl = "http://localhost:{0}/datalink_ssa?" dl_baseurl = "http://localhost:{0}/datalink?" @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() def testDatalinkExtern(self): self.results = vo.spectrumsearch(self.ssa_baseurl.format( self.srvr.port), (30, 30), maxrec=30) self.assert_(len(self.results)) datalink = next(self.results.iter_datalinks()) row = datalink[0] self.assertEqual(row.semantics, "#progenitor") row = datalink[1] self.assertEqual(row.semantics, "#proc") row = datalink[2] self.assertEqual(row.semantics, "#this") row = datalink[3] self.assertEqual(row.semantics, "#preview") __all__ = "DatalinkRunTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": try: module = find_current_module(1, True) pkgdir = os.path.dirname(module.__file__) t = "testserver" mod = imp.find_module(t, [pkgdir]) testserve = imp.load_module(t, mod[0], mod[1], mod[2]) except ImportError as e: sys.stderr.write("Can't find test server: testserver.py:" + str(e)) srvr = testserver.TestServer(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_scs.py0000644000175000017500000001617113125165225021031 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.scs """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, random, time import unittest, pdb import pyvo.dal.query as dalq from 
pyvo.dal import scs as cs import pyvo.dal.dbapi2 as daldbapi # from astropy.io.vo import parse as votableparse from astropy.io.votable.exceptions import W22 from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from astropy.coordinates import SkyCoord from . import testserver csresultfile = "data/twomass-cs.xml" errresultfile = "data/error-cs.xml" testserverport = 8084 testserverport += 20 testserverport += random.randint(0,9) class SCSServiceTest(unittest.TestCase): baseurl = "http://localhost/cs" srv = None def testCtor(self): self.srv = cs.SCSService(self.baseurl) def testProps(self): self.testCtor() self.assertEquals(self.srv.baseurl, self.baseurl) def testCreateQuery(self): self.testCtor() q = self.srv.create_query() self.assertIsInstance(q, cs.SCSQuery) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 0) def testCreateQueryWithArgs(self): self.testCtor() q = self.srv.create_query(pos=(0.0, 0.0), radius=1.0, verbosity=2) self.assert_(isinstance(q, cs.SCSQuery)) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 4) self.assertEquals(q.pos, (0.0, 0.0)) self.assertEquals(q.radius, 1.0) self.assertEquals(q.verbosity, 2) class SCSQueryTest(unittest.TestCase): baseurl = "http://localhost/cs" def testCtor(self): self.q = cs.SCSQuery(self.baseurl) self.assertEquals(self.q.baseurl, self.baseurl) def testPos(self): self.testCtor() self.assert_(self.q.pos is None) self.assert_(self.q.radius is None) self.q.pos = (120.445, 40.1434) self.assertEquals(self.q.pos, (120.445, 40.1434)) self.q.radius = 0.25 self.assertEquals(self.q.radius, 0.25) self.q.pos = (400.0, -60.0) self.assertEquals(self.q.pos, (400.0, -60.0)) def testCreateURL(self): self.testCtor() self.assertEquals(self.q.queryurl, self.baseurl) self.q.pos = (102.5511, 24.312) self.q.radius = 0.1 self.assertAlmostEquals(self.q["RA"], 102.5511) self.assertAlmostEquals(self.q["DEC"], 24.312) 
self.assertAlmostEquals(self.q["SR"], 0.1) self.q.radius = 0.05 self.assertAlmostEquals(self.q["SR"], 0.05) class CSResultsTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(csresultfile) self.tbl = votableparse(resultfile) def testCtor(self): self.r = cs.SCSResults(self.tbl) self.assertIsInstance(self.r._fldnames, list) self.assertIsNotNone(self.r.votable) self.assertEquals(len(self.r), 2) def testGetRecord(self): self.testCtor() rec = self.r.getrecord(0) self.assertIsInstance(rec, cs.SCSRecord) class CSResultsErrorTest(unittest.TestCase): def testErrorVOTableInfo(self): resultfile = get_pkg_data_filename(errresultfile) self.tbl = votableparse(resultfile) try: res = cs.SCSResults(self.tbl) self.fail("Failed to detect error response") except dalq.DALQueryError as ex: self.assertEquals(ex.label, "Error") self.assertEquals(ex.reason, "Forced Fail") def testErrorResourceInfo(self): resultfile = get_pkg_data_filename("data/error3-cs.xml") self.tbl = votableparse(resultfile) try: res = cs.SCSResults(self.tbl) self.fail("Failed to detect error response") except dalq.DALQueryError as ex: self.assertEquals(ex.label, "Error") self.assertEquals(ex.reason, "Forced Fail") def testErrorParam(self): resultfile = get_pkg_data_filename("data/error2-cs.xml") self.tbl = votableparse(resultfile) try: res = cs.SCSResults(self.tbl) self.fail("Failed to detect error response") except dalq.DALQueryError as ex: self.assertEquals(ex.label, "Error") # Note: because it is stored in a PARAM and the datatype is "char", # the value will come out as bytes (rather than unicode) self.assertEquals(ex.reason, b"DEC parameter out-of-range") # def testErrorDefParam(self): # Will not raise if VOTable version is 1.0 def _testErrorDefParam(self): resultfile = get_pkg_data_filename("error4-cs.xml") self.assertRaises(W22, votableparse, resultfile) class CSRecordTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(csresultfile) self.tbl = 
votableparse(resultfile) self.result = cs.SCSResults(self.tbl) self.rec = self.result.getrecord(0) def testAttr(self): self.assertEquals(self.rec.pos.ra.value, 0.065625) self.assertEquals(self.rec.pos.dec.value, -8.8911667) self.assertEquals(self.rec.id, b"34") class CSExecuteTest(unittest.TestCase): baseurl = "http://localhost:{0}/cs?" srvr = None @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() if cls.srvr.is_alive(): print("prob") def testExecute(self): q = cs.SCSQuery(self.baseurl.format(self.srvr.port)) q.pos = (0.0, 0.0) q.radius = 0.25 results = q.execute() self.assertIsInstance(results, cs.SCSResults) self.assertEquals(len(results), 2) def testSearch(self): srv = cs.SCSService(self.baseurl.format(self.srvr.port)) results = srv.search(pos=(0.0, 0.0), radius=0.25) self.assertIsInstance(results, cs.SCSResults) self.assertEquals(len(results), 2) def testConesearch(self): results = cs.search( self.baseurl.format(self.srvr.port), pos=(0.0, 0.0), radius=0.25) self.assert_(isinstance(results, cs.SCSResults)) self.assertEquals(len(results), 2) __all__ = "SCSServiceTest SCSQueryTest CSResultsTest CSRecordTest CSExecuteTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": try: module = find_current_module(1, True) pkgdir = os.path.dirname(module.__file__) t = "testserver" mod = imp.find_module(t, [pkgdir]) testserver = imp.load_module(t, mod[0], mod[1], mod[2]) except ImportError as e: sys.stderr.write("Can't find test server: testserver.py:"+str(e)) srvr = testserver.TestServer(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/__init__.py0000644000175000017500000000010013125165225020722 0ustar noahfxnoahfx# Licensed under a 
3-clause BSD style license - see LICENSE.rst pyvo-0.6.1+dfsg.orig/pyvo/dal/tests/test_sia.py0000644000175000017500000003411613125165225021014 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Tests for pyvo.dal.sia """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os, sys, shutil, re, imp, glob, tempfile, random, time import unittest, pdb import pyvo.dal.query as dalq import pyvo.dal.sia as sia import pyvo.dal.dbapi2 as daldbapi # from astropy.io.vo import parse as votableparse from astropy.io.votable.tree import VOTableFile from astropy.io.votable import parse as votableparse from astropy.utils.data import get_pkg_data_filename from astropy.units import UnitConversionError from . import testserver siaresultfile = "data/neat-sia.xml" errresultfile = "data/error-sia.xml" testserverport = 8084 testserverport += 50 testserverport += random.randint(0,9) class SIAServiceTest(unittest.TestCase): baseurl = "http://localhost/sia" def testCtor(self): self.srv = sia.SIAService(self.baseurl) def testProps(self): self.testCtor() self.assertEquals(self.srv.baseurl, self.baseurl) def testCreateQuery(self): self.testCtor() q = self.srv.create_query() self.assertIsInstance(q, sia.SIAQuery) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 0) def testCreateQueryWithArgs(self): self.testCtor() q = self.srv.create_query( pos=(0, 0), size=(1.0, 1.0), format="all", intersect="overlaps", verbosity=2) self.assert_(isinstance(q, sia.SIAQuery)) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 5) self.assertEquals(q.pos, (0.0, 0.0)) self.assertEquals(q.size, (1.0, 1.0)) self.assertEquals(q.format, "all") self.assertEquals(q.intersect, "overlaps") self.assertEquals(q.verbosity, 2) self.assertEquals(q["POS"], "0.0,0.0") self.assertEquals(q["SIZE"], "1.0,1.0") self.assertEquals(q["FORMAT"], "ALL") self.assertEquals(q["INTERSECT"], 
"OVERLAPS") self.assertEquals(q["VERB"], 2) def testCreateQueryWithKws(self): self.testCtor() q = self.srv.create_query(CDELT=0.00028) self.assertAlmostEquals(0.00028, q["CDELT"]) q.pos = (0, 0) q.size = (1.0, 1.0) self.assertEquals(q["POS"], "0.0,0.0") self.assertEquals(q["SIZE"], "1.0,1.0") q = self.srv.create_query( pos=(0, 0), size=(1.0, 1.0), format="all", intersect="overlaps", verbosity=2, CDELT=0.00028) self.assertIsInstance(q, sia.SIAQuery) self.assertEquals(q.baseurl, self.baseurl) self.assertEquals(len(q.keys()), 6) self.assertEquals(q.pos, (0.0, 0.0)) self.assertEquals(q.size, (1.0, 1.0)) self.assertEquals(q.format, "all") self.assertEquals(q.intersect, "overlaps") self.assertEquals(q.verbosity, 2) self.assertAlmostEquals(q['CDELT'], 0.00028) class SIAQueryTest(unittest.TestCase): baseurl = "http://localhost/sia" def testCtor(self): self.q = sia.SIAQuery(self.baseurl) self.assertEquals(self.q.baseurl, self.baseurl) def testPos(self): self.testCtor() self.assert_(self.q.pos is None) self.q.pos = (180.2, -30.1) self.assertEquals(self.q.pos, (180.2, -30.1)) self.q.pos = [170.2, -20.1] self.assertEquals(self.q.pos, [170.2, -20.1]) def testBadPos(self): self.testCtor() try: self.q.pos = 22.3 self.fail("pos took scalar value") except ValueError: pass try: self.q.pos = range(4) self.fail("pos took bad-length array value") except ValueError: pass try: self.q.pos = "a b".split() self.fail("pos took string values") except ValueError: pass def testSize(self): self.testCtor() self.assert_(self.q.size is None) self.q.size = (1.0, 2.0) self.assertEquals(len(self.q.size), 2) self.assertEquals(self.q.size, (1.0, 2.0)) del self.q.size self.assert_(self.q.size is None) self.q.size = 0.5 self.assertRaises(TypeError, len, self.q.size) self.assertEquals(self.q.size, 0.5) self.q.size = [1.0, 2.0] self.assertEquals(self.q.size, [1.0, 2.0]) def testProps(self): self.testCtor() self.assertIsNone(self.q.verbosity) self.q.verbosity = 1 self.assertEquals(self.q.verbosity, 1) del 
self.q.verbosity self.assertIsNone(self.q.verbosity) self.assertIsNone(self.q.intersect) self.q.intersect = "covers" self.assertEquals(self.q.intersect, "covers") del self.q.intersect self.assertIsNone(self.q.intersect) def testFormat(self): self.testCtor() self.assert_(self.q.format is None) self.q.format = "ALL" self.assertEquals(self.q.format, "ALL") del self.q.format self.assert_(self.q.format is None) self.q.format = "ALL" self.assertEquals(self.q.format, "ALL") #self.q.format = ["graphic-png", "jpeg", "gif"] #self.assertEquals(self.q.format, ["GRAPHIC-png", "jpeg", "gif"]) self.q.format = "image/fits" self.assertEquals(self.q.format, "image/fits") self.q.format = ["image/fits"] self.assertEquals(self.q.format, ["image/fits"]) self.q.format = ("image/fits",) self.assertEquals(self.q.format, ("image/fits",)) self.q.format = {"image/fits"} self.assertEquals(self.q.format, {"image/fits"}) self.q.format = ["image/fits", "text/html"] self.assertEquals(self.q.format, ["image/fits", "text/html"]) self.assertEquals(len(self.q.format), 2) self.q.format = ("image/fits", "text/html") self.assertEquals(self.q.format, ("image/fits", "text/html")) self.assertEquals(len(self.q.format), 2) self.q.format = {"image/fits", "text/html"} self.assertEquals(self.q.format, {"image/fits", "text/html"}) self.assertEquals(len(self.q.format), 2) def _assertPropSetRaises(self, extype, obj, att, val): try: setattr(obj, att, val) self.fail("Failed to raise ValueError for {0}={1}".format( att,str(val))) except extype: pass except Exception as ex: self.fail("Raised wrong exception: {0}: {1}".format( str(type(ex)), str(ex))) def testCreateURL(self): self.testCtor() qurl = self.q.queryurl self.assertEquals(qurl, self.baseurl) class SIAResultsTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(siaresultfile) self.tbl = votableparse(resultfile) def testCtor(self): self.r = sia.SIAResults(self.tbl) self.assert_(isinstance(self.r._fldnames, list)) 
self.assert_(self.r.votable is not None) self.assertEquals(len(self.r), 2) def testGetRecord(self): self.testCtor() rec = self.r.getrecord(0) self.assert_(isinstance(rec, sia.SIARecord)) rec = self.r.getrecord(1) self.assert_(isinstance(rec, sia.SIARecord)) class SIAResultsErrorTest(unittest.TestCase): def setUp(self): resultfile = get_pkg_data_filename(errresultfile) self.tbl = votableparse(resultfile) def testError(self): try: res = sia.SIAResults(self.tbl) self.fail("Failed to detect error response") except dalq.DALQueryError as ex: self.assertEquals(ex.label, "ERROR") self.assertEquals(ex.reason, "Forced Fail") class SIARecordTest(unittest.TestCase): acref = "http://skyview.gsfc.nasa.gov/cgi-bin/images?position=0.0%2C0.0&survey=neat&pixels=300%2C300&sampler=Clip&size=1.0%2C1.0&projection=Tan&coordinates=J2000.0&return=FITS" def setUp(self): resultfile = get_pkg_data_filename(siaresultfile) self.tbl = votableparse(resultfile) self.result = sia.SIAResults(self.tbl) self.rec = self.result.getrecord(0) def testAttr(self): self.assertEquals(self.rec.pos.ra.deg, 0.0) self.assertEquals(self.rec.pos.dec.deg, 0.0) self.assertEquals(self.rec.title, "neat") self.assert_(self.rec.dateobs is None) self.assertEquals(self.rec.naxes, 2) self.assertEquals(self.rec.naxis[0].value, 300) self.assertEquals(self.rec.naxis[1].value, 300) self.assert_(self.rec.instr is None) self.assertEquals(self.rec.acref, self.acref) self.assertEquals(self.rec.getdataurl(), self.acref) class SIAExecuteTest(unittest.TestCase): srvr = None @classmethod def setup_class(cls): cls.srvr = testserver.get_server(testserverport) cls.srvr.start() time.sleep(0.5) @classmethod def teardown_class(cls): if cls.srvr.is_alive(): cls.srvr.terminate() if cls.srvr.is_alive(): print("prob") def testExecute(self): q = sia.SIAQuery("http://localhost:{0}/sia".format(self.srvr.port)) q.pos = (0, 0) q.size = (1.0, 1.0) q.format = "all" results = q.execute() self.assert_(isinstance(results, sia.SIAResults)) 
self.assertEquals(len(results), 2) def testSearch(self): srv = sia.SIAService("http://localhost:{0}/sia".format(self.srvr.port)) results = srv.search(pos=(0, 0), size=(1.0, 1.0)) self.assert_(isinstance(results, sia.SIAResults)) self.assertEquals(len(results), 2) def testSia(self): results = sia.search( "http://localhost:{0}/sia".format(self.srvr.port), pos=(0, 0), size=(1.0, 1.0)) self.assertIsInstance(results, sia.SIAResults) self.assertEquals(len(results), 2) def testError(self): srv = sia.SIAService("http://localhost:{0}/err".format(self.srvr.port)) self.assertRaises(dalq.DALQueryError, srv.search, (0.0, 0.0), 1.0) class DatasetNameTest(unittest.TestCase): base = "testim" def setUp(self): resultfile = get_pkg_data_filename(siaresultfile) self.tbl = votableparse(resultfile) self.result = sia.SIAResults(self.tbl) self.rec = self.result.getrecord(0) self.outdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.outdir) def cleanfiles(self, tmpdir=None): if not tmpdir: tmpdir = self.outdir if not os.path.isdir(tmpdir): return files = glob.glob(os.path.join(tmpdir, self.base+"*.*")) for f in files: os.remove(f) def testSuggest(self): title = self.rec.title self.assertEquals(title, self.rec.suggest_dataset_basename()) self.assertEquals("fits", self.rec.suggest_extension("DAT")) def testMakeDatasetName(self): self.assertEquals("./neat.fits", self.rec.make_dataset_filename()) self.assertEquals("./goober.fits", self.rec.make_dataset_filename(base="goober")) self.assertEquals("./neat.jpg", self.rec.make_dataset_filename(ext="jpg")) self.assertEquals("./goober.jpg", self.rec.make_dataset_filename(base="goober", ext="jpg")) self.assertEquals(self.outdir+"/neat.fits", self.rec.make_dataset_filename(self.outdir)) path = os.path.join(self.outdir,self.base+".fits") self.assertFalse(os.path.exists(path)) self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = 
os.path.join(self.outdir,self.base+"-1.fits") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-2.fits") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(path,'w').close() self.assertTrue(os.path.exists(path)) path = os.path.join(self.outdir,self.base+"-3.fits") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) self.cleanfiles() open(os.path.join(self.outdir,self.base+".fits"),'w').close() path = os.path.join(self.outdir,self.base+"-1.fits") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) open(os.path.join(self.outdir,self.base+"-1.fits"),'w').close() open(os.path.join(self.outdir,self.base+"-2.fits"),'w').close() open(os.path.join(self.outdir,self.base+"-3.fits"),'w').close() path = os.path.join(self.outdir,self.base+"-4.fits") self.assertEquals(path, self.rec.make_dataset_filename(self.outdir, self.base)) self.cleanfiles() self.assertEquals(os.path.join(self.outdir,self.base+".fits"), self.rec.make_dataset_filename(self.outdir, self.base)) __all__ = "SIAServiceTest SIAQueryTest SIAResultsTest SIARecordTest SIAExecuteTest DatasetNameTest".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": try: module = find_current_module(1, True) pkgdir = os.path.dirname(module.__file__) t = "aTestDALServer" mod = imp.find_module(t, [pkgdir]) testserve = imp.load_module(t, mod[0], mod[1], mod[2]) except ImportError as e: sys.stderr.write("Can't find test server: aTestDALServer.py:"+str(e)) srvr = testserver.TestServer(testserverport) try: srvr.start() unittest.main() finally: if srvr.is_alive(): srvr.terminate() pyvo-0.6.1+dfsg.orig/pyvo/dal/datalink.py0000644000175000017500000003470613125165225017633 0ustar noahfxnoahfx# Licensed under a 3-clause 
BSD style license - see LICENSE.rst """ A module for accessing remote source and observation catalogs """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import warnings from .query import ( DALResults, DALQuery, DALService, Record, DALServiceError, PyvoUserWarning) from .mixin import AvailabilityMixin, CapabilityMixin from astropy.io.votable.tree import Param # monkeypatch astropy with group support in RESOURCE def _monkeypath_astropy_resource_groups(): from astropy.io.votable.tree import Resource, Group from astropy.utils.collections import HomogeneousList old_group_unknown_tag = Group._add_unknown_tag def new_group_unknown_tag(self, iterator, tag, data, config, pos): if tag == "PARAM": return self._add_param(self, iterator, tag, data, config, pos) else: old_group_unknown_tag(self, iterator, tag, data, config, pos) old_init = Resource.__init__ def new_init(self, *args, **kwargs): old_init(self, *args, **kwargs) self._groups = HomogeneousList(Group) Resource.__init__ = new_init def resource_groups(self): return self._groups Resource.groups = property(resource_groups) def resource_add_group(self, iterator, tag, data, config, pos): group = Group(self, config=config, pos=pos, **data) self.groups.append(group) group.parse(iterator, config) Resource._add_group = resource_add_group old_resource_unknown_tag = Resource._add_unknown_tag def new_resource_unknown_tag(self, iterator, tag, data, config, pos): if tag == "GROUP": return self._add_group(iterator, tag, data, config, pos) else: old_resource_unknown_tag(iterator, tag, data, config, pos) Resource._add_unknown_tag = new_resource_unknown_tag _monkeypath_astropy_resource_groups() __all__ = [ "search", "DatalinkService", "DatalinkQuery", "DatalinkResults"] def search(url, id, responseformat=None, **keywords): """ submit a Datalink query that returns rows matching the criteria given. Parameters ---------- url : str the base URL of the query service. 
id : str the dataset identifier responseformat : str the output format Returns ------- DatalinkResults a container holding a table of matching catalog records Raises ------ DALServiceError for errors connecting to or communicating with the service. DALQueryError if the service responds with an error, including a query syntax error. """ service = DatalinkService(url) return service.search(id, responseformat, **keywords) class DatalinkService(DALService, AvailabilityMixin, CapabilityMixin): """ a representation of a Datalink service """ def __init__(self, baseurl): """ instantiate a Datalink service Parameters ---------- baseurl : str the base URL that should be used for forming queries to the service. """ super(DatalinkService, self).__init__(baseurl) def run_sync(self, id, responseformat=None, **keywords): """ runs sync query and returns its result Parameters ---------- id : str the dataset identifier responseformat : str the output format Returns ------- DatalinkResults the query result See Also -------- DatalinkResults """ return self.create_query(id, responseformat, **keywords).execute() #alias for service discovery search = run_sync def create_query(self, id, responseformat=None, **keywords): """ create a query object that constraints can be added to and then executed. The input arguments will initialize the query with the given values. Parameters ---------- baseurl : str the base URL for the Datalink service id : str the dataset identifier responseformat : str the output format """ return DatalinkQuery( self.baseurl, id, responseformat, **keywords) class DatalinkQuery(DALQuery): """ a class for preparing an query to an Datalink service. Query constraints are added via its service type-specific methods. The various execute() functions will submit the query and return the results. 
The base URL for the query, which controls where the query will be sent when one of the execute functions is called, is typically set at construction time; however, it can be updated later via the :py:attr:`~pyvo.dal.query.DALQuery.baseurl` to send a configured query to another service. In addition to the search constraint attributes described below, search parameters can be set generically by name via dict semantics. The typical function for submitting the query is ``execute()``; however, alternate execute functions provide the response in different forms, allowing the caller to take greater control of the result processing. """ @classmethod def from_resource(cls, row, resource): """ Creates a instance from a Record and a Datalink Resource. XML Hierarchy: """ # TODO: implement the full xml hierarchy group_input_params = next( group for group in resource.groups if group.name == "inputParams") dl_params = {_.name: _ for _ in resource.params} input_params = ( _ for _ in group_input_params.entries if type(_) == Param) if "accessURL" not in dl_params: raise DALServiceError("Datalink has no accessURL") query_params = {} for input_param in input_params: if input_param.value: query_params[input_param.name] = input_param.value elif input_param.ref: query_params[input_param.name] = row[input_param.ref] return cls(dl_params["accessURL"].value, **query_params) def __init__( self, baseurl, id=None, responseformat=None, **keywords): """ initialize the query object with the given parameters Parameters ---------- baseurl : str the Datalink baseurl id : str the dataset identifier responseformat : str the output format """ super(DatalinkQuery, self).__init__(baseurl, **keywords) if id: self["ID"] = id if responseformat: self["RESPONSEFORMAT"] = responseformat def execute(self): """ submit the query and return the results as a DatalinkResults instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input 
query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ return DatalinkResults(self.execute_votable(), self.queryurl) class DatalinkResults(DALResults): """ The list of matching records resulting from an datalink query. Each record contains a set of metadata that describes an available record matching the query constraints. The number of records in the results is available by passing it to the Python built-in ``len()`` function. This class supports iterable semantics; thus, individual records (in the form of :py:class:`~pyvo.dal.query.Record` instances) are typically accessed by iterating over an ``DatalinkResults`` instance. Alternatively, records can be accessed randomly via :py:meth:`getrecord` or through a Python Database API (v2) Cursor (via :py:meth:`~pyvo.dal.query.DALResults.cursor`). Column-based data access is possible via the :py:meth:`~pyvo.dal.query.DALResults.getcolumn` method. ``DatalinkResults`` is essentially a wrapper around an Astropy :py:mod:`~astropy.io.votable` :py:class:`~astropy.io.votable.tree.Table` instance where the columns contain the various metadata describing the images. One can access that VOTable directly via the :py:attr:`~pyvo.dal.query.DALResults.votable` attribute. Thus, when one retrieves a whole column via :py:meth:`~pyvo.dal.query.DALResults.getcolumn`, the result is a Numpy array. Alternatively, one can manipulate the results as an Astropy :py:class:`~astropy.table.table.Table` via the following conversion: >>> table = results.table ``DatalinkResults`` supports the array item operator ``[...]`` in a read-only context. When the argument is numerical, the result is an :py:class:`~pyvo.dal.query.Record` instance, representing the record at the position given by the numerical index. If the argument is a string, it is interpreted as the name of a column, and the data from the column matching that name is returned as a Numpy array. 
""" def getrecord(self, index): """ return a representation of a datalink result record that follows dictionary semantics. The keys of the dictionary are those returned by this instance's fieldnames attribute. The returned record has the additional function :py:meth:`~pyvo.dal.query.DALResults.getdataset` Parameters ---------- index : int the integer index of the desired record where 0 returns the first record Returns ------- REc a dictionary-like wrapper containing the result record metadata. Raises ------ IndexError if index is negative or equal or larger than the number of rows in the result table. See Also -------- Record """ return DatalinkRecord(self, index) def bysemantics(self, semantics): """ return the rows with the dataset identified by the given semantics Returns ------- Sequence of DatalinkRecord a sequence of dictionary-like wrappers containing the result record. """ # TODO: get semantics with astropy and implement resursive lookup for record in self: if record.semantics == semantics: yield record def getdataset(self): """ return the first row with the dataset identified by semantics #this Returns ------- DatalinkRecord a dictionary-like wrapper containing the result record. """ try: return next(self.bysemantics("#this")) except StopIteration: raise ValueError("No row with semantics #this found!") class DatalinkRecord(Record): """ a dictionary-like container for data in a record from the results of an datalink query, The commonly accessed metadata which are stadardized by the datalink standard are available as attributes. If the metadatum accessible via an attribute is not available, the value of that attribute will be None. All metadata, including non-standard metadata, are acessible via the ``get(`` *key* ``)`` function (or the [*key*] operator) where *key* is table column name. 
""" @property def id(self): """ Input identifier """ return self.get("ID", decode=True) @property def access_url(self): """ Link to data """ return self.get("access_url", decode=True) @property def error_message(self): """ Error if an access_url cannot be created """ return self.get("error_message", decode=True) @property def description(self): """ Human-readable text describing this link """ return self.get("description", decode=True) @property def semantics(self): """ Term from a controlled vocabulary describing the link """ return self.get("semantics", decode=True) @property def content_type(self): """ Mime-type of the content the link returns """ return self.get("content_type", decode=True) @property def content_length(self): """ Size of the download the link returns """ return int(self["content_length"]) def getdataurl(self): """ return the URL contained in the access URL column which can be used to retrieve the dataset described by this record. Raises :py:class:`~pyvo.dal.query.DALServiceError` if theres an error. """ if self.error_message: raise DALServiceError(self.error_message) return self.access_url class DatalinkMixin(object): """ Mixing for datalink functionallity If you mix this in, you have to call _init_datalinks in your constructor. """ _datalinks = None def iter_datalinks(self): """ Iterates over all datalinks in a DALResult. 
""" if self._datalinks is None: raise RuntimeError( "iter_datalinks called without previous init_datalinks") if len(self._datalinks) < 1: return if len(self._datalinks) > 1: warnings.warn( "Got more than one datalink element!", PyvoUserWarning) datalink = next(iter(self._datalinks)) for record in self: query = DatalinkQuery.from_resource(record, datalink) yield query.execute() def _init_datalinks(self, votable): # this can be overridden to specialize for a particular DAL protocol adhocs = ( resource for resource in votable.resources if resource.type == "meta" and resource.utype == "adhoc:service" ) datalinks = ( adhoc for adhoc in adhocs if any( param.name == "standardID" and param.value.lower( ).startswith(b"ivo://ivoa.net/std/datalink") for param in adhoc.params)) self._datalinks = list(datalinks) pyvo-0.6.1+dfsg.orig/pyvo/dal/__init__.py0000644000175000017500000000144613125165225017576 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst from .sia import search as imagesearch from .ssa import search as spectrumsearch from .sla import search as linesearch from .scs import search as conesearch from .tap import search as tablesearch from .sia import SIAService, SIAQuery, SIARecord from .ssa import SSAService, SSAQuery, SSARecord from .sla import SLAService, SLAQuery, SLARecord from .scs import SCSService, SCSQuery, SCSRecord from .tap import TAPService, TAPQuery, AsyncTAPJob from .query import ( DALAccessError, DALProtocolError, DALFormatError, DALServiceError, DALQueryError) __all__ = [ "imagesearch", "spectrumsearch", "linesearch", "conesearch", "tablesearch", "SIAService", "SSAService", "SLAService", "SCSService", "TAPService", "AsyncTAPJob"] pyvo-0.6.1+dfsg.orig/pyvo/registry/0000755000175000017500000000000013125165225016570 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/registry/regtap.py0000644000175000017500000003447013125165225020434 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ a module 
for basic VO Registry interactions. A VO registry is a database of VO resources--data collections and services--that are available for VO applications. Typically, it is aware of the resources from all over the world. A registry can find relevent data collections and services through search queries--typically, subject-based. The registry responds with a list of records describing matching resources. With a record in hand, the application can use the information in the record to access the resource directly. Most often, the resource is a data service that can be queried for individual datasets of interest. This module provides basic, low-level access to the RegTAP Registries using standardized TAP-based services. """ from __future__ import ( absolute_import, division, print_function, unicode_literals) import os from ..dal import scs, sia, ssa, sla, tap, query as dalq __all__ = ["search", "RegistryResource", "RegistryResults", "ivoid2service"] REGISTRY_BASEURL = os.environ.get("IVOA_REGISTRY") or "http://dc.g-vo.org/tap" _service_type_map = { "image": "sia", "spectrum": "ssa", "scs": "conesearch", "line": "slap", "sla": "slap", "table": "tap" } def search(keywords=None, servicetype=None, waveband=None, datamodel=None): """ execute a simple query to the RegTAP registry. Parameters ---------- keywords : list of str keyword terms to match to registry records. Use this parameter to find resources related to a particular topic. servicetype : str the service type to restrict results to. Allowed values include 'conesearch', 'sia' , 'ssa', 'slap', 'tap' waveband : str the name of a desired waveband; resources returned will be restricted to those that indicate as having data in that waveband. Allowed values include 'radio', 'millimeter', 'infrared', 'optical', 'uv', 'euv', 'x-ray' 'gamma-ray' datamodel : str the name of the datamodel to search for; makes only sence in conjunction with servicetype tap (or no servicetype). 
See http://wiki.ivoa.net/twiki/bin/view/IVOA/IvoaDataModel for more informations about data models. Returns ------- RegistryResults a container holding a table of matching resource (e.g. services) See Also -------- RegistryResults """ if not any((keywords, servicetype, waveband, datamodel)): raise dalq.DALQueryError( "No search parameters passed to registry search") joins = set(["rr.interface", "rr.resource"]) wheres = list() if keywords: joins.add("rr.res_subject") joins.add("rr.resource") wheres.extend(["({})".format(" AND ".join(""" ( 1=ivo_nocasematch(res_subject, '%{0}%') OR 1=ivo_hasword(res_description, '{0}') OR 1=ivo_hasword(res_title, '{0}') )""".format(tap.escape(keyword)) for keyword in keywords ))]) if servicetype: servicetype = _service_type_map.get(servicetype, servicetype) joins.add("rr.interface") wheres.append("standard_id LIKE 'ivo://ivoa.net/std/{}%'".format( tap.escape(servicetype))) wheres.append("intf_type = 'vs:paramhttp'") else: wheres.append("""( standard_id LIKE 'ivo://ivoa.net/std/conesearch%' OR standard_id LIKE 'ivo://ivoa.net/std/sia%' OR standard_id LIKE 'ivo://ivoa.net/std/ssa%' OR standard_id LIKE 'ivo://ivoa.net/std/slap%' OR standard_id LIKE 'ivo://ivoa.net/std/tap%' )""") if waveband: joins.add("rr.resource") wheres.append("1 = ivo_hashlist_has('{}', waveband)".format( tap.escape(waveband))) if datamodel: joins.add("rr.interface") joins.add("rr.res_detail") wheres.append("intf_type = 'vs:paramhttp'") wheres.append("detail_xpath='/capability/dataModel/@ivo-id'") wheres.append( "1=ivo_nocasematch(detail_value, 'ivo://ivoa.net/std/{0}%')".format( tap.escape(datamodel))) query = """SELECT DISTINCT rr.interface.*, rr.capability.*, rr.resource.* FROM rr.capability {} {} """.format( ''.join("NATURAL JOIN {} ".format(j) for j in joins), ("WHERE " if wheres else "") + " AND ".join(wheres) ) service = tap.TAPService(REGISTRY_BASEURL) query = RegistryQuery(service.baseurl, query, maxrec=service.hardlimit) return query.execute() class 
RegistryQuery(tap.TAPQuery): def execute(self): """ submit the query and return the results as a RegistryResults instance Raises ------ DALServiceError for errors connecting to or communicating with the service DALQueryError for errors either in the input query syntax or other user errors detected by the service DALFormatError for errors parsing the VOTable response """ return RegistryResults(self.execute_votable(), self.queryurl) class RegistryResults(dalq.DALResults): """ an iterable set of results from a registry query. Each record is returned as RegistryResults """ def getrecord(self, index): """ return all the attributes of a resource record with the given index as SimpleResource instance (a dictionary-like object). Parameters ---------- index : int the zero-based index of the record """ return RegistryResource(self, index) class RegistryResource(dalq.Record): """ a dictionary for the resource metadata returned in one record of a registry query. A SimpleResource acts as a dictionary, so in general, all attributes can be accessed by name via the [] operator, and the attribute names can by returned via the keys() function. For convenience, it also stores key values as properties; these include: Properties ---------- title : bytes the title of the resource shortname : bytes the resource's short name ivoid : bytes the IVOA identifier for the resource (identifier will also work) accessurl : str when the resource is a service, the service's access URL. """ _service = None @property def ivoid(self): """ the IVOA identifier for the resource. """ return self.get("ivoid", decode=True) @property def res_type(self): """ the resource types that characterize this resource. 
""" return self.get("res_type", decode=True) @property def short_name(self): """ the short name for the resource """ return self.get("short_name", decode=True) @property def res_title(self): """ the title of the resource """ return self.get("res_title", default=None, decode=True) @property def content_levels(self): """ a list of content level labels that describe the intended audience for this resource. """ return self.get("content_level", default="", decode=True).split("#") @property def res_description(self): """ the textual description of the resource. See Also -------- SimpleResource.describe """ return self.get("res_description", decode=True) @property def reference_url(self): """ URL pointing to a human-readable document describing this resource. """ return self.get("reference_url", decode=True) @property def creators(self): """ The creator(s) of the resource in the ordergiven by the resource record author """ return self.get("creator_seq", default="", decode=True).split(";") @property def content_types(self): """ list of natures or genres of the content of the resource. """ return self.get("content_type", decode=True).split("#") @property def source_format(self): """ The format of source_value. """ return self.get("source_format", decode=True) @property def region_of_regard(self): """ numeric value representing the angle, given in decimal degrees, by which a positional query against this resource should be "blurred" in order to get an appropriate match. """ return float(self.get("region_of_regard", 0)) @property def waveband(self): """ a list of names of the wavebands that the resource provides data for """ return self.get("waveband", default="", decode=True).split("#") @property def access_url(self): """ the URL that can be used to access the service resource. 
""" return self.get("access_url", decode=True) @property def standard_id(self): """ the IVOA standard identifier """ return self.get("standard_id", decode=True) @property def service(self): """ return an appropriate DALService subclass for this resource that can be used to search the resource. Return None if the resource is not a recognized DAL service. Currently, only Conesearch, SIA, SSA, and SLA services are supported. """ if self.access_url: for key, value in { "ivo://ivoa.net/std/conesearch": scs.SCSService, "ivo://ivoa.net/std/sia": sia.SIAService, "ivo://ivoa.net/std/ssa": ssa.SSAService, "ivo://ivoa.net/std/sla": sla.SLAService, "ivo://ivoa.net/std/tap": tap.TAPService, }.items(): if key in self.standard_id: self._service = value(self.access_url) return self._service def search(self, *args, **keys): """ assuming this resource refers to a searchable service, execute a search against the resource. This is equivalent to: self.to_service().search(*args, **keys) The arguments provided should be appropriate for the service that the DAL service type would expect. See the documentation for the appropriate service type: ============ ========================================= Service type Use the argument syntax for ============ ========================================= catalog :py:meth:`pyvo.dal.scs.SCSService.search` image :py:meth:`pyvo.dal.sia.SIAService.search` spectrum :py:meth:`pyvo.dal.ssa.SSAService.search` line :py:meth:`pyvo.dal.sla.SLAService.search` database *not yet supported* ============ ========================================= Raises ------ RuntimeError if the resource does not describe a searchable service. """ if not self.service: raise dalq.DALServiceError( "resource, {0}, is not a searchable service".format( self.short_name)) return self.service.search(*args, **keys) def describe(self, verbose=False, width=78, file=None): """ Print a summary description of this resource. 
Parameters ---------- verbose : bool If false (default), only user-oriented information is printed; if true, additional information will be printed as well. width : int Format the description with given character-width. out : writable file-like object If provided, write information to this output stream. Otherwise, it is written to standard out. """ restype = "Custom Service" stdid = self.get("standard_id").lower() if stdid: if stdid.startswith("ivo://ivoa.net/std/conesearch"): restype = "Catalog Cone-search Service" elif stdid.startswith("ivo://ivoa.net/std/sia"): restype = "Image Data Service" elif stdid.startswith("ivo://ivoa.net/std/ssa"): restype = "Spectrum Data Service" elif stdid.startswith("ivo://ivoa.net/std/slap"): restype = "Spectral Line Database Service" elif stdid.startswith("ivo://ivoa.net/std/tap"): restype = "Table Access Protocol Service" print(restype, file=file) print(dalq.para_format_desc(self.res_title), file=file) print("Short Name: " + self.short_name, file=file) #print("Publisher: " + dalq.para_format_desc(self.publisher), file=file) print("IVOA Identifier: " + self.ivoid, file=file) if self.access_url: print("Base URL: " + self.access_url, file=file) if self.res_description: print(file=file) print(dalq.para_format_desc(self.res_description), file=file) print(file=file) if self.short_name: print(dalq.para_format_desc("Subjects: {}".format(self.short_name)), file=file) if self.waveband: val = (str(v) for v in self.waveband) print(dalq.para_format_desc("Waveband Coverage: " + ", ".join(val)), file=file) if verbose: if self.standard_id: print("StandardID: " + self.standard_id, file=file) if self.reference_url: print("More info: " + self.reference_url, file=file) def ivoid2service(ivoid): service = tap.TAPService(REGISTRY_BASEURL) results = service.run_sync(""" SELECT DISTINCT access_url, standard_id FROM rr.capability NATURAL JOIN rr.interface WHERE ivoid = '{}' """.format(tap.escape(ivoid))) for result in results: for ivo, cls in { 
"ivo://ivoa.net/std/conesearch": scs.SCSService, "ivo://ivoa.net/std/sia": sia.SIAService, "ivo://ivoa.net/std/ssa": ssa.SSAService, "ivo://ivoa.net/std/sla": sla.SLAService, "ivo://ivoa.net/std/tap": tap.TAPService, }.items(): if result["standard_id"] in ivo: return cls(result["access_url"]) return None pyvo-0.6.1+dfsg.orig/pyvo/registry/tests/0000755000175000017500000000000013125165225017732 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/registry/tests/setup_package.py0000644000175000017500000000034313125165225023117 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import import os def get_package_data(): paths = [ os.path.join('data', '*.xml') ] return { 'pyvo.registry.tests': paths} pyvo-0.6.1+dfsg.orig/pyvo/registry/tests/data/0000755000175000017500000000000013240100417020631 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/registry/tests/__init__.py0000644000175000017500000000010013125165225022032 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst pyvo-0.6.1+dfsg.orig/pyvo/registry/__init__.py0000644000175000017500000000036213125165225020702 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ a package for interacting with registries. The regtap module supports access to the IVOA Registries """ from . import regtap search = regtap.search __all__ = ["search"] pyvo-0.6.1+dfsg.orig/pyvo/conftest.py0000644000175000017500000000046613125165225017125 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst # this contains imports plugins that configure py.test for astropy tests. # by importing them here in conftest.py they are discoverable by py.test # no matter how it is invoked within the source tree. 
from astropy.tests.pytest_plugins import * pyvo-0.6.1+dfsg.orig/pyvo/pyvo.cfg0000644000175000017500000000000113125165225016365 0ustar noahfxnoahfx pyvo-0.6.1+dfsg.orig/pyvo/_astropy_init.py0000644000175000017500000001234113125165225020156 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst __all__ = ['__version__', '__githash__', 'test'] # this indicates whether or not we are in the package's setup.py try: _ASTROPY_SETUP_ except NameError: from sys import version_info if version_info[0] >= 3: import builtins else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = False try: from .version import version as __version__ except ImportError: __version__ = '' try: from .version import githash as __githash__ except ImportError: __githash__ = '' # set up the test command def _get_test_runner(): import os from astropy.tests.helper import TestRunner return TestRunner(os.path.dirname(__file__)) def test(package=None, test_path=None, args=None, plugins=None, verbose=False, pastebin=None, remote_data=False, pep8=False, pdb=False, coverage=False, open_files=False, **kwargs): """ Run the tests using `py.test `__. A proper set of arguments is constructed and passed to `pytest.main`_. .. _py.test: http://pytest.org/latest/ .. _pytest.main: http://pytest.org/latest/builtin.html#pytest.main Parameters ---------- package : str, optional The name of a specific package to test, e.g. 'io.fits' or 'utils'. If nothing is specified all default tests are run. test_path : str, optional Specify location to test by path. May be a single file or directory. Must be specified absolutely or relative to the calling directory. args : str, optional Additional arguments to be passed to pytest.main_ in the ``args`` keyword argument. plugins : list, optional Plugins to be passed to pytest.main_ in the ``plugins`` keyword argument. verbose : bool, optional Convenience option to turn on verbose output from py.test_. 
Passing True is the same as specifying ``'-v'`` in ``args``. pastebin : {'failed','all',None}, optional Convenience option for turning on py.test_ pastebin output. Set to ``'failed'`` to upload info for failed tests, or ``'all'`` to upload info for all tests. remote_data : bool, optional Controls whether to run tests marked with @remote_data. These tests use online data and are not run by default. Set to True to run these tests. pep8 : bool, optional Turn on PEP8 checking via the `pytest-pep8 plugin `_ and disable normal tests. Same as specifying ``'--pep8 -k pep8'`` in ``args``. pdb : bool, optional Turn on PDB post-mortem analysis for failing tests. Same as specifying ``'--pdb'`` in ``args``. coverage : bool, optional Generate a test coverage report. The result will be placed in the directory htmlcov. open_files : bool, optional Fail when any tests leave files open. Off by default, because this adds extra run time to the test suite. Requires the `psutil `_ package. parallel : int, optional When provided, run the tests in parallel on the specified number of CPUs. If parallel is negative, it will use the all the cores on the machine. Requires the `pytest-xdist `_ plugin installed. Only available when using Astropy 0.3 or later. kwargs Any additional keywords passed into this function will be passed on to the astropy test runner. This allows use of test-related functionality implemented in later versions of astropy without explicitly updating the package template. 
""" test_runner = _get_test_runner() return test_runner.run_tests( package=package, test_path=test_path, args=args, plugins=plugins, verbose=verbose, pastebin=pastebin, remote_data=remote_data, pep8=pep8, pdb=pdb, coverage=coverage, open_files=open_files, **kwargs) if not _ASTROPY_SETUP_: # noqa import os from warnings import warn from astropy.config.configuration import ( update_default_config, ConfigurationDefaultMissingError, ConfigurationDefaultMissingWarning) # add these here so we only need to cleanup the namespace at the end config_dir = None if not os.environ.get('ASTROPY_SKIP_CONFIG_UPDATE', False): config_dir = os.path.dirname(__file__) config_template = os.path.join(config_dir, __package__ + ".cfg") if os.path.isfile(config_template): try: update_default_config( __package__, config_dir, version=__version__) except TypeError as orig_error: try: update_default_config(__package__, config_dir) except ConfigurationDefaultMissingError as e: wmsg = (e.args[0] + " Cannot install default profile. 
If you are " "importing from source, this is expected.") warn(ConfigurationDefaultMissingWarning(wmsg)) del e except Exception: raise orig_error pyvo-0.6.1+dfsg.orig/pyvo/tests/0000755000175000017500000000000013125165225016062 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/pyvo/tests/coveragerc0000644000175000017500000000140013125165225020120 0ustar noahfxnoahfx[run] source = {packagename} omit = {packagename}/_astropy_init* {packagename}/conftest* {packagename}/cython_version* {packagename}/setup_package* {packagename}/*/setup_package* {packagename}/*/*/setup_package* {packagename}/tests/* {packagename}/*/tests/* {packagename}/*/*/tests/* {packagename}/version* [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about packages we have installed except ImportError # Don't complain if tests don't hit assertions raise AssertionError raise NotImplementedError # Don't complain about script hooks def main\(.*\): # Ignore branches that don't pertain to this version of Python pragma: py{ignore_python_version}pyvo-0.6.1+dfsg.orig/pyvo/tests/setup_package.py0000644000175000017500000000021013125165225021240 0ustar noahfxnoahfxfrom __future__ import absolute_import def get_package_data(): return { _ASTROPY_PACKAGE_NAME_ + '.tests': ['coveragerc']} pyvo-0.6.1+dfsg.orig/pyvo/tests/__init__.py0000644000175000017500000000010013125165225020162 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst pyvo-0.6.1+dfsg.orig/pyvo/__init__.py0000644000175000017500000000306513125165225017035 0ustar noahfxnoahfx# Licensed under a 3-clause BSD style license - see LICENSE.rst """ PyVO is a package providing access to remote data and services of the Virtual observatory (VO) using Python. 
The pyvo module currently provides these main capabilities: * find archives that provide particular data of a particular type and/or relates to a particular topic * regsearch() * search an archive for datasets of a particular type * imagesearch(), spectrumsearch() * do simple searches on catalogs or databases * conesearch(), linesearch(), tablesearch() * get information about an object via its name * resolve(), object2pos(), object2sexapos() Submodules provide additional functions and classes for greater control over access to these services. This module also exposes the exception classes raised by the above functions, of which DALAccessError is the root parent exception. """ # Affiliated packages may add whatever they like to this file, but # should keep this content at the top. # ---------------------------------------------------------------------------- from ._astropy_init import * # ---------------------------------------------------------------------------- # For egg_info test builds to pass, put package imports here. if not _ASTROPY_SETUP_: from . import registry from .dal import ssa, sia, sla, scs, tap from .registry import search as regsearch from .dal import ( imagesearch, spectrumsearch, conesearch, linesearch, tablesearch, DALAccessError, DALProtocolError, DALFormatError, DALServiceError, DALQueryError) pyvo-0.6.1+dfsg.orig/scripts/0000755000175000017500000000000013125165225015412 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/scripts/ex_casA_image_cat.py0000644000175000017500000000172313125165225021323 0ustar noahfxnoahfx#! /usr/bin/env python import pyvo as vo # find archives with x-ray images archives = vo.regsearch(servicetype='image', waveband='xray') # position of my favorite source pos = vo.object2pos('Cas A') # find images and list in a file with open('cas-a.csv', 'w') as csv: print >> csv, "Archive short name,Archive title,Image", \ "title,RA,Dec,format,URL" for arch in archives: print "searching %s..." 
% arch.shortname try: matches = arch.search(pos=pos, size=0.25) except vo.DALAccessError, ex: print "Trouble accessing %s archive (%s)"\ % (arch.shortname, str(ex)) continue print "...found %d images" % matches.nrecs for image in matches: print >> csv, ','.join( (arch.shortname, arch.title, image.title, str(image.ra), str(image.dec), image.format, image.getdataurl()) ) pyvo-0.6.1+dfsg.orig/scripts/ex_get_cutouts.py0000644000175000017500000000135013125165225021024 0ustar noahfxnoahfx#! /usr/bin/env python import pyvo as vo # obtain your list of positions from somewhere sourcenames = ["ngc4258", "m101", "m51"] mysources = {} for src in sourcenames: mysources[src] = vo.object2pos(src) # create an output directory for cutouts import os if not os.path.exists("NVSSimages"): os.mkdir("NVSSimages") # setup a query object for NVSS nvss = "http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=nvss&" query = vo.sia.SIAQuery(nvss) query.size = 0.2 # degrees query.format = 'image/fits' for name, pos in mysources.items(): query.pos = pos results=query.execute() for image in results: print "Downloading %s..." % name image.cachedataset(filename="NVSSimages/%s.fits" % name) pyvo-0.6.1+dfsg.orig/scripts/README.rst0000644000175000017500000000112413125165225017077 0ustar noahfxnoahfxExample Scripts =============== This directory contains example scripts that illustrate the use of pyvo. Be sure that that pyvo (e.g. the parent of this directory) is in your PYTHONPATH. (And don't forget: these require network access, and an unreliable or slow network may produce poor results.) ex_casA_image_cat.py discovers archives that may have x-ray images and then queries those archives to find what x-ray images they have of CasA; the list of images are saved to a CSV file. ex_get_cutouts.py downloads cut-out images for a list of source from the NVSS survey. 
pyvo-0.6.1+dfsg.orig/docs/0000755000175000017500000000000013125165225014653 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/docs/pyvo/0000755000175000017500000000000013125165225015650 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/docs/pyvo/dal.rst0000644000175000017500000000124413125165225017143 0ustar noahfxnoahfx .. py:module:: pyvo.dal ******************** The pyvo.dal Package ******************** The ``pyvo.dal`` module encapsulates the implementation of the DAL services. The :py:mod:`~pyvo.dal.query` sub-module provides DAL behaviors through a set of base classes and common exception classes. The implementations for the specific types of services are handle the sub-modules :py:mod:`~pyvo.dal.scs`, :py:mod:`~pyvo.dal.sia`, :py:mod:`~pyvo.dal.ssa`, :py:mod:`~pyvo.dal.sla`, :py:mod:`~pyvo.dal.tap` .. automodapi:: pyvo.dal.query .. automodapi:: pyvo.dal.scs .. automodapi:: pyvo.dal.sia .. automodapi:: pyvo.dal.ssa .. automodapi:: pyvo.dal.sla .. automodapi:: pyvo.dal.tap pyvo-0.6.1+dfsg.orig/docs/pyvo/registry.rst0000644000175000017500000000040213125165225020246 0ustar noahfxnoahfx .. py:module:: pyvo.registry ========================= The pyvo.registry Package ========================= This package contains a implementations of the RegTAP interface in the :py:mod:`pyvo.registry.regtap` module. .. automodapi:: pyvo.registry.regtap pyvo-0.6.1+dfsg.orig/docs/pyvo/vo.rst0000644000175000017500000001172513125165225017034 0ustar noahfxnoahfx.. _about-vo: ********************************** About the Virtual Observatory (VO) ********************************** The Virtual Observatory refers to a web of data, services, technologies, and software that together create an environment for conducting astronomical research on the network. In particular, it takes advantage of the wealth of data available on-line from astronomical archives around the world. 
This web is held together through a set of open standards that give application common ways of doing things, such as discovering and retrieving data. PyVO has been developed to understand the VO standards, allowing one to interact with the VO using Python. The initial focus of PyVO is on data discovery and retrieval. It is typically not necessary to understand the details of the VO standards or the underlying architecture of the VO in order to use PyVO and access remote data; feel free to skip to the section on :ref:`getting-started`. However, if you are completely new to the concepts of the VO, or you encounter something you don't understand, you can read on for an overview of VO services. .. _about-data-disc: ========================= Data Discovery and Access ========================= An archive can expose its data to VO by providing standard data access services. Each major class of astronomical data (images, spectra, catalogs, etc.) have its own standard service protocol that allows users to discover what data they have and to download it. (The full set of data access standards is often referred to as the VO's *Data Access Layer* or DAL.) These protocols are all similar, sharing a largely common query interface, dataset metadata and query response. Typically, queries are formed by a set of keyword=value parameters appended onto a base URL. Responses come in the form of a table, in VOTable format (an XML format). There are two basic kinds of data access services, dataset services and catalog services. With a dataset service, one sends a query that asks what datasets the archive has of a particular type and that match the user's constraints. 
Currently, PyVO supports querying two types of dataset services: * `Simple Image Access (SIA) `_ -- a service for finding images * `Simple Spectral Access (SSA) `_ -- a service for finding spectra The response is a table where each row is describes a single downloadable dataset (an image or a spectrum) available in the archive that matches the input query. The columns contain metadata describing the dataset. One of the columns gives the format of the image (in the format of a MIME-type, such as "image/fits"), and another provides a URL that can be used to download that image. Typically, users will ask for data that over lap some region of the sky; however, other constraints on the query (such as waveband) can be set to further restrict the search results. With a catalog service, there is no dataset to download; instead one is simply searching a catalog for its records. Currently, PyVO supports querying three types of catalog services: * `Simple Cone Search (SCS) `_ -- a service for positional searching a source catalog or an observation log. * `Simple Line Access (SLAP) `_ -- a service for finding data about spectral lines, including their rest frequencies. * `Table Access Protocol (TAP) `_ -- a service for flexible access to source catalogs using custom search parameters. In all DAL search results, the archive can provide a rich set of metadata that describe whatever it is that is being searched for. The DAL call for the columns for the results table to be tagged with special labels that allow applications pick out particular kinds of information. These labels are separate from the column names, so while different archives may give their right ascension columns different names, they will share a common label, allowing an application to properly interpret the table. ==================== Discovering Archives ==================== If you don't know what archive or archives to search, you can discover them by searching what is called a VO Registry. 
This is a special database containing descriptions of all archives, services, and other resources known to the VO. Queries to a registry are good for finding services of a particular type (like image services or source catalog services) or have data related to a particular science topic. The registry is important for finding new data that you might not be aware exists or is available. Imagine for example that you want to find all image data available of your favorite source. You would first query the registry to find all image archives; you could then systematically search all of those archive for images overlapping the position of your source. You might further downselect the list of images based on the image metadata returned. pyvo-0.6.1+dfsg.orig/docs/pyvo/getting_started.rst0000644000175000017500000002444513125165225021602 0ustar noahfxnoahfx.. _getting-started: ************************* Getting Started With PyVO ************************* PyVO lets your find and download astronomical data available from archives that support standard VO service protocols. The different types of services that are supported will access different types of data. Nevertheless, these services have similar interfaces: position and size parameters are named `pos` and `size` and accept instances of `~astropy.coordinates.SkyCoord` or `~astropy.units.Quantity` with any appropiate unit as well as scalar or sequences of floats in the default unit, and results are returned as a `~astropy.io.votable.tree.VOTableFile` instance encapsulated in a `~pyvo.dal.query.DALResults` subclass. .. _getting-started-examples: ============== A Few Examples ============== As a quick overview, we start with an example that illustrates a number of the key features of the PyVO's data discovery capabilities. Here, we are interested in X-ray images of our favorite source, supernova remnant, Cas A. 
This examples finds out where X-ray images can be found and saves references to those images to a simple CSV file (a table with comma-separated values): .. code-block:: python :linenos: from csv import writer import pyvo as vo from astropy.coordinates import SkyCoord # find archives with x-ray images archives = vo.regsearch(servicetype='image', waveband='x-ray') # position of my favorite source pos = SkyCoord.from_name('Cas A') # find images and list in a CSV file with open('cas-a.csv', 'w') as csvfile: csv = writer(csvfile) csv.writerow([ "Archive", "title", "Image title", "format", "RA", "Dec", "URL"]) for arch in archives: print "searching {0}...".format(arch.res_title) try: matches = arch.search(pos=pos, size=0.25) except vo.DALAccessError as ex: print "Trouble accessing {0} archive {1}".format( arch.res_title, str(ex)) continue print "...found {0} images".format(len(matches)) for image in matches: csv.writerow([ arch.res_title, image.title, str(image.ra), str(image.dec), image.format, image.getdataurl()]) You might notice a few things in this example at the labeled line numbers: 2. Most of the time, you can what you will need from the top ``pyvo`` module; just import it. 6. The first step is to find archives that might have data were interested in. To do this, we use the ``regsearch()`` function to search the VO registry for relevent archives given the type of data were interested (images) and our waveband of interest. 9. We look up the source position using the `~astropy.coordinates.SkyCoord.from_name` function. 17. The results we got back from our registry query behaves like a list--in particular, we can iterate through each of the archives that were returned. 18. A registry query will return a variety of information about each service it finds, like its "res_title". These are accessible as properties. 21. Each item returned by the registry search represents a service at some archive that can return images. 
(This is because we said ``servicetype='image'`` in line 5.) We can find out what images the archive has via its ``search()`` function by giving it a "rectangular" region of the sky. Our search region is a square that is 0.25 degrees on a side, centered on the position of Cas A. 23. Sometimes, services are not up or working properly. The ``DALAccessError`` exception is a base class for the various things that can go wrong when querying a service (including the registry). If one of our searches fail, we are noting it and going on to the next one. PyVO provides more detailed exception classes if you want to distinguish betweeen different types of errors (like input errors). 28. Calling ``len`` with the result object as argument tells the number of items returned in the results (the ``archives`` list has this property, too). Each represents an image that overlaps our search region. 29. As with the registry search results, we can iterate through the images that were matched. 30 . For each image found, we will write out a row into our output list, copying data about both the image and the archive it came from. One of the important pieces of information we want about the image is where to get it: the ``image.getdataurl()`` function returns a URL that can be used to retrieve the data. There are five different kind of VO search services supported by PyVO and they all work the same way: * you can execute search via a search function to which you pass in search constraints as keyword arguments, * you get back a list of items that match your constraints which you can iterate through, * catchable exceptions will be thrown if anything goes wrong, * each returned record will have properties holding metadata that describe that item, and * when searching for a dataset, the record will include a URL for downloading the dataset. Here's another example searching for images. In this example, we want to download cutout images for the NVSS survey for a list of sources. 
We already know what archive we want to go to for images; that is, we already know the NVSS image service URL we need to use. In this example, we show a slightly different way to pass search parameters as well as how to download the images. .. code-block:: python :linenos: import pyvo as vo from astropy.coordinates import SkyCoord # obtain your list of positions from somewhere sourcenames = ["ngc4258", "m101", "m51"] mysources = {} for src in sourcenames: mysources[src] = SkyCoord.from_name(src) # create an output directory for cutouts import os if not os.path.exists("NVSSimages"): os.mkdir("NVSSimages") nvss = "http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=nvss&" for name, pos in mysources.items(): query = vo.sia.SIAQuery( nvss, pos=(pos.ra.deg, pos.dec.deg), # degrees format='image/fits') results = query.execute() for image in results: print "Downloading {0}...".format(name) image.cachedataset(filename="NVSSimages/{0}.fits".format(name)) You might notice: 5. We created a simple list of three sources, but you might load them in from a catalog our your own table. 18. Instead of passing a `~astropy.coordinates.SkyCoord` instance to specify the search position, we instantiate a `~pyvo.dal.sia.SIAQuery` instance with a tuple of icrs decimal degrees as the pos parameter. 20. We'll ask only for FITS images. 21. We iterate through sources in our list, setting the query position to that of the source and executing it. 24. We can download each image to a directory via the ``cachedataset()`` function. .. _getting-started-pyvo: =================================== What's available in the pyvo Module =================================== The :py:mod:`pyvo` module is organized such that most of what might need is available at the top of the module; that is, simply importing this module is sufficient for most uses: .. code-block:: python import pyvo as vo The module's search capabilities are available through top-level functions. 
Four of the functions represent what's referred to as the *VO Data Access Layer* (DAL): * :py:func:`~pyvo.conesearch` -- search a remote catalog for data about sources or observations located within some radius of a given position. * :py:func:`~pyvo.imagesearch` -- search an image archive for images that overlap a region of the sky * :py:func:`~pyvo.spectrumsearch` -- search an image archive for spectra observed within some radius of a given position. * :py:func:`~pyvo.linesearch` -- search a remote spectral line database for data about emission lines. * :py:func:`~pyvo.tablesearch` -- search a remote database for generic data. We'll show you how to use these in the next chapter, :ref:`data-access`. All the DAL search functions require a URL that represents the location of the service as its first argument. If you don't know the URL, you can look it up through a search of the VO Registry: * :py:func:`~pyvo.regsearch` -- search the VO Registry to find services and archives. The Registry is discussed more in a subsequent chapter, :ref:`registry-access`. The :py:mod:`pyvo` module also makes available a set of exceptions that are thrown by the above functions when things go wrong. These are described in the :ref:`data-access` chapter under the section, :ref:`data-access-exceptions`: ============================================ =================================================================================== :py:class:`~pyvo.dal.query.DALAccessError` a base class for all failures while accessing a DAL service :py:class:`~pyvo.dal.query.DALProtocolError` a base exception indicating that a DAL service responded in an erroneous way. :py:class:`~pyvo.dal.query.DALFormatError` an exception indicating that a DAL response contains fatal format errors. :py:class:`~pyvo.dal.query.DALServiceError` an exception indicating a failure communicating with a DAL service. 
:py:class:`~pyvo.dal.query.DALQueryError` an exception indicating an error by a working DAL service while processing a query. ============================================ =================================================================================== .. raw:: html
Finally, we will see in the next chapter that additional features are available in sub-modules, each associated with a different type of services. This includes: =========================== ==================================================== :py:mod:`pyvo.dal.sia` Classes for accessing image services :py:mod:`pyvo.dal.ssa` Classes for accessing spectrum services :py:mod:`pyvo.dal.scs` Classes for accessing catalog services :py:mod:`pyvo.dal.sla` Classes for accessing spectral line catalog services :py:mod:`pyvo.dal.tap` Classes for accessing table access services :py:mod:`pyvo.registry` Classes for accessing the registry =========================== ==================================================== pyvo-0.6.1+dfsg.orig/docs/pyvo/ivoareferences.rst0000644000175000017500000000053213125165225021402 0ustar noahfxnoahfx.. _VOTable standard: http://www.ivoa.net/documents/VOTable/ .. _SIA standard: http://www.ivoa.net/documents/SIA/ .. _SCS standard: http://www.ivoa.net/documents/latest/ConeSearch.html .. _SSA standard: http://www.ivoa.net/documents/SSA/ .. _SLA standard: http://www.ivoa.net/documents/SLAP/ .. _PEP 249: http://www.python.org/dev/peps/pep-0249/ pyvo-0.6.1+dfsg.orig/docs/pyvo/install.rst0000644000175000017500000000254113125165225020052 0ustar noahfxnoahfx *************** Installing PyVO *************** Information about this package, including latest releases, can be found at: http://dev.usvao.org/vao/wiki/Products/PyVO Source code can be found on GitHub at: http://github.com/pyvirtobs/pyvo This implementation requires the following prerequisite packages: * numpy * astropy If you install PyVO from a source distribution, these must be installed first. As an Astropy affiliate, this package uses the Astropy build infrastructure. Releases of PyVO are available from `PyPI `; thus, it and its prerequisites can be most easily installed using ``pip``: pip install pyvo Alternatively, you can download and unpack a source tar-ball (pyvo-x.x.tar.gz). 
To install directly into the python installation, type as root user inside the distributions root directory: python setup.py install To install into a special directory called, say, $MYPYVO (which need not require root permission), first be sure that astropy and numpy are in your PYTHONPATH (if they are also installed in a non-standard place). Next, try: python setup.py install --home=$MYPYVO To just try out PyVO in this directory, you can build it in "developer" mode via: python setup.py build_ext --inplace In this mode, update your PYTHONPATH to include the directory containing this file. pyvo-0.6.1+dfsg.orig/docs/pyvo/ref.rst0000644000175000017500000000036413125165225017161 0ustar noahfxnoahfx ******************** PyVO Reference Guide ******************** PyVO is a package providing access to remote data and services of the Virtual Observatory (VO) using Python. **Contents** .. toctree:: :maxdepth: 2 mod dal registry pyvo-0.6.1+dfsg.orig/docs/pyvo/mod.rst0000644000175000017500000000654013125165225017166 0ustar noahfxnoahfx ================ The pyvo Package ================ .. py:module:: pyvo PyVO is a package providing access to remote data and services of the Virtual observatory (VO) using Python. The pyvo module currently provides these main capabilities: * find archives that provide particular data of a particular type and/or relates to a particular topic * :py:func:`~pyvo.regsearch` * search an archive for datasets of a particular type * :py:func:`~pyvo.imagesearch`, :py:func:`~pyvo.spectrumsearch` * do simple searches on catalogs or databases * :py:func:`~pyvo.conesearch`, :py:func:`~pyvo.linesearch`, :py:func:`~pyvo.tablesearch` Submodules provide additional functions and classes for greater control over access to these services. This module also exposes the exception classes raised by the above functions, of which DALAccessError is the root parent exception. 
########### API Summary ########### ********* Functions ********* =============================== ================================================================ :py:func:`~pyvo.conesearch` equivalent to :py:func:`pyvo.dal.scs.search` :py:func:`~pyvo.imagesearch` equivalent to :py:func:`pyvo.dal.sia.search` :py:func:`~pyvo.spectrumsearch` equivalent to :py:func:`pyvo.dal.ssa.search` :py:func:`~pyvo.linesearch` equivalent to :py:func:`pyvo.dal.sla.search` :py:func:`~pyvo.tablesearch` equivalent to :py:func:`pyvo.dal.tap.search` :py:func:`~pyvo.regsearch` equivalent to :py:func:`pyvo.registry.regtap.search` =============================== ================================================================ ********** Exceptions ********** .. .. currentmodule:: pyvo.dal.query .. autosummary:: DALAccessError DALProtocolError DALFormatError DALServiceError DALQueryError .. currentmodule:: pyvo ============================================ ================================== :py:class:`~pyvo.dal.query.DALAccessError` a base class for all failures while accessing a DAL service :py:class:`~pyvo.dal.query.DALProtocolError` a base exception indicating that a DAL service responded in an erroneous way. :py:class:`~pyvo.dal.query.DALFormatError` an exception indicating that a DAL response contains fatal format errors. :py:class:`~pyvo.dal.query.DALServiceError` an exception indicating a failure communicating with a DAL service. :py:class:`~pyvo.dal.query.DALQueryError` an exception indicating an error by a working DAL service while processing a query. ============================================ ================================== ########### API Details ########### ********* Functions ********* .. py:function:: regsearch([keywords=None, servicetype=None, waveband=None]) equivalent to :py:func:`pyvo.registry.regtap.search` .. py:function:: conesearch(url[, radius=1.0, verbosity=2]) equivalent to :py:func:`pyvo.dal.scs.search` .. 
py:function:: imagesearch(url, pos, size[, format='all', intersect='overlaps', verbosity=2]) equivalent to :py:func:`pyvo.dal.sia.search` .. py:function:: spectrumsearch(url, pos, size[, format='all']) equivalent to :py:func:`pyvo.dal.ssa.search` .. py:function:: linesearch(url, wavelength) equivalent to :py:func:`pyvo.dal.sla.search` .. py:function:: tablesearch(url, query[, language="ADQL", maxrec=None, uploads=None]) equivalent to :py:func:`pyvo.dal.tap.search` pyvo-0.6.1+dfsg.orig/docs/pyvo/data_access.rst0000644000175000017500000016654313125165225020653 0ustar noahfxnoahfx .. include:: ivoareferences.rst .. _data-access: ******************** Data Access Services ******************** In this section, we look at the interfaces for accessing remote archives for data using the standard VO interfaces. Four types of data access services are currently supported by PyVO: * `Simple Image Access (SIA) <#data-access-sia>`_ -- an interface for finding images in an archive * `Simple Spectral Access (SSA) <#data-access-ssa>`_ -- finding spectra in an archive * `Simple Cone Search (SCS) <#data-access-scs>`_ -- for positional searching a source catalog or an observation log. * `Simple Line Access (SLAP) <#data-access-sla>`_ -- finding data about spectral lines, including their rest frequencies. * `Table Access Protocol (TAP) <#data-access-tap>`_ -- accessing source catalogs using custom search parameters. The sub-sections below look at the PyVO interface for each type of service in more detail. There is also an additional sub-section about how to access datasets with `datalink <#datalink>`_ You will find the interfaces have a common design and thus behave in the same way. Common parameters like `pos` and `size` accept `~astropy.coordinates.SkyCoord` or `~astropy.units.Quantity` instances as well as scalar or sequences of floats. For simplicity, we show the usage of the latter in this section. .. 
_data-access-sia: ========================= Simple Image Access (SIA) ========================= In this section, we will examine the API for finding images in an archive while as well as highlight the parts of the interface that is common to all of the data access services. A Simple Image Access service is a service provided by an image archive which complies with the IVOA standard, `Simple Image Access (SIA) `_. Like all data access services, one sends a query to the service via a simple URL which is made up of a base URL and one or more *name=value* parameters that define the query appended to it. PyVO takes care of building and submitting the query, but to get started we need to have the base URL of the service. This base URL is often called the *access URL*. How do we get the access URL? We can discover them by querying the VO Registry (see :ref:`registry-access`). Also, to help you get started, we also list a few sample services in :ref:`sample_sia_services`. For the examples below, we assume that you have imported PyVO via the following: >>> import pyvo as vo .. _sia-func: -------------------------- The Simple Search Function -------------------------- As illustrated in :ref:`getting-started-examples`, you can query a data access service through a simple function from the pyvo module. For images, this is the :py:func:`~pyvo.imagesearch` function. Here's an example searching for PNG-formatted preview images from the NASA HEASARC `SkyView `_ archive of sky surveys. >>> url = 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?' >>> previews = vo.imagesearch(url, pos=(350.85, 58.815), size=(0.25, 0.25), format='image/png') >>> previews.nrecs 43 >>> len(previews) 43 What is returned is a list of records in which one describes an image that is available from the archive. We have not downloaded any actual files, yet; this is just information about them. One bit of the information is the URL we can use to download it. 
This example shows that the archive has 43 images available. You'll notice that we displayed this number in two ways. It shows that our response comes packaged in a results object that has some attributes and, as we'll see, functions that help you understand and navigate its contents. It also implies that it can behave like a list. This is common to all results from data access searches. The list behavior makes it easy to iterate through the results and do something with them, like print information about the images and download them: .. code-block:: python import os os.mkdir('skyview_images') for image in previews: print 'fetching', image.title image.cachedataset(dir='skyview_images') We might highlight now a few things that are specific to the :py:func:`~pyvo.imagesearch` function. You can see that we control the format of the images that are selected and returned with the ``format`` argument. To select a specific format, you give it a mime type; for example, you can ask only for FITS images with ``format='image/fits'``. If we just wanted graphical preview images, but didn't care what specific format, we could say, ``format='graphic'``. If we want to see all available formats we could specify ``format='all'`` or leave the argument out of the call altogether: >>> images = vo.imagesearch(url, pos=(350.85, 58.815), size=0.25) Another parameter called :py:attr:`~pyvo.dal.sia.SIAQuery.intersect` makes the service more picky about how the images returned intersect with the search box. For example, if you are search a fairly small region of the sky with an interest in high resolution observations, then you can add ``intersect='enclosed'`` to require that the image be completely enclosed by the search region to be returned; this can be helpful for filtering out low-resolution survey images from your results. A service may support more search parameters than just the ones names as arguments to the :py:func:`~pyvo.imagesearch` function. 
Some parameters correspond to ones defined by the `SIA standard`_ but are used less often. The service may support its own custom parameters as well. Arbitrary parameters can be included in the query by passing them as named keyword=value arguments to the function: >>> nvssims = vo.imagesearch(url, pos=(350.85, 58.815), size=0.2, survey='nvss') It's worth remembering that the whenever you access a service over the network, things can go wrong: you may loose your network connection, the remote site might go down, the specific service may go down, and so on. In the VO, access to some services can fail if the service is not sufficiently compliant with the underlying standard. In all such cases, PyVO will throw a useful exception; these are discussed below in :ref:`data-access-exceptions`. So when you encounter an error while accessing a service, keep in mind: * the problem may not be with your query or the PyVO software; it may be the remote service. * if you are accessing many services as part of a script, be sure to catch exceptions to allow for graceful recovery. .. _sia-results: ------------------ The Results Object ------------------ When you send a query to a VO data access service, it returns a table of matches in `VOTable `_ format. PyVO uses the Astropy VOTable parser (``astropy.io.votable``) to parse the file and then raps it in a helper class that helps you access the results. With the :py:func:`~pyvo.imagesearch` function, the results come in the form of an :py:class:`~pyvo.dal.sia.SIAResults` class. Most of its capabilities comes from the more general :py:class:`~pyvo.dal.query.DALResults` class which is common to all the data access services. It provides some public attributes and functions that can be helpful for interpreting. 
Four attributes of interest are: ================================================ ========================================================= attribute description ================================================ ========================================================= :py:attr:`~pyvo.dal.query.DALResults.queryurl` the full query that was sent to the service, including all the search parameters. ------------------------------------------------ --------------------------------------------------------- :py:attr:`~pyvo.dal.query.DALResults.fieldnames` a list of the fieldnames in this resultset. ------------------------------------------------ --------------------------------------------------------- :py:attr:`~pyvo.dal.query.DALResults.votable` the wrapped :py:class:`astropy.io.votable.tree.Table` object containing the results (see :ref:`Using Astropy to Process Results`) ------------------------------------------------ --------------------------------------------------------- :py:attr:`~pyvo.dal.query.DALResults.table` an :py:class:`astropy.table.Table` version of the results (see :ref:`Using Astropy to Process Results`) ================================================ ========================================================= As we've noted, the results are fundementally a table where the rows reflect, in the case of SIA and the :py:func:`~pyvo.imagesearch` function, matching images available from the archive. The columns of the table represent the image metadata. You find out the names of these columns with the :py:meth:`~pyvo.dal.query.DALResults.fieldnames()` method: >>> previews.fieldnames [u'Survey', u'Ra', u'Dec', u'Dim', u'Size', u'Scale', u'Format', u'PixFlags', u'URL', u'LogicalName'] Most image services let you control the amount of metadata you get back with the `verbosity`parameter. (Passing ``verbosity=3`` to :py:func:`~pyvo.imagesearch` will return all the columns the service has available.) 
You can get more information about a column using either
This help comes into play when you look at individual rows of the table. You can extract an entire column using the :py:meth:`~pyvo.dal.query.DALResults.getcolumn` method: >>> decs = previews.getcolumn('Dec') >>> decs = previews.table['Dec'] # equivalent The result will be a Numpy masked array. Note that if you are accessing data by columns, a more flexible interface is provided by the Astropy Table instance, available via the :py:attr:`~pyvo.dal.query.DALResults.table` attribute (see :ref:`Using Astropy to Process Results`). Often, however, when dealing with data access query results, it is more convenient to process them by rows. To make this easier, you can deal with the results as if it were a list of records. That is, you can: * use ``len()`` to determine number of records in the results * you can access a record via "bracket", ``[]``, operator: >>> first_rec = previews[0] >>> last_rec = previews[-1] * and, you can iterate through the records using a ``for`` loop: >>> for rec in previews: ... print rec.ra, rec.dec, rec.title Finally, we mention that the result objects support the `PEP 249`_ standard, the Python Database API (DB-APIv2) as an alternative way to iterate through the results. To use this interface, call the :py:meth:`~pyvo.dal.query.DALResults.cursor` method which will return a DB-APIv2 ``Cursor`` instance. See the `PEP 249`_ standard for more details. .. _sia-rec: ----------------- The Result Record ----------------- As we saw in the previous section, you can iterate through a query results object to get at individual records. These records are specialized for the particular type of service you queried, but there is some common behavior. 
For example, for all data access services, the record behaves like an immutable dictionary where the keys are the names of the columns from the result table: >>> first = previews[0] >>> first.keys() [u'Survey', u'Ra', u'Dec', u'Dim', u'Size', u'Scale', u'Format', u'PixFlags', u'URL', u'LogicalName'] >>> first['Format'] 'image/png' As was mentioned in the previous subsection, the column names are not standardized, so PyVO uses other metadata figure out what the columns contain regardless of what they are called. To make it easier to access, PyVO makes certain values available as attributes of the record. For example, the title of the image, which SkyView calls "LogicalName", is made available via the ``title`` attribute: >>> first.title 2massh >>> first.ra 350.85000000000002 The data PyVO can expect to find depends on the type of service that was called. Thus, for each type of service, PyVO provides a specialized class. In the case of results from :py:func:`~pyvo.imagesearch`, an individual record is available as an :py:class:`~pyvo.dal.sia.SIARecord` instance. 
Here are the standard attributes it provides: ==================================================== ======================================================================== attribute description ==================================================== ======================================================================== :py:attr:`~pyvo.dal.sia.SIARecord.ra` the IRCS right ascension of the center of the image in decimal degrees ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.dec` the IRCS declination of the center of the image in decimal degrees ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.title` the name or identifier of the image as given by the archive ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.instr` the name of the instrument (or instruments) that produced the data that went into this image. 
---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.dateobs` the modified Julien date (MJD) of the mid-point of the observational data that went into the image ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.naxes` the number of axes in the image ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.naxis` the lengths of the axes in the image in pixels ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.scale` the scale of the pixels in each image axis in degrees/pixels ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.format` the format of the image ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.coord_frame` the coordinate system reference frame, one of the following: "ICRS", "FK5", "FK4", "ECL", "GAL", and "SGAL". ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.coord_equinox` the equinox of the used coordinate system ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.coord_projection` the celestial projection (TAN / ARC / SIN / etc.) 
---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.coord_refpixel` the image pixel coordinates of the WCS reference pixel ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.coord_refvalue` the world coordinates of the WCS reference pixel ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.cdmatrix` the WCS CD matrix defining the scale and rotation (among other things) of the image. ordered as CD[i,j] = [0,0], [0,1], [1,0], [1,1] ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.bandpass_id` the bandpass by name (e.g., "V", "SDSS_U", "K", "K-Band", etc.) ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.bandpass_unit` the astropy unit used to represent spectral values ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.bandpass_refvalue` the characteristic (reference) wavelength, frequency or energy for the bandpass model, as an astropy Quantity of bandpass_unit ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.bandpass_hilimit` the upper limit of the bandpass, as an astropy Quantity in bandpass_unit ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.bandpass_lolimit` the lower limit of the bandpass, as an 
astropy Quantity in bandpass_unit ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.pixflags` the type of processing done by the image service to produce an output image pixel a string of one or more of the following values: * C -- The image pixels were copied from a source image without change, as when an atlas image or cutout is returned. * F -- The image pixels were computed by resampling an existing image, e.g., to rescale or reproject the data, and were filtered by an interpolator. * X -- The image pixels were computed by the service directly from a primary data set hence were not filtered by an interpolator. * Z -- The image pixels contain valid flux (intensity) values, e.g., if the pixels were resampled a flux-preserving interpolator was used. * V -- The image pixels contain some unspecified visualization of the data, hence are suitable for display but not for numerical analysis. 
---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.acref` the URL that can be used to retrieve the image ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.acref_ttl` the minimum time to live in seconds of the access reference ---------------------------------------------------- ------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.filesize` the (estimated) size of the image in bytes ==================================================== ======================================================================== When the data access service search for datasets, as is the case with :py:func:`~pyvo.imagesearch` and :py:func:`~pyvo.spectrumsearch`, one of the columns in the result will be a URL for downloading the dataset. There are two record methods that are particularly helpful for downloading the dataset. First, you can get the URL yourself for downloading the dataset via the :py:meth:`~pyvo.dal.query.Record.getdataurl`: >>> image = previews[0] >>> image.getdataurl() 'http://skyview.gsfc.nasa.gov/cgi-bin/images?position=350.85%2C58.815&survey=2massh&pixels=300%2C300&sampler=Clip&size=0.25%2C0.25&projection=Tan&coordinates=J2000.0&nofits=1&quicklook=png&return=png' The :py:meth:`~pyvo.dal.query.Record.cachedataset` will use this URL to actually download the image: >>> image.title '2massh' >>> image.format 'image/png' >>> image.cachedataset("2massh.png") This will simply save the downloaded image in the current directory with the name ``2massh.png``. :py:meth:`~pyvo.dal.query.Record.cachedataset` can help you out with filenames when downloading a bunch of images: >>> import os >>> os.mkdir("skyview_previews") >>> for image in previews: ... 
image.cachedataset(dir="skyview_previews") In the above example, :py:meth:`~pyvo.dal.query.Record.cachedataset` will pick a default name to use based on the image title and format. And don't worry: if the name already exists on disk, it won't get overwritten. Rather :py:meth:`~pyvo.dal.query.Record.cachedataset` will insert a sequence number into the name instead. .. _data-access-exceptions: ------------------------------------- When Things Go Wrong: Handling Errors ------------------------------------- Whenever you access a service over the network, things can go wrong: you may lose your network connection, the remote site might go down, the specific service may go down, and so on. In the VO, access to some services can fail if the service is not sufficiently compliant with the underlying standard. In all such cases, PyVO will throw a useful exception. In other cases, a service might be mildly non-compliant, and so you may see numereous warnings printed to your screen. When this happens, you will still have a result set you can work with; however, some of the data may not be fully available (e.g. with the proper Python type). So, when you encounter issues while accessing a VO service, keep in mind: * the problem may not be with your query or the PyVO software; it may be the remote service. * when there are warnings, the result is often still useable. * if you are access many services as part of a script, be sure to catch exceptions to allow for graceful recovery. There are three specific kinds of errors that can occur during a data access search call like :py:func:`~pyvo.imagesearch` and they all have a common base class, :py:class:`~pyvo.dal.query.DALAccessError`. Thus, if you are not picky about what might go wrong, you can catch just this base class. For instance, recall our example from :ref:`getting-started-examples` in which we were searching several services: .. code-block:: python for arch in archives: print "searching %s..." 
% arch.shortname try: matches = arch.search(pos=pos, size=0.25) except vo.DALAccessError, ex: print "Trouble accessing %s archive (%s)"\ % (arch.shortname, str(ex)) continue print "...found %d images" % len(matches) In this example, if something went wrong, we just reported the problem and went onto the next service. You can distinguish between three different errors: ============================================ =================================================================================== Exception class description of failure ============================================ =================================================================================== :py:class:`~pyvo.dal.query.DALServiceError` an exception indicating a failure communicating with a DAL service. This will be thrown when the service is either unreachable or returns with an HTTP protocol error. -------------------------------------------- ----------------------------------------------------------------------------------- :py:class:`~pyvo.dal.query.DALFormatError` an exception indicating that a DAL response contains fatal format errors. This will be thrown if the return VOTable is unparse-able due to format errors (like being illegal XML). -------------------------------------------- ----------------------------------------------------------------------------------- :py:class:`~pyvo.dal.query.DALQueryError` an exception indicating an error by a working DAL service while processing a query. In this case, the service returns with a legal response but is reporting a problem preventing records to be returned. Common reasons for this include illegal input parameters or the number of results exceeds the service's limits. ============================================ =================================================================================== The first two indicate a problem with the service while the third one indicates a user/client error. 
The first two have a common base exception called :py:class:`~pyvo.dal.query.DALProtocolError` which you can catch handle service errors separatel from user errors. .. _sia-query: ------------------- Using Query Objects ------------------- Internally, the data access search functions like :py:func:`~pyvo.imagesearch` uses a special query class to execute the query. This class can sometimes be useful at the query level. Query classes are specialized to the type of service being accessed and have built-in knowledge the input parameters it accepts. In the case of searching for images via an SIA service, one can use a subclass of :py:class:`~pyvo.dal.sia.SIAQuery`. .. code-block:: python from pyvo.dal.sia import SIAQuery url = 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=dss2&' query = SIAQuery(url, (350.85, 58.815), 0.25, format='image/fits') images = query.execute() Where the query object can be useful is when you want to modify something to match service specific requirements. You might note two other useful members of the query class. First is :py:attr:`~pyvo.dal.query.DALQuery.queryurl`: this contains the query URL it will use when you execute it: >>> query.queryurl 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=dss2&FORMAT=image%2Ffits&POS=350.85,58.815&SIZE=0.25,0.25' This is often useful for debugging. For instance, you can copy this url into your browser and view the VOTable results directly. If you want to work with the service response directly, perhaps use your own parser, you can use the :py:meth:`~pyvo.dal.query.DALQuery.execute_stream` method to execute it. The result will be a file-like object that will stream the raw XML response from the service. Other ``execute_*`` functions are available to provide access to other forms of the output. We end this examination with another example of how to create a query object, using a Service instance: .. 
code-block:: python from vo.sia import SIAService url = 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=dss2&' query = SIAService(url).create_query(size=0.1, format='image/fits') query.pos = (350.85, 58.815) The :py:meth:`~pyvo.dal.sia.SIAService.create_query` method both instantiates the query object and presets several constraints, the same provide by the :py:func:`~pyvo.imagesearch` function. This is method is a feature of a *service class*, the topic of the next subsection. .. _service-objects: --------------- Service Objects --------------- Each of the data access services have an associated Service class, a class that represents a specific service itself. At a minimum, it will simply wrap a service's access URL; however, when created as a result of a registry query, service objects can also contain other metadata about the service (see :ref:`registry-access`). In most cases, you won't need to work with service objects directly. You may find them useful in scripts that have to manage many services in a session. Here's a simple way to create a service instance: .. code-block:: python url = 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=dss2&' service = vo.sia.SIAService(url) You can get service instances from the results of a registry query: >>> hla = vo.regsearch(['Hubble Legacy Archive'], servicetype='image') >>> len(hla) 1 >>> service = hla[0].service >>> service.description 'The Hubble Space Telecope Legacy Archive (HLA) was developed at the Space Telescope Science Institute to optimize the science return from HST instruments. This resource is an image service which accesses all HLA observation data. The calibrated data is fully online with several forms of access including footprint visualization, composite images, extracted spectra and source lists.' You can search a service with its :py:meth:`~pyvo.dal.sia.SIAService.search` method; its signature is just like the plain function, except the access URL is not needed: .. 
code-block:: python results = service.search(pos=(350.85,58.815), size=0.1) Or, you can use its :py:meth:`~pyvo.dal.sia.SIAService.create_query` method to create a query object: .. code-block:: python query = service.create_query() # no search parameters are set query = service.create_query(size=0.1, format='image/fits') A Service Object that is embedded with metadata can be useful in some contexts such as a GUI application where you might want an object that represents a service to be self-describing. -------------------------------------- Summary of Common Data Access Features -------------------------------------- In this section, we've examined the API for finding images using the `Simple Image Access standard service `_, highlighting the features that are common to all the data access services. For reference, we summarize those common features here: * Data access searches can be executed via functions available in the :py:mod:`pyvo` module. These include :py:func:`~pyvo.imagesearch()`, :py:func:`~pyvo.spectrumsearch()`, :py:func:`~pyvo.conesearch()`, and :py:func:`~pyvo.linesearch()`. (:py:func:`~pyvo.regsearch()`, used for discovering services, works in a similar way.) See :ref:`sia-func`. * To connect with a data access services, you need its *access URL*, a base URL that PyVO uses to build and execute your query. This is passed as the first argument to the data access service or it can be used to create a query object. See :ref:`sia-func`. * The results of a search query is a results object (a subclass of the :py:class:`~pyvo.dal.query.DALResults` class) which wraps around the parsed VOTable response. Each row in the table represents matched item, such as an image or a catalog record. See :ref:`sia-results`. * There are three ways to interact with the results: * You can iterate through the records, treating the results object like a list. Individual record objects give you intelligent access to the record metadata. 
* You can treat the results as an Astropy :py:class:`~astropy.table.Table`; this is especially useful for catalog results that you might combine with your own data. * You can interact with the results directly as a VOTable :py:class:`~astropy.io.votable.tree.Table`. While less flexible than a general Astropy :py:class:`~astropy.table.Table`, it retains all of the VOTable-specific metadata. See :ref:`sia-results`. * When you iterate through a results instance to get at individual records, the records will be a specialization of the the :py:class:`~pyvo.dal.query.Record` class. You can access key standard metadata as properties of the records. The properties available depend on the type of service the results come from. See :ref:`sia-results` and :ref:`sia-rec`. * If the service searches for datasets (i.e. :py:func:`~pyvo.imagesearch()` and :py:func:`~pyvo.spectrumsearch()`), you can access the dataset via the record instance. You can use either :py:meth:`~pyvo.dal.query.Record.getdataurl` to get the URL to the dataset, or you can use :py:meth:`~pyvo.dal.query.Record.cachedataset` to actually download it to disk. See :ref:`sia-rec`. * If a problem occurs while accessing a service, PyVO will raise a specialized exception derived from :py:class:`~pyvo.dal.query.DALAccessError`. When accessing many services (say, in a script), it's useful to catch these exceptions as a guard against services that are down or don't operate properly. See :ref:`data-access-exceptions`. * An alternative way to create data access queries is with a *query object*. This can be useful when you want to modify query bevahiour. The :py:meth:`~pyvo.dal.query.DALQuery.getqueryurl` method will give you the full query URL that will be sent to the service, which can be helpful for debugging. See :ref:`sia-query`. * Service objects are also available for representing a service; there is a class for each type of service. 
These are not normally used directly by users, but they can be helpful when managing a number of different services discovered from a registry. See :ref:`service-objects`. .. _sample_sia_services: --------------------------- A Few Sample Image Services --------------------------- You can discover image services with queries to the VO registry (see :ref:`registry-access`). Here, though, list a few service access URLs that can be used with the examples shown above. NASA HEASARC SkyView Archive: *http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?* Hubble Legacy Archive (HLA): *http://hla.stsci.edu/cgi-bin/hlaSIAP.cgi?imagetype=best&inst=ACS,ACSGrism,WFC3,WFPC2,NICMOS,NICGRISM,COS,STIS,FOS,GHRS&proprietary=false&* Digitized Sky Survey (DSS2):: *http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=dss2&* NRAO VLA Sky Survey (NVSS):: *http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=nvss&* IRSA Two Micron All-Sky Survey (2MASS):: *http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=2mass&* .. _data-access-ssa: ============================ Simple Spectral Access (SSA) ============================ Searching for spectra is much like searching for images, although the search parameters are a little different. Instead of searching a rectangular region of the sky, we look for spectra falling within a circular region of the sky. Nevertheless, the spectrum search API follows the same pattern as described in the previous section (:ref:`data-access-sia`). .. _ssa-func: --------------------------- The Spectra Search Function --------------------------- The :py:func:`~pyvo.spectrumsearch` function can be used to find spectra from some region of the sky. Here's an example of search for spectra of lensed QSOs in the direction of the Coma cluster: >>> url = 'http://dc.zah.uni-heidelberg.de/mlqso/q/q/ssap.xml?' >>> spectra = vo.spectrumsearch(url, pos=(194.9529, 27.9805556), size=0.1) >>> len(spectra) 180 You can restrict the results to a specific format. 
This service happens to have previews in JPEG format: >>> previews = vo.spectrumsearch(url, pos=(194.9529, 27.9805556), size=0.1, format='image/jpeg') >>> len(previews) 36 In addition to accepting format values as MIME-type names, some special values are accepted, including "fits" for FITS format and "xml" for VOTable format: >>> spectra = vo.spectrumsearch(url, pos=(194.9529, 27.9805556), size=0.1, format='fits') >>> len(spectra) 36 See the :py:attr:`pyvo.dal.ssa.SSAQuery.format` for a full enumeration of the special format values. Just like searching for images, you can iterate through your results to process the spectra: .. code-block:: python import os os.mkdir("cdfs-spectra") for spec in spectra: print "Downloading %s..." % spec.title spec.cachedataset(dir="cdfs-spectra") .. _ssa-rec: ---------------------------- Spectrum Results and Records ---------------------------- The results object returned by a spectrum search has the same interface as what is returned from an image search (see :ref:`sia-results`): * you can treat the results like a list of records: iterate through the records or access specific records with the bracket (``[``*i*``]``) operator. * Use :py:attr:`~pyvo.dal.query.DALResults.fieldnames` and :py:meth:`~pyvo.dal.query.DALResults.fielddescs` to access the record field names and descriptions. * Handle the results as an Astropy :py:class:`~astropy.table.Table` or VOTable :py:class:`~astropy.io.votable.tree.Table`. When you process the results like a list of records, each record will be a :py:class:`pyvo.dal.ssa.SSARecord` instance. Just like its image counterpart, you can treat the record like a dictionary where the keys are the field names: >>> rec = spectra[0] >>> rec.keys() ('Survey', 'Ra', 'Dec', 'Dim', 'Size', 'Scale', 'Format', 'PixFlags', 'URL', 'LogicalName') In addition, the record provides properties that allow you to pick out key metadata about the spectrum regardless of what the column names are. These include: .. 
the length of a link in the table below makes the first column larger than it needs to be; it can be effectively narrowed by making the second column super wide. ========================================== ================================================================================================================== property description ========================================== ================================================================================================================== :py:attr:`~pyvo.dal.ssa.SSARecord.pos` the ICRS position of the center of the spectrum in decimal degrees ------------------------------------------ ------------------------------------------------------------------------------------------------------------------ :py:attr:`~pyvo.dal.ssa.SSARecord.title` the name or identifier of the spectrum as given by the archive ------------------------------------------ ------------------------------------------------------------------------------------------------------------------ :py:attr:`~pyvo.dal.ssa.SSARecord.format` the format of the spectrum. ------------------------------------------ ------------------------------------------------------------------------------------------------------------------ :py:attr:`~pyvo.dal.ssa.SSARecord.dateobs` the modified Julian date (MJD) of the mid-point of the observational data that went into the spectrum (optional) ------------------------------------------ ------------------------------------------------------------------------------------------------------------------ :py:attr:`~pyvo.dal.ssa.SSARecord.instr` the name of the instrument (or instruments) that produced the data that went into this spectrum. 
------------------------------------------ ------------------------------------------------------------------------------------------------------------------ :py:attr:`~pyvo.dal.sia.SIARecord.acref` the URL that can be used to retrieve the image (equivalent to the output of :py:meth:`~pyvo.dal.sia.SIARecord.getdataurl`) ========================================== ================================================================================================================== .. raw:: html
Just like retrieving images, we can download individual spectrum datasets using the :py:meth:`~pyvo.dal.sia.SIARecord.getdataurl` and :py:meth:`~pyvo.dal.sia.SIARecord.cachedataset`. -------------------------- Search and Service Classes -------------------------- Just as in the image search case, the spectrum interface also has a query class (see :ref:`sia-query`) and service class (see :ref:`service-objects`). The query class, :py:class:`~pyvo.dal.ssa.SSAQuery`, differs from its SIA conterpart in the search parameters it exposes as properties: .. the length of a link in the table below makes the first column larger than it needs to be; it can be effectively narrowed by making the second column super wide. +---------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ | :py:class:`~pyvo.dal.ssa.SSAQuery` search constraint properties | +=============================================+=============================================================================================================================================================+ | :py:attr:`~pyvo.dal.ssa.SSAQuery.pos` | the center position of the circular search region given as a 2-element | | | tuple denoting RA and declination | +---------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ | :py:attr:`~pyvo.dal.ssa.SSAQuery.diameter` | the diameter of the circular search region | +---------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ | :py:attr:`~pyvo.dal.ssa.SSAQuery.time` | the range of observation time to restrict spectra to | 
+---------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ | :py:attr:`~pyvo.dal.ssa.SSAQuery.format` | the desired format of the images to be returned | +---------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ .. raw:: html
The SSA service standard defines a large number of additional (optional) constraints that can be set via the :py:meth:`~pyvo.dal.query.DALQuery.set` method. The `IVOA `_ documentation describes them all. Note that there is also a Service class, :py:class:`~pyvo.dal.ssa.SSAService`, for SSA services which acts just like its SIA counterpart. .. _data-access-scs: ======================== Simple Cone Search (SCS) ======================== Owing in part to its simplicity, Simple Cone Search (SCS) services are the most prevalent of the data access services in the VO. It is used to select records from source and observation catalogs. That is, each record in a cone-search-able catalog represents either a discrete source in the sky or an observation; consequently, each record has a position associated with it. A cone search of such a catalog returns all records that are within some given distance of a search position (i.e. that fall within a circle or "cone" on the sky). ------------------------------- The Simple Cone Search Function ------------------------------- The :py:func:`~pyvo.conesearch` function can be used to submit position-based catalog queries. Here's an example that selects guide stars from the Guide Star Catalog (v2.3) within 3 arcminutes of a position: >>> url = 'http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23&' >>> stars = vo.conesearch(url, pos=(161.265, -59.68), radius=0.05) >>> len(stars) 525 The results table (stored in ``stars`` in the above example) must have at least three columns: a source or observation identifier, a right ascension, and a declination. Typically though, it will include any number of other attributes of the source (we'll explore this in the next section). The :py:func:`~pyvo.conesearch` function provides some coarse-grain control over how many columns are returned via its ``verbosity`` parameter. It takes an integer value--1, 2, or 3. 
If it is 1, the service will return the minimum set that the publisher has decided is sufficient for describing the source or observation. A value of 3 returns all of the catalog's columns that are available. So, if you are mainly just interested in source positions (say, for example, to plot the sources over an image), you can set the ``verbosity`` parameter to 1. If you are looking for specific characteristics of the sources, such as photometry measurements, then you probably want to set it to 3. If you don't set it, the service is obligated to assume a value of 2. Note that supporting the ``verbosity`` parameter is optional for a service; that is, the service is allowed to ignore the ``verbosity`` value and return all of the available columns, regardless. ----------------------------------- The Cone Search Results and Records ----------------------------------- The results that come back from a cone search are wrapped as an :py:class:`~pyvo.dal.scs.SCSResults` object, and when we iterate through the results, each record is provided as an :py:class:`~pyvo.dal.scs.SCSRecord` instance. In addition to the required identifier, right ascension, and declination columns, the results table will have a number of other columns describing the source or observation. 
Using the example from the previous section, we can see what information we have about our guide stars: >>> stars.fieldnames [u'hstID', u'ra', u'dec', u'GSC1ID', u'raEpsilon', u'decEpsilon', u'epoch', u'FpgMag', u'JpgMag', u'NpgMag', u'UMag', u'BMag', u'VMag', u'RMag', u'IMag', u'JMag', u'HMag', u'KMag', u'FpgMagCode', u'JpgMagCode', u'NpgMagCode', u'UMagCode', u'BMagCode', u'VMagCode', u'RMagCode', u'IMagCode', u'JMagCode', u'HMagCode', u'KMagCode', u'FpgMagErr', u'JpgMagErr', u'NpgMagErr', u'UMagErr', u'BMagErr', u'VMagErr', u'RMagErr', u'IMagErr', u'JMagErr', u'HMagErr', u'KMagErr', u'class', u'sourceStatus', u'semiMajorAxis', u'positionangle', u'eccentricity', u'variableFlag', u'multipleFlag', u'distance'] >>> stars.getdesc('IMag').description u'I band magnitude' For more information about inspecting the table header information, see the about section on :ref:`sia-results`. Recall that the SCS standard does not mandate standardized column names (as discussed in the section about :ref:`sia-results`); thus, the columns will retain their original names from when they were first published. 
The :py:class:`~pyvo.dal.scs.SCSRecord` class provides access to the three required columns (regardless of what they are called) as record properties: >>> star = stars[0] >>> (star.id, star.ra, star.dec) ('S4B0000701', 161.26477050781301, -59.6844291687012) In particular, those properties are: ====================================== ================================================================================================================== property description ====================================== ================================================================================================================== :py:attr:`~pyvo.dal.scs.SCSRecord.id` the name or identifier of the source as given by the archive -------------------------------------- ------------------------------------------------------------------------------------------------------------------ :py:attr:`~pyvo.dal.scs.SCSRecord.pos` the ICRS position of the source in decimal degrees ====================================== ================================================================================================================== -------------------------- Search and Service Classes -------------------------- Like the other data access services, SCS also has query and service classes (see :ref:`sia-query` and :ref:`service-objects`, respectively): :py:class:`~pyvo.dal.scs.SCSQuery` and :py:class:`~pyvo.dal.scs.SCSService`. The search constraints that can be set as properties on an :py:class:`~pyvo.dal.scs.SCSQuery` instance are as follows: .. autosummary:: ~pyvo.dal.scs.SCSQuery.pos ~pyvo.dal.scs.SCSQuery.radius ~pyvo.dal.scs.SCSQuery.verbosity .. _data-access-sla: ========================= Simple Line Access (SLAP) ========================= If you do spectral line studies, you may on occasion need to consult a database of spectral line transitions. 
For example, if you are planning spectral observations within an arbitrary bandpass window, you may need to determine what lines can appear there. A few such databases are available in the VO as Spectral Line Access (SLA, or sometimes called SLAP) services. Here's an example searching the Splatalogue database: >>> url = 'http://find.nrao.edu/splata-slap/slap' >>> lines = vo.linesearch(url, wavelength="0.2110/0.2120") >>> len(lines) 54 >>> for line in lines: ... print("{0}: {1}".format(line['molformula'], line.wavelength)) ... H(beta): 0.211086447683 H: 0.211061133375 g-CH3CH2OH: 0.211008648827 He(beta): 0.211000464262 .. note:: Because of their specialized nature, there are very few SLA services available and the community's experience with them is still low. Consequently, you may experience service compliance issues, and some of the features of the :py:mod:`pyvo.dal.sla` module may not work as expected with the currently available services. .. _data-access-tap: =========================== Table Access Protocol (TAP) =========================== The Table Access Protocol supports querying remote astronomical databases using a SQL-like language, called ADQL. It also provides facilities for discovering metadata for the tables and facilities on the remote side. Combined with a capability for uploading table to the remote service, TAP enables complex query patterns including joins of multiple local and remote data sources. In order to access a TAP service, one needs to build a :py:class:`~pyvo.dal.tap.TAPService` object with the TAP service's access URL. >>> import pyvo as vo >>> url = "http://dc.g-vo.org/tap" >>> service = vo.dal.TAPService(url) --------------- Running queries --------------- To start a query, call the :py:class:`~pyvo.dal.tap.TAPService.run_sync` method of the service object: >>> query = "SELECT TOP 5 raj2000, dej2000, rv FROM rave.main WHERE rv BETWEEN 40 AND 70" >>> result = service.run_sync(query) .. 
note:: Many services set a low default value for the MAXREC parameter. You can override it with: >>> result = service.run_sync(query, maxrec=1000000) >>> for row in result: ... print("{0} {1} {2}".format(row["raj2000"], row["dej2000"], row["rv"])) ... 319.03904167 -20.19277778 40.0 343.12045833 -16.03422222 40.0 53.863 -15.60930556 40.0 58.36320833 -2.98438889 40.0 164.00508333 -26.09277778 40.0 .. note:: There is also a shortcut for building the service object and calling its ``run_sync`` method in one go if you do not need the service object again: >>> result = vo.tablesearch(url, query) On repeated queries to the same service, this convenience has a significant performance penalty. -------------------- Asynchronous queries -------------------- Asynchronous queries do not need an active TCP connection while being executed. This is useful for running time-consuming queries and/or via unstable Internet connections. It also allows to retrieve the query result from an uri, which is handy for crossmatches etc. They have nearly the same functionality as synchronous queries, except that they return a :py:class:`~pyvo.dal.tap.AsyncTAPJob` object instead of a result. >>> job = service.submit_job(query) >>> print(job.jobId) 1sEa5g >>> print(job.phase) PENDING This will create the job on the server, but doesn't start it yet. >>> job.run() .. note:: You can obtain the job url with :py:class:`~pyvo.dal.tap.AsyncTAPJob.url`. This url can be used to re-attach the job at a later point >>> job = vo.dal.AsyncTAPJob(joburl) >>> print(job.phase) RUN In the typical case, one simply calls the :py:class:`~pyvo.dal.tap.AsyncTAPJob.wait` method. This will block until the remote job has finished (by being completed or aborted, or by causing an error condition). 
If the job's ``phase`` is ``COMPLETED`` after ``wait`` has returned (one can check that by calling :py:class:`~pyvo.dal.tap.AsyncTAPJob.raise_if_error`), the job's result can be retrieved by calling :py:class:`~pyvo.dal.tap.AsyncTAPJob.fetch_result`: >>> job.wait() >>> job.raise_if_error() >>> result = job.fetch_result() >>> for row in result: ... print("{0} {1} {2}".format(row["raj2000"], row["dej2000"], row["rv"])) ... 319.03904167 -20.19277778 40.0 343.12045833 -16.03422222 40.0 53.863 -15.60930556 40.0 58.36320833 -2.98438889 40.0 164.00508333 -26.09277778 40.0 .. note:: if you want to get the result directly, run :py:class:`~pyvo.dal.tap.TAPService.run_async` instead. The result url can be obtained with :py:class:`~pyvo.dal.tap.TAPResults.queryurl` Here is a list of the relevant attributes and methods of AsyncTAPJob: .. autosummary:: ~pyvo.dal.tap.AsyncTAPJob.jobId ~pyvo.dal.tap.AsyncTAPJob.phase ~pyvo.dal.tap.AsyncTAPJob.execution_duration ~pyvo.dal.tap.AsyncTAPJob.destruction ~pyvo.dal.tap.AsyncTAPJob.quote ~pyvo.dal.tap.AsyncTAPJob.owner ~pyvo.dal.tap.AsyncTAPJob.run ~pyvo.dal.tap.AsyncTAPJob.abort ~pyvo.dal.tap.AsyncTAPJob.wait ~pyvo.dal.tap.AsyncTAPJob.raise_if_error ~pyvo.dal.tap.AsyncTAPJob.fetch_result --------------------------------- Capabilities and service metadata --------------------------------- There are three types of service metadata: * Availability, if and since the service is running. * Capabilities, which describe the different interfaces and available functions. * Tables, which contains a listing of the actual tables. >>> service = TAPService(url) >>> print(service.available) True >>> print(service.up_since) datetime.datetime(2000, 0, 0, 0, 0, 0) >>> print(service.capabilities) >>> print(service.tables.keys()) The keys within tables are the fully qualified table names as they can be used in queries. To inspect the column metadata for a table, see the column property of a give table. 
>>> service.tables["rave.main"].columns See also http://docs.astropy.org/en/stable/table/index.html. .. note:: Some TAP services have tables metadata of several megabytes. Hence, accessing ``.tables`` may incur a significant price. Experts may be better off pulling necessary metadata from ``TAP_SCHEMA`` by standard TAP means. The structure of ``service.capabilities`` should be considered an implementation detail at this point. It is preferable to access the information through properties on service instead: .. autosummary:: ~pyvo.dal.tap.TAPService.maxrec ~pyvo.dal.tap.TAPService.hardlimit ~pyvo.dal.tap.TAPService.upload_methods ------- Uploads ------- Uploads allow you to use the result of other queries as input. A common use case is positional crossmatching using data from different catalogs. File uploads are specified using the `uploads` parameter, which is a dict of tablename: uri >>> service.run_sync(query, uploads={'t1': 'http://example.org/votable.xml'}) >>> service.run_sync(query, uploads={'t1': result}) >>> service.run_sync(query, uploads={'t1': open('/path/to/votable.xml')}) >>> service.run_sync(query, uploads={'t1': result.table}) >>> service.run_sync(query, uploads={'t1': '/path/to/votable.xml'}) .. note:: To check if the service supports the desired URI scheme, evaluate the value of :py:class:`~pyvo.dal.tap.TAPService.upload_methods` Your upload can be referenced using 'TAP_UPLOAD.t1' as table name. .. _datalink: ======== Datalink ======== Datalink allows you to access datasets with multiple resources per row in the data access layer results. One can use the :py:class:`~pyvo.dal.datalink.DatalinkMixin.iter_datalinks` method to get an iterator which returns :py:class:`~pyvo.dal.datalink.DatalinkResults` instances. >>> dlres = next(result.iter_datalinks()) You can then either select a specific dataset >>> dlrow = dlres.bysemantics("#preview") or the main dataset. 
>>> dlrow = dlres.getdataset() To save the file to disk, use :py:class:`~pyvo.dal.query.cachedataset` as described in :ref:`sia-results`. >>> dlrow.cachedataset() .. rubric:: Footnotes .. [#f1] *UCD* stands for *unified content descriptor*. For more information, see the `IVOA UCD standard `_, as well as the list of valid version 1+ UCDs on the `CDS UCD Info page `_. Note that SIA version 1 uses the older style UCD1 labels described `here `_. pyvo-0.6.1+dfsg.orig/docs/pyvo/registry_access.rst0000644000175000017500000004664613125165225021603 0ustar noahfxnoahfx.. _registry-access: *************************************** Discovering Services with a VO Registry *************************************** In the preceding section, :ref:`data-access`, we showed how you could retrieve data from various archives using standard VO services they provide. To search a particular archive, one needs the *base URL* of that archive's service. This section shows how to discover VO services (and their base URLs) using what is known as a *VO Registry service*. A *VO Registry* is an on-line database of all data collections and services known to the Virtual Observatory. Using the :py:mod:`pyvo.registry` module, you can discover archive services based on type, waveband, and topic keywords. .. _simple-discovery: ======================== Simple Service Discovery ======================== The most common use of the registry is to find archives with :ref:`VO data access services `, and the simplest way to do this is to use the :py:func:`~pyvo.regsearch` function. For example, to find data collections that contain searchable infrared images: >>> services = vo.regsearch(servicetype='sia', waveband='infrared') >>> len(services) 15 Aha! Perhaps you didn't realize there were that many. What collections are these? >>> for service in services: ... 
print service.res_title Chandra X-ray Observatory Data Archive Chandra Source Catalog Swift BAT All-Sky Survey: keV GRANAT/SIGMA HEAO 1A ROSAT High Resolution Image Pointed Observations Mosaic: Intensity INTEGRAL/Spectral Imager Galactic Center Survey Nine Year INTEGRAL IBIS keV Galactic Plane Survey: PSPC summed pointed observations, 1 degree cutoff, PSPC summed pointed observations, 2 degree cutoff, PSPC summed pointed observations, 0.6 degree cutoff, ROSAT All-Sky X-ray Survey Band: ROSAT All-Sky X-ray Background Survey: Band RXTE Allsky keV ROSAT Survey and Pointed Images As you can gather, each record in the registry search results represents a different service (in this case, an image service). Included in the record is the all-important base URL for the service: >>> services[0].access_url 'http://cda.harvard.edu/cxcsiap/queryImages?' However, it's not necessary to keep track of that URL because you can now search that collection directly via the registry record: >>> images = services[0].search(pos=(350.85, 58.815), size=0.25) >>> len(images) 474 (See :ref:`data-access-sia` to learn what to do with image search results.) Other types of services can be found via the ``servicetype`` parameter: +---------------------------+----------------------------------------+ | set ``servicetype`` to... | ...to find:                            | +===========================+========================================+ | sia                       | Simple Image Access (SIA) services     | +---------------------------+----------------------------------------+ | ssa                       | Simple Spectral Access (SSA) services  | +---------------------------+----------------------------------------+ | conesearch                | Simple Cone Search (SCS) services      | +---------------------------+----------------------------------------+ | slap                      | Simple Line Access (SLA) services      | +---------------------------+----------------------------------------+ | tap                       | Table Access Protocol (TAP) services   | +---------------------------+----------------------------------------+ .. raw:: html
For example, to find all known Cone Search services: >>> cats = vo.regsearch(servicetype='conesearch') >>> len(cats) 18189 Wow, that's a lot of catalogs. (Most of these are from the `Vizier Catalog Archive `_; every Vizier catalog that includes a position is available as a Cone Search service.) For just catalogs related to blazars: >>> cats = vo.regsearch(keywords=['blazar'], servicetype='conesearch') >>> len(cats) 146 How about blazars observed with Fermi? >>> cats = vo.regsearch(keywords=['blazar', 'Fermi'], servicetype='conesearch') >>> len(cats) 244 Sometimes you may be looking for a particular catalog or image collections that you already know exists, and you just need to learn the base URL for the service. The ``keywords`` parameter can be used to find it. For example, suppose you want to get cutout images from the NRAO VLA Sky Survey (NVSS): >>> colls = vo.regsearch(keywords=["NVSS"], servicetype='sia') >>> for coll in colls: ... print coll.res_title NVSS Sydney University Molonglo Sky Survey Obviously, the first record is the NVSS image archive. The SUMSS collection was matched as well because its description in the registry happens to include the string, "NVSS". If you want to limit the search results to a certain datamodel, include the ``datamodel`` parameter: >>> obscores = vo.regsearch(datamodel="obscore") .. _reg-results: =========================== Registry Search Result Data =========================== As you can see from the examples above, a search will often return more than one record, and so sometimes you need to review some of the resource metadata to determine which one or ones you want. You may have noticed that the results behave similarly to the results from the data access services (see :ref:`data-access-sia`). Like them, registry search results are returned as a :py:class:`~pyvo.registry.regtap.RegistryResults` instance, and each record is represented as a :py:class:`~pyvo.registry.regtap.RegistryResource` instance. 
A :py:class:`~pyvo.registry.regtap.RegistryRecord` record acts like a dictionary where the keys are the column names from the results table; using our NVSS example from the previous section, >>> nvss = colls[0] >>> nvss.keys() ['cap_index', 'res_description', 'intf_type', 'standard_id', 'cap_index_', 'url_use', 'res_type', 'intf_role', 'cap_description', 'wsdl_url', 'source_format', 'res_version', 'ivoid__', 'content_level', 'source_value', 'std_version', 'updated', 'short_name', 'query_type', 'creator_seq', 'intf_index', 'content_type', 'harvested_from', 'res_title', 'region_of_regard', 'created', 'rights', 'waveband', 'reference_url', 'ivoid', 'cap_type', 'access_url', 'ivoid_', 'result_type'] >>> nvss['waveband'] ('Radio',) Some of the more useful items are available as properties: ================================================================== ========================================================================================================================================================================== :py:attr:`~pyvo.registry.regtap.RegistryResource.ivoid` the IVOA identifier for the resource. :py:attr:`~pyvo.registry.regtap.RegistryResource.res_type` the resource types that characterize this resource. :py:attr:`~pyvo.registry.regtap.RegistryResource.short_name` the short name for the resource :py:attr:`~pyvo.registry.regtap.RegistryResource.res_title` the title of the resource :py:attr:`~pyvo.registry.regtap.RegistryResource.content_levels` a list of content level labels that describe the intended audience for this resource. :py:attr:`~pyvo.registry.regtap.RegistryResource.res_description` the textual description of the resource. :py:attr:`~pyvo.registry.regtap.RegistryResource.reference_url` URL pointing to a human-readable document describing this resource. 
:py:attr:`~pyvo.registry.regtap.RegistryResource.creators` The creator(s) of the resource in the ordergiven by the resource record author :py:attr:`~pyvo.registry.regtap.RegistryResource.content_types` the IVOA identifier of the service standard it supports :py:attr:`~pyvo.registry.regtap.RegistryResource.source_format` the format of source_value. :py:attr:`~pyvo.registry.regtap.RegistryResource.region_of_regard` numeric value representing the angle, given in decimal degrees, by which a positional query against this resource should be "blurred" in order to get an appropriate match. :py:attr:`~pyvo.registry.regtap.RegistryResource.waveband` a list of names of the wavebands that the resource provides data for :py:attr:`~pyvo.registry.regtap.RegistryResource.access_url` the URL that can be used to access the service resource :py:attr:`~pyvo.registry.regtap.RegistryResource.standard_id` the IVOA standard identifier ================================================================== ========================================================================================================================================================================== .. raw:: html
If you are looking for a particular data collection or catalog, as we did above when we looked for the NVSS archive, often simply reviewing the titles is sufficient. Other times, particularly when you are not sure what you are looking for, it helps to look deeper. The resource description, available via the :py:attr:`~pyvo.registry.regtap.ResourceRecord.res_description` property, tends to be the most revealing. It contains a paragraph (or two) summarizing the catalog or data collection. It will often describe the scientific intent behind the collection. The :py:attr:`~pyvo.registry.regtap.RegistryResource.short_name` can also be helpful. This name is meant to be short--16 characters or fewer; consequently, the value often includes the abbreviation for the project or observatory that produced the collection or catalog. A selection of the resource metadata, including the title, short name and description, can be printed out in a summary form with the :py:meth:`~pyvo.registry.regtap.RegistryResource.describe` function. .. code-block:: python >>> nvss.describe() Image Data Service NVSS Short Name: NVSS IVOA Identifier: ivo://nasa.heasarc/skyview/nvss Base URL: http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=nvss& The NRAO VLA Sky Survey is currently underway at the VLA and data is made available to the public as soon as processed. SkyView has copied the NVSS intensity data from the NRAO FTP site. The full NVSS survey data includes information on other Stokes parameters. Note that SkyView may be slightly out of date with regard to the latest releases of NVSS data. The current information was copied in November 1997. Observations for the 1.4 GHz NRAO VLA Sky Survey (NVSS) began in 1993 September and should cover the sky north of -40 deg declination (82% of the celestial sphere) before the end of 1996. The principal data products will be:

  1. A set of 2326 continuum map "cubes," each covering 4 deg X 4 deg with three planes containing Stokes I, Q, and U images. These maps were made with a relatively large restoring beam (45 arcsec FWHM) to yield the high surface-brightness sensitivity needed for completeness and photometric accuracy. Their rms brightness fluctuations are about 0.45 mJy/beam = 0.14 K (Stokes I) and 0.29 mJy/beam = 0.09 K (Stokes Q and U). The rms uncertainties in right ascension and declination vary from 0.3 arcsec for strong (S > 30 mJy) point sources to 5 arcsec for the faintest (S = 2.5 mJy) detectable sources.
  2. Lists of discrete sources.
The NVSS is being made as a service to the astronomical community, and the data products are being released as soon as they are produced and verified.

The NVSS survey is included on the SkyView High Resolution Radio Coverage map . This map shows coverage on an Aitoff projection of the sky in equatorial coordinates. Subjects: NVSS Waveband Coverage: radio As the examples in this chapter suggest, queries to the registry are often done interactively. You will find the need to review the results by eye, to further refine the collections and catalogs that you discover. In the :ref:`last section of this chapter `, we present a few tips for working with the registry within scripts in a non-interactive context. .. _reg-to-service: ============================================== Working with Service Objects from the Registry ============================================== In the previous chapter, :ref:`data-access`, we introduced the *Service classes* (e.g. :py:class:`~pyvo.dal.sia.SIAService`). These are classes whose instances represent a particular service, and its most important function is to remember the base URL for the service and allow us to query it without having to pass around the URL directly. Further, in the section, :ref:`service-objects`, we saw how we can create service objects directly from a registry search record. Here's a refresher example, based on the NVSS example from the previous section: >>> nvss = colls[0].service # converts record to service object >>> nvss.baseurl 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=nvss&' >>> query = nvss.create_query(size=0.25, format="image/fits") Thus, not only does this service instance contain the base URL but it also includes all of the metadata from the registry that describes the service. With this service object, we can either call its :py:attr:`~pyvo.dal.sia.SIAService.search` function directly or create query objects to get cutouts for a whole list of sources. .. 
_registry-resolve: Retrieving a Service By Its Identifier -------------------------------------- Our discussion of service metadata offers an opportunity to highlight another important property, the service's *IVOA Identifier* (sometimes referred to as its *ivoid*). This is a globally-unique identifier that takes the form of a `URI `_: >>> colls = vo.regsearch(keywords=["NVSS"], servicetype='sia') >>> for coll in colls: ... print coll.identifier ivo://nasa.heasarc/skyview/nvss ivo://nasa.heasarc/skyview/sumss This identifier can be used to uniquely retrieve a service desription from the registry. >>> nvss = vo.registry.ivoid2service('ivo://nasa.heasarc/skyview/nvss') >>> nvss.title, nvss.baseurl ('NVSS', 'http://skyview.gsfc.nasa.gov/cgi-bin/vo/sia.pl?survey=nvss&') >>> # search the service in one call >>> cutouts1 = nvss.search(pos=(148.8888, 69.065) size=0.2) >>> nvssq = nvss.create_query(size=0.2) # or create a query object >>> nvssq.pos = (350.85, 58.815) >>> cutouts2 = nvssq.execute() .. note :: If you want to keep a reference to a single service (say, as part of a list of favorite services), it is better to save the identifier than the base URL. Over time, a service's base URL can change; however, the identifier will stay the same. As we end this discussion of the service objects, you can hopefully see that there is a straight-forward chain of discovery classes that connect the registry down through to a dataset. Spelled out in all its detail, it looks like this: .. code-block:: python services = vo.regsearch(keywords=["NVSS"], servicetype='sia') # RegistryResults nvss = services[0] # RegistryResource nvsss = nvss.service # SIAService nq = nvss.create_query(pos=(350.85, 58.815), size=0.25, format="image/fits") # SIAQuery images = nq.execute() # SIAResults firstim = images[0] # SIARecord Most of the time, it's not necessary to follow all these steps yourself, so there are functions and methods that provide syntactic shortcuts. 
However, when you need some finer control over the process, it is possible to jump off the fast track and work directly with an underlying object. .. _reg-tips: ============================================ Tips for Accessing the Registry from Scripts ============================================ .. eventually we want to replace prose recipes with code (or built-in tools) As we've seen from the examples in this chapter, discovering and selecting services from the registry is often an interative process, particulary when you are not sure what you are looking for and you use the registry as a tool for exploration. In this mode, you will find yourself reviewing registry search results by eye to focus in on those data collections and services of interest. However, there are a few use cases where non-interactive registry queries--i.e., queries that you can run blindly from a script--work well: #. Taking an inventory of all data available for particular postion and/or topic. #. Compiling a list of catalogs that include columns that contain particular kinds of data. #. Recalling a service of set of services by their IVOA identifiers. #. Look for new catalogs or data collections related to a particular topic and recently added to the VO. The Data Inventory ------------------ The :ref:`first example ` in the chapter, :ref:`getting-started`, is an example of creating an inventory of a available data. In that case, it was an inventory of available X-ray images of the Cas A supernova remnant. We didn't actually download these images; instead, we created a table describing the images along with the URL for downloading them later, as desired. The Hunt for Measurements ------------------------- You may be creating your own catalog of objects selected for a particular science study. You may want to fill out the columns of your source table with attributes of interest, such as photometry measurements. To do this, you'll need to find the catalogs that have this data. 
One simple recipe for doing this would be: #. query the registry for all catalogs related to your science using the ``keywords``, ``waveband``, and ``servicetype`` as applicable. #. For each catalog found, run a metadata search (which just returns an empty table). #. Search the columns of each table and find those where the name, ucd, or utype attributes contain the string "mag". The selection of columns is somewhat crude for more detailed kinds of data. Using the UCD label, it's possible to identify columns with particular kinds of magnitudes (e.g J, V, bolometric, etc.) as well as of other types of quantities, such as redshift. See the `CDS UCD Info page `_ for a list of ucds that you can look for. Recalling a Favorite Service ---------------------------- In the previous section, :ref:`registry-resolve`, we discussed how one might create a list of favorite services which include their IVOA Identifiers. Each can be resolved into a service object using the :py:meth:`~pyvo.registry.regtap.ivoid2service` so that the service can be searched. You may, for example, want to re-search a set of archives periodically to determine if it has any new data since the last time you checked. Discovering New Additions to the VO ----------------------------------- In a similar vein, you may be interested in knowing when new catalogs or data collections, particularly any related to a topic of interest, become available in VO. Here's a recipe for a script that you would run periodically which can accomplish this: #. Execute a registry query that looks for potentially interesting catalogs and collections. #. Extract the list of IVOA identifiers returned in the results. #. From disk, open the registry search results saved from the previous run of the script and extract the identifiers. #. Compare the two lists of identifiers, finding those that appear in the new results that are not in the previous results. These represent the new additions to the VO. #. 
Create a union of the two search result tables and save that as the latest result. #. Report the new additions. pyvo-0.6.1+dfsg.orig/docs/pyvo/index.rst0000644000175000017500000000045513125165225017515 0ustar noahfxnoahfx:tocdepth: 2 ################# PyVO User's Guide ################# PyVO is a package providing access to remote data and services of the Virtual Observatory (VO) using Python. **Contents** .. toctree:: :maxdepth: 2 vo install getting_started data_access registry_access ref pyvo-0.6.1+dfsg.orig/docs/make.bat0000644000175000017500000001064113125165225016262 0ustar noahfxnoahfx@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 
goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end pyvo-0.6.1+dfsg.orig/docs/_templates/0000755000175000017500000000000013125165225017010 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/docs/_templates/autosummary/0000755000175000017500000000000013125165225021376 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/docs/_templates/autosummary/class.rst0000644000175000017500000000037313125165225023240 0ustar noahfxnoahfx{% extends "autosummary_core/class.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}pyvo-0.6.1+dfsg.orig/docs/_templates/autosummary/module.rst0000644000175000017500000000037413125165225023421 0ustar noahfxnoahfx{% extends "autosummary_core/module.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. 
#}pyvo-0.6.1+dfsg.orig/docs/_templates/autosummary/base.rst0000644000175000017500000000037213125165225023044 0ustar noahfxnoahfx{% extends "autosummary_core/base.rst" %} {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}pyvo-0.6.1+dfsg.orig/docs/conf.py0000644000175000017500000001423213125165225016154 0ustar noahfxnoahfx# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst # # Astropy documentation build configuration file. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this file. # # All configuration values have a default. Some values are defined in # the global Astropy configuration which is loaded here before anything else. # See astropy.sphinx.conf for which values are set there. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('..')) # IMPORTANT: the above commented section was generated by sphinx-quickstart, but # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left # commented out with this explanation to make it clear why this should not be # done. If the sys.path entry above is added, when the astropy.sphinx.conf # import occurs, it will import the *source* version of astropy instead of the # version installed (if invoked as "make html" or directly with sphinx), or the # version in the build directory (if "python setup.py build_sphinx" is used). # Thus, any C-extensions that are needed to build the documentation will *not* # be accessible, and the documentation will not build correctly. 
import datetime import os import sys # Load all of the global Astropy configuration from astropy_helpers.sphinx.conf import * # Get configuration information from setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')]) setup_cfg = dict(conf.items('metadata')) # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.2' # astropy_helpers sets 1.2 as the required default; forcing 1.1 seems to # work fine, but this may need to change needs_sphinx = '1.1' # To perform a Sphinx version check that needs to be more specific than # major.minor, call `check_sphinx_version("x.y.z")` here. # check_sphinx_version("1.2.1") # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns.append('_templates') # Add any paths that contain templates here, relative to this directory. if 'templates_path' not in locals(): # in case parent conf.py defines it templates_path = [] templates_path.append('_templates') # This is added to the end of RST files - a good place to put substitutions to # be used globally. rst_epilog += """ """ # -- Project information ------------------------------------------------------ # This does not *have* to match the package name, but typically does project = setup_cfg['package_name'] author = setup_cfg['author'] copyright = '{0}, {1}'.format( datetime.datetime.now().year, setup_cfg['author']) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. __import__(setup_cfg['package_name']) package = sys.modules[setup_cfg['package_name']] # The short X.Y version. 
version = package.__version__.split('-', 1)[0] # The full version, including alpha/beta/rc tags. release = package.__version__ # -- Options for HTML output --------------------------------------------------- # A NOTE ON HTML THEMES # The global astropy configuration uses a custom theme, 'bootstrap-astropy', # which is installed along with astropy. A different theme can be used or # the options for this theme can be modified by overriding some of the # variables set in the global configuration. The variables set in the # global configuration are listed below, commented out. # Add any paths that contain custom themes here, relative to this directory. # To use a different custom theme, add the directory containing the theme. #html_theme_path = [] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. To override the custom theme, set this to the # name of a builtin theme or the name of a custom theme in html_theme_path. #html_theme = None # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = '' # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '' # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = '{0} v{1}'.format(project, release) # Output file base name for HTML help builder. htmlhelp_basename = project + 'doc' # -- Options for LaTeX output -------------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [('index', project + '.tex', project + u' Documentation', author, 'manual')] # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [('index', project.lower(), project + u' Documentation', [author], 1)] # -- Options for the edit_on_github extension ---------------------------------------- if eval(setup_cfg.get('edit_on_github')): extensions += ['astropy.sphinx.ext.edit_on_github'] versionmod = __import__(setup_cfg['package_name'] + '.version') edit_on_github_project = setup_cfg['github_project'] if versionmod.release: edit_on_github_branch = "v" + versionmod.version else: edit_on_github_branch = "master" edit_on_github_source_root = "" edit_on_github_doc_root = "docs" pyvo-0.6.1+dfsg.orig/docs/Makefile0000644000175000017500000001116413125165225016316 0ustar noahfxnoahfx# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest #This is needed with git because git doesn't create a dir if it's empty $(shell [ -d "_static" ] || mkdir -p _static) help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR) -rm -rf api html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." 
htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." 
linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." pyvo-0.6.1+dfsg.orig/docs/index.rst0000644000175000017500000000045113125165225016514 0ustar noahfxnoahfxDocumentation ============= This is an affiliated package for the AstroPy package. The documentation for this package is here: .. toctree:: :maxdepth: 2 pyvo/index.rst .. note:: Do not edit this page - instead, place all documentation for the affiliated package inside ``pyvo/`` pyvo-0.6.1+dfsg.orig/CONTRIBUTORS.rst0000644000175000017500000000166413125165225016421 0ustar noahfxnoahfxPyVO Contributors ----------------- PyVO is an open source project developed through GitHub at https://github.com/pyvirtobs; community contributions are welcome. This project began in 2012 as a product of the US Virtual Astronomical Observatory, funded through a cooperative agreement with the US National Science Foundation. PyVO was developed with contributions from the following developers: Contributor's Name | Affiliations | GitHub identity ---------------------|----------------|------------------- Stefan Becker | Heidelberg Univ. | funbaker Thomas Boch | CDS | tboch Carlos Brandt | Universita di Roma La Sapienza | chbrandt Markus Demleitner | Heidelberg Univ. 
| msdemlei Christoph Deil | MPI for Nuclear Physics | cdeil Mike Fitzpatrick | NOAO | Matthew Graham | Caltech | doccosmos Gus Muench | Harvard/CfA, AAS | augustfly Ray Plante | NCSA/UIUC, NIST | RayPlante Brigitta Sipocz | Cambridge, UK | bsipocz Doug Tody | NRAO | pyvo-0.6.1+dfsg.orig/ah_bootstrap.py0000644000175000017500000010650213125165225016766 0ustar noahfxnoahfx""" This bootstrap module contains code for ensuring that the astropy_helpers package will be importable by the time the setup.py script runs. It also includes some workarounds to ensure that a recent-enough version of setuptools is being used for the installation. This module should be the first thing imported in the setup.py of distributions that make use of the utilities in astropy_helpers. If the distribution ships with its own copy of astropy_helpers, this module will first attempt to import from the shipped copy. However, it will also check PyPI to see if there are any bug-fix releases on top of the current version that may be useful to get past platform-specific bugs that have been fixed. When running setup.py, use the ``--offline`` command-line option to disable the auto-upgrade checks. When this module is imported or otherwise executed it automatically calls a main function that attempts to read the project's setup.cfg file, which it checks for a configuration section called ``[ah_bootstrap]`` the presences of that section, and options therein, determine the next step taken: If it contains an option called ``auto_use`` with a value of ``True``, it will automatically call the main function of this module called `use_astropy_helpers` (see that function's docstring for full details). Otherwise no further action is taken (however, ``ah_bootstrap.use_astropy_helpers`` may be called manually from within the setup.py script). 
Additional options in the ``[ah_boostrap]`` section of setup.cfg have the same names as the arguments to `use_astropy_helpers`, and can be used to configure the bootstrap script when ``auto_use = True``. See https://github.com/astropy/astropy-helpers for more details, and for the latest version of this module. """ import contextlib import errno import imp import io import locale import os import re import subprocess as sp import sys try: from ConfigParser import ConfigParser, RawConfigParser except ImportError: from configparser import ConfigParser, RawConfigParser if sys.version_info[0] < 3: _str_types = (str, unicode) _text_type = unicode PY3 = False else: _str_types = (str, bytes) _text_type = str PY3 = True # What follows are several import statements meant to deal with install-time # issues with either missing or misbehaving pacakges (including making sure # setuptools itself is installed): # Some pre-setuptools checks to ensure that either distribute or setuptools >= # 0.7 is used (over pre-distribute setuptools) if it is available on the path; # otherwise the latest setuptools will be downloaded and bootstrapped with # ``ez_setup.py``. 
This used to be included in a separate file called # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py try: import pkg_resources _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') # This may raise a DistributionNotFound in which case no version of # setuptools or distribute is properly installed _setuptools = pkg_resources.get_distribution('setuptools') if _setuptools not in _setuptools_req: # Older version of setuptools; check if we have distribute; again if # this results in DistributionNotFound we want to give up _distribute = pkg_resources.get_distribution('distribute') if _setuptools != _distribute: # It's possible on some pathological systems to have an old version # of setuptools and distribute on sys.path simultaneously; make # sure distribute is the one that's used sys.path.insert(1, _distribute.location) _distribute.activate() imp.reload(pkg_resources) except: # There are several types of exceptions that can occur here; if all else # fails bootstrap and use the bootstrapped version from ez_setup import use_setuptools use_setuptools() # Note: The following import is required as a workaround to # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this # module now, it will get cleaned up after `run_setup` is called, but that will # later cause the TemporaryDirectory class defined in it to stop working when # used later on by setuptools try: import setuptools.py31compat except ImportError: pass # matplotlib can cause problems if it is imported from within a call of # run_setup(), because in some circumstances it will try to write to the user's # home directory, resulting in a SandboxViolation. 
See # https://github.com/matplotlib/matplotlib/pull/4165 # Making sure matplotlib, if it is available, is imported early in the setup # process can mitigate this (note importing matplotlib.pyplot has the same # issue) try: import matplotlib matplotlib.use('Agg') import matplotlib.pyplot except: # Ignore if this fails for *any* reason* pass # End compatibility imports... # In case it didn't successfully import before the ez_setup checks import pkg_resources from setuptools import Distribution from setuptools.package_index import PackageIndex from setuptools.sandbox import run_setup from distutils import log from distutils.debug import DEBUG # TODO: Maybe enable checking for a specific version of astropy_helpers? DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' # Defaults for other options DOWNLOAD_IF_NEEDED = True INDEX_URL = 'https://pypi.python.org/simple' USE_GIT = True OFFLINE = False AUTO_UPGRADE = True # A list of all the configuration options and their required types CFG_OPTIONS = [ ('auto_use', bool), ('path', str), ('download_if_needed', bool), ('index_url', str), ('use_git', bool), ('offline', bool), ('auto_upgrade', bool) ] class _Bootstrapper(object): """ Bootstrapper implementation. See ``use_astropy_helpers`` for parameter documentation. 
""" def __init__(self, path=None, index_url=None, use_git=None, offline=None, download_if_needed=None, auto_upgrade=None): if path is None: path = PACKAGE_NAME if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') if PY3 and not isinstance(path, _text_type): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode self.path = path # Set other option attributes, using defaults where necessary self.index_url = index_url if index_url is not None else INDEX_URL self.offline = offline if offline is not None else OFFLINE # If offline=True, override download and auto-upgrade if self.offline: download_if_needed = False auto_upgrade = False self.download = (download_if_needed if download_if_needed is not None else DOWNLOAD_IF_NEEDED) self.auto_upgrade = (auto_upgrade if auto_upgrade is not None else AUTO_UPGRADE) # If this is a release then the .git directory will not exist so we # should not use git. git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) if use_git is None and not git_dir_exists: use_git = False self.use_git = use_git if use_git is not None else USE_GIT # Declared as False by default--later we check if astropy-helpers can be # upgraded from PyPI, but only if not using a source distribution (as in # the case of import from a git submodule) self.is_submodule = False @classmethod def main(cls, argv=None): if argv is None: argv = sys.argv config = cls.parse_config() config.update(cls.parse_command_line(argv)) auto_use = config.pop('auto_use', False) bootstrapper = cls(**config) if auto_use: # Run the bootstrapper, otherwise the setup.py is using the old # use_astropy_helpers() interface, in which case it will run the # bootstrapper manually after reconfiguring it. 
bootstrapper.run() return bootstrapper @classmethod def parse_config(cls): if not os.path.exists('setup.cfg'): return {} cfg = ConfigParser() try: cfg.read('setup.cfg') except Exception as e: if DEBUG: raise log.error( "Error reading setup.cfg: {0!r}\n{1} will not be " "automatically bootstrapped and package installation may fail." "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) return {} if not cfg.has_section('ah_bootstrap'): return {} config = {} for option, type_ in CFG_OPTIONS: if not cfg.has_option('ah_bootstrap', option): continue if type_ is bool: value = cfg.getboolean('ah_bootstrap', option) else: value = cfg.get('ah_bootstrap', option) config[option] = value return config @classmethod def parse_command_line(cls, argv=None): if argv is None: argv = sys.argv config = {} # For now we just pop recognized ah_bootstrap options out of the # arg list. This is imperfect; in the unlikely case that a setup.py # custom command or even custom Distribution class defines an argument # of the same name then we will break that. However there's a catch22 # here that we can't just do full argument parsing right here, because # we don't yet know *how* to parse all possible command-line arguments. 
if '--no-git' in argv: config['use_git'] = False argv.remove('--no-git') if '--offline' in argv: config['offline'] = True argv.remove('--offline') return config def run(self): strategies = ['local_directory', 'local_file', 'index'] dist = None # First, remove any previously imported versions of astropy_helpers; # this is necessary for nested installs where one package's installer # is installing another package via setuptools.sandbox.run_setup, as in # the case of setup_requires for key in list(sys.modules): try: if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): del sys.modules[key] except AttributeError: # Sometimes mysterious non-string things can turn up in # sys.modules continue # Check to see if the path is a submodule self.is_submodule = self._check_submodule() for strategy in strategies: method = getattr(self, 'get_{0}_dist'.format(strategy)) dist = method() if dist is not None: break else: raise _AHBootstrapSystemExit( "No source found for the {0!r} package; {0} must be " "available and importable as a prerequisite to building " "or installing this package.".format(PACKAGE_NAME)) # This is a bit hacky, but if astropy_helpers was loaded from a # directory/submodule its Distribution object gets a "precedence" of # "DEVELOP_DIST". However, in other cases it gets a precedence of # "EGG_DIST". However, when activing the distribution it will only be # placed early on sys.path if it is treated as an EGG_DIST, so always # do that dist = dist.clone(precedence=pkg_resources.EGG_DIST) # Otherwise we found a version of astropy-helpers, so we're done # Just active the found distribution on sys.path--if we did a # download this usually happens automatically but it doesn't hurt to # do it again # Note: Adding the dist to the global working set also activates it # (makes it importable on sys.path) by default. 
try: pkg_resources.working_set.add(dist, replace=True) except TypeError: # Some (much) older versions of setuptools do not have the # replace=True option here. These versions are old enough that all # bets may be off anyways, but it's easy enough to work around just # in case... if dist.key in pkg_resources.working_set.by_key: del pkg_resources.working_set.by_key[dist.key] pkg_resources.working_set.add(dist) @property def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname)) def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. """ if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. 
""" if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist def get_index_dist(self): if not self.download: log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) return None log.warn( "Downloading {0!r}; run setup.py with the --offline option to " "force offline installation.".format(DIST_NAME)) try: dist = self._do_download() except Exception as e: if DEBUG: raise log.warn( 'Failed to download and/or install {0!r} from {1!r}:\n' '{2}'.format(DIST_NAME, self.index_url, str(e))) dist = None # No need to run auto-upgrade here since we've already presumably # gotten the most up-to-date version from the package index return dist def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. 
""" # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist def _do_download(self, version='', find_links=None): if find_links: allow_hosts = '' index_url = None else: allow_hosts = None index_url = self.index_url # Annoyingly, setuptools will not handle other arguments to # Distribution (such as options) before handling setup_requires, so it # is not straightforward to programmatically augment the arguments which # are passed to easy_install class _Distribution(Distribution): def get_option_dict(self, command_name): opts = Distribution.get_option_dict(self, command_name) if command_name == 'easy_install': if find_links is not None: opts['find_links'] = ('setup script', find_links) if index_url is not None: opts['index_url'] = ('setup script', index_url) if allow_hosts is not None: opts['allow_hosts'] = ('setup script', allow_hosts) return opts if version: req = '{0}=={1}'.format(DIST_NAME, version) else: req = DIST_NAME attrs = {'setup_requires': [req]} try: if DEBUG: _Distribution(attrs=attrs) else: with _silence(): _Distribution(attrs=attrs) # If the setup_requires succeeded it will have added the new dist to # the main working_set return pkg_resources.working_set.by_key.get(DIST_NAME) except Exception as e: if DEBUG: raise msg = 'Error retrieving {0} 
from {1}:\n{2}' if find_links: source = find_links[0] elif index_url != INDEX_URL: source = index_url else: source = 'PyPI' raise Exception(msg.format(DIST_NAME, source, repr(e))) def _do_upgrade(self, dist): # Build up a requirement for a higher bugfix release but a lower minor # release (so API compatibility is guaranteed) next_version = _next_version(dist.parsed_version) req = pkg_resources.Requirement.parse( '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) package_index = PackageIndex(index_url=self.index_url) upgrade = package_index.obtain(req) if upgrade is not None: return self._do_download(version=upgrade.version) def _check_submodule(self): """ Check if the given path is a git submodule. See the docstrings for ``_check_submodule_using_git`` and ``_check_submodule_no_git`` for further details. """ if (self.path is None or (os.path.exists(self.path) and not os.path.isdir(self.path))): return False if self.use_git: return self._check_submodule_using_git() else: return self._check_submodule_no_git() def _check_submodule_using_git(self): """ Check if the given path is a git submodule. If so, attempt to initialize and/or update the submodule if needed. This function makes calls to the ``git`` command in subprocesses. The ``_check_submodule_no_git`` option uses pure Python to check if the given path looks like a git submodule, but it cannot perform updates. """ cmd = ['git', 'submodule', 'status', '--', self.path] try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except _CommandNotFound: # The git command simply wasn't found; this is most likely the # case on user systems that don't have git and are simply # trying to install the package from PyPI or a source # distribution. 
Silently ignore this case and simply don't try # to use submodules return False stderr = stderr.strip() if returncode != 0 and stderr: # Unfortunately the return code alone cannot be relied on, as # earlier versions of git returned 0 even if the requested submodule # does not exist # This is a warning that occurs in perl (from running git submodule) # which only occurs with a malformatted locale setting which can # happen sometimes on OSX. See again # https://github.com/astropy/astropy/issues/2749 perl_warning = ('perl: warning: Falling back to the standard locale ' '("C").') if not stderr.strip().endswith(perl_warning): # Some other unknown error condition occurred log.warn('git submodule command failed ' 'unexpectedly:\n{0}'.format(stderr)) return False # Output of `git submodule status` is as follows: # # 1: Status indicator: '-' for submodule is uninitialized, '+' if # submodule is initialized but is not at the commit currently indicated # in .gitmodules (and thus needs to be updated), or 'U' if the # submodule is in an unstable state (i.e. has merge conflicts) # # 2. SHA-1 hash of the current commit of the submodule (we don't really # need this information but it's useful for checking that the output is # correct) # # 3. The output of `git describe` for the submodule's current commit # hash (this includes for example what branches the commit is on) but # only if the submodule is initialized. 
We ignore this information for # now _git_submodule_status_re = re.compile( '^(?P[+-U ])(?P[0-9a-f]{40}) ' '(?P\S+)( .*)?$') # The stdout should only contain one line--the status of the # requested submodule m = _git_submodule_status_re.match(stdout) if m: # Yes, the path *is* a git submodule self._update_submodule(m.group('submodule'), m.group('status')) return True else: log.warn( 'Unexpected output from `git submodule status`:\n{0}\n' 'Will attempt import from {1!r} regardless.'.format( stdout, self.path)) return False def _check_submodule_no_git(self): """ Like ``_check_submodule_using_git``, but simply parses the .gitmodules file to determine if the supplied path is a git submodule, and does not exec any subprocesses. This can only determine if a path is a submodule--it does not perform updates, etc. This function may need to be updated if the format of the .gitmodules file is changed between git versions. """ gitmodules_path = os.path.abspath('.gitmodules') if not os.path.isfile(gitmodules_path): return False # This is a minimal reader for gitconfig-style files. It handles a few of # the quirks that make gitconfig files incompatible with ConfigParser-style # files, but does not support the full gitconfig syntax (just enough # needed to read a .gitmodules file). 
gitmodules_fileobj = io.StringIO() # Must use io.open for cross-Python-compatible behavior wrt unicode with io.open(gitmodules_path) as f: for line in f: # gitconfig files are more flexible with leading whitespace; just # go ahead and remove it line = line.lstrip() # comments can start with either # or ; if line and line[0] in (':', ';'): continue gitmodules_fileobj.write(line) gitmodules_fileobj.seek(0) cfg = RawConfigParser() try: cfg.readfp(gitmodules_fileobj) except Exception as exc: log.warn('Malformatted .gitmodules file: {0}\n' '{1} cannot be assumed to be a git submodule.'.format( exc, self.path)) return False for section in cfg.sections(): if not cfg.has_option(section, 'path'): continue submodule_path = cfg.get(section, 'path').rstrip(os.sep) if submodule_path == self.path.rstrip(os.sep): return True return False def _update_submodule(self, submodule, status): if status == ' ': # The submodule is up to date; no action necessary return elif status == '-': if self.offline: raise _AHBootstrapSystemExit( "Cannot initialize the {0} submodule in --offline mode; " "this requires being able to clone the submodule from an " "online repository.".format(submodule)) cmd = ['update', '--init'] action = 'Initializing' elif status == '+': cmd = ['update'] action = 'Updating' if self.offline: cmd.append('--no-fetch') elif status == 'U': raise _AHBoostrapSystemExit( 'Error: Submodule {0} contains unresolved merge conflicts. ' 'Please complete or abandon any changes in the submodule so that ' 'it is in a usable state, then try again.'.format(submodule)) else: log.warn('Unknown status {0!r} for git submodule {1!r}. 
Will ' 'attempt to use the submodule as-is, but try to ensure ' 'that the submodule is in a clean state and contains no ' 'conflicts or errors.\n{2}'.format(status, submodule, _err_help_msg)) return err_msg = None cmd = ['git', 'submodule'] + cmd + ['--', submodule] log.warn('{0} {1} submodule with: `{2}`'.format( action, submodule, ' '.join(cmd))) try: log.info('Running `{0}`; use the --no-git option to disable git ' 'commands'.format(' '.join(cmd))) returncode, stdout, stderr = run_cmd(cmd) except OSError as e: err_msg = str(e) else: if returncode != 0: err_msg = stderr if err_msg is not None: log.warn('An unexpected error occurred updating the git submodule ' '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) class _CommandNotFound(OSError): """ An exception raised when a command run with run_cmd is not found on the system. """ def run_cmd(cmd): """ Run a command in a subprocess, given as a list of command-line arguments. Returns a ``(returncode, stdout, stderr)`` tuple. """ try: p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) # XXX: May block if either stdout or stderr fill their buffers; # however for the commands this is currently used for that is # unlikely (they should have very brief output) stdout, stderr = p.communicate() except OSError as e: if DEBUG: raise if e.errno == errno.ENOENT: msg = 'Command not found: `{0}`'.format(' '.join(cmd)) raise _CommandNotFound(msg, cmd) else: raise _AHBoostrapSystemExit( 'An unexpected error occurred when running the ' '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) # Can fail of the default locale is not configured properly. See # https://github.com/astropy/astropy/issues/2749. For the purposes under # consideration 'latin1' is an acceptable fallback. try: stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' except ValueError: # Due to an OSX oddity locale.getdefaultlocale() can also crash # depending on the user's locale/language settings. 
See: # http://bugs.python.org/issue18378 stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible if not isinstance(stdout, _text_type): stdout = stdout.decode(stdio_encoding, 'replace') if not isinstance(stderr, _text_type): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) def _next_version(version): """ Given a parsed version from pkg_resources.parse_version, returns a new version string with the next minor version. Examples ======== >>> _next_version(pkg_resources.parse_version('1.2.3')) '1.3.0' """ if hasattr(version, 'base_version'): # New version parsing from setuptools >= 8.0 if version.base_version: parts = version.base_version.split('.') else: parts = [] else: parts = [] for part in version: if part.startswith('*'): break parts.append(part) parts = [int(p) for p in parts] if len(parts) < 3: parts += [0] * (3 - len(parts)) major, minor, micro = parts[:3] return '{0}.{1}.{2}'.format(major, minor + 1, 0) class _DummyFile(object): """A noop writeable object.""" errors = '' # Required for Python 3.x encoding = 'utf-8' def write(self, s): pass def flush(self): pass @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr _err_help_msg = """ If the problem persists consider installing astropy_helpers manually using pip (`pip install astropy_helpers`) or by manually downloading the source archive, extracting it, and installing by running `python setup.py install` from the root of the extracted source code. 
""" class _AHBootstrapSystemExit(SystemExit): def __init__(self, *args): if not args: msg = 'An unknown problem occurred bootstrapping astropy_helpers.' else: msg = args[0] msg += '\n' + _err_help_msg super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) if sys.version_info[:2] < (2, 7): # In Python 2.6 the distutils log does not log warnings, errors, etc. to # stderr so we have to wrap it to ensure consistency at least in this # module import distutils class log(object): def __getattr__(self, attr): return getattr(distutils.log, attr) def warn(self, msg, *args): self._log_to_stderr(distutils.log.WARN, msg, *args) def error(self, msg): self._log_to_stderr(distutils.log.ERROR, msg, *args) def fatal(self, msg): self._log_to_stderr(distutils.log.FATAL, msg, *args) def log(self, level, msg, *args): if level in (distutils.log.WARN, distutils.log.ERROR, distutils.log.FATAL): self._log_to_stderr(level, msg, *args) else: distutils.log.log(level, msg, *args) def _log_to_stderr(self, level, msg, *args): # This is the only truly 'public' way to get the current threshold # of the log current_threshold = distutils.log.set_threshold(distutils.log.WARN) distutils.log.set_threshold(current_threshold) if level >= current_threshold: if args: msg = msg % args sys.stderr.write('%s\n' % msg) sys.stderr.flush() log = log() BOOTSTRAPPER = _Bootstrapper.main() def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. 
The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. 
""" global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run() pyvo-0.6.1+dfsg.orig/setup.cfg0000644000175000017500000000234413125165225015547 0ustar noahfxnoahfx[build_sphinx] source-dir = docs build-dir = docs/_build all_files = 1 [build_docs] source-dir = docs build-dir = docs/_build all_files = 1 [upload_docs] upload-dir = docs/_build/html show-response = 1 [pytest] minversion = 2.2 norecursedirs = build docs/_build [ah_bootstrap] auto_use = True [pep8] # E101 - mix of tabs and spaces # W191 - use of tabs # W291 - trailing whitespace # W292 - no newline at end of file # W293 - trailing whitespace # W391 - blank line at end of file # E111 - 4 spaces per indentation level # E112 - 4 spaces per indentation level # E113 - 4 spaces per indentation level # E901 - SyntaxError or IndentationError # E902 - IOError select = E101,W191,W291,W292,W293,W391,E111,E112,E113,E901,E902 exclude = extern,sphinx,*parsetab.py [metadata] package_name = pyvo description = Astropy affiliated package for accessing Virtual Observatory data and services long_description = author = the Virtual Astronomical Observatory (VAO) author_email = rplante@ncsa.illinois.edu license = BSD url = http://dev.usvao.org/pyvo edit_on_github = False github_project = pyvirtobs/pyvo install_requires = astropy requests # version should be PEP440 compatible (http://www.python.org/dev/peps/pep-0440) version = 0.6.0 [entry_points] pyvo-0.6.1+dfsg.orig/README.rst0000644000175000017500000000410513125165225015412 0ustar noahfxnoahfxPyVO =================================== .. image:: http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat :target: http://www.astropy.org :alt: Powered by Astropy Badge .. image:: https://travis-ci.org/pyvirtobs/pyvo.svg :target: https://travis-ci.org/pyvirtobs/pyvo :alt: Travis Status .. 
image:: https://coveralls.io/repos/github/pyvirtobs/pyvo/badge.svg?branch=master :target: https://coveralls.io/github/pyvirtobs/pyvo?branch=master :alt: Coverage Status PyVO is a package providing access to remote data and services of the Virtual observatory (VO) using Python. Its development was launched by NSF/NASA-funded Virtual Astronomical Observatory (VAO, www.usvao.org) project (formerly under the name VAOpy) as part of its initiative to bring VO capabilities to desktop. Its goal is allow astronomers and tool developers to access data and services from remote archives and other web resources. It takes advantage VO standards to give access to thousands of catalogs, data archives, information services, and analysis tools. It also takes advantage of the general capabilities of Astopy (and numpy), and so a secondary goal is to provide a development platform for migrating more VO capabilities into Astropy. Source code can be found on `GitHub ` Releases of PyVO are available from `PyPI `; thus, it and its prerequisites can be most easily installed using ``pip``: pip install pyvo Alternatively, you can do a source install: python setup.py install EXAMPLES -------- Many instructive examples can be found in the PyVO User's Manual (http://pyvo.readthedocs.org). Additional examples can be found in the scripts directory. UNIT TESTS ---------- PyVO uses the Astropy framework for unit tests which is built into the setup script. To run the tests, type: python setup.py test This will run all unit tests that do not require a network connection. 
To run all tests, including those that access the network, add the --remote-data (-R) option: python setup.py test -R pyvo-0.6.1+dfsg.orig/licenses/0000755000175000017500000000000013125165225015530 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/licenses/README.rst0000644000175000017500000000024213125165225017215 0ustar noahfxnoahfxLicenses ======== This directory holds license and credit information for the affiliated package, works the affiliated package is derived from, and/or datasets. pyvo-0.6.1+dfsg.orig/licenses/ASTROPY_LICENSE.rst0000644000175000017500000000273013125165225020567 0ustar noahfxnoahfxCopyright (c) 2011-2013, Astropy Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Astropy Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. pyvo-0.6.1+dfsg.orig/licenses/LICENSE.rst0000644000175000017500000000275113125165225017351 0ustar noahfxnoahfxCopyright (c) 2013, Virtual Astronomical Observatory, LLC (VAO) All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the VAO nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. pyvo-0.6.1+dfsg.orig/CHANGES.rst0000644000175000017500000000275113125165225015532 0ustar noahfxnoahfx0.6.1 ---------------- * Add Datalink interface * Put some common functionallity in Mixins * Minor fixes and improvements 0.6 ---------------- * Using RegTAP as the only registry interface * Added a datamodel keyword to registry search * Using the six libray to address Python 2/3 compatibility issues * AsyncTAPJob is now context aware * Improvement upload handling; it is no longer necessary to specifiy the type of upload * Allow astropy's SkyCoord and Quantity as input parameters 0.5.2 ---------------- Remove trailing ? from query urls VOTable fieldnames are now gathered from names only instead of ID and name 0.5.1 ---------------- * fix content decoding related error in async result handling 0.5 ---------------- * added a RegTAP interface * removed urllib in favor of the requests library * deprecated vao registry interface * minor improvements and fixes 0.4.1 ------------------ * fix a bug where maxrec wasn't send to the server 0.4 ---------------- * Use astropy tables for table metadata * fix another content encoding error 0.3.2 ------------------ * Adding table property to DALResults. 
This is a shortcut to access the astropy table * Improved Error Handling 0.3.1 ------------------ * fix an error where the content wasn't decoded properly * fix a bug where POST parameters are submitted as GET parameters 0.3 ---------------- Adding TAP API 0.1 ---------------- This is the last release that supports Python 2.6. This release only contains bug fixes beyond 0.0beta2. pyvo-0.6.1+dfsg.orig/.travis.yml0000644000175000017500000000711413125165225016037 0ustar noahfxnoahfx# We set the language to c because python isn't supported on the MacOS X nodes # on Travis. However, the language ends up being irrelevant anyway, since we # install Python ourselves using conda. language: c os: - linux # Setting sudo to false opts in to Travis-CI container-based builds. sudo: false # The apt packages below are needed for sphinx builds. A full list of packages # that can be included can be found here: # # https://github.com/travis-ci/apt-package-whitelist/blob/master/ubuntu-precise addons: apt: packages: - graphviz - texlive-latex-extra - dvipng env: global: # The following versions are the 'default' for tests, unless # overridden underneath. They are defined here in order to save having # to repeat them for all configurations. - PYTHON_VERSION=3.6 - NUMPY_VERSION=stable - ASTROPY_VERSION=stable - MAIN_CMD='python setup.py' - SETUP_CMD='test' - PIP_DEPENDENCIES='requests python-dateutil flask' - EVENT_TYPE='pull_request push' - CONDA_DEPENDENCIES='' - CONDA_CHANNELS='astropy-ci-extras astropy' matrix: # Make sure that egg_info works without dependencies - PYTHON_VERSION=2.7 SETUP_CMD='egg_info' - PYTHON_VERSION=3.3 SETUP_CMD='egg_info' NUMPY_VERSION=1.11 - PYTHON_VERSION=3.4 SETUP_CMD='egg_info' - PYTHON_VERSION=3.5 SETUP_CMD='egg_info' - PYTHON_VERSION=3.6 SETUP_CMD='egg_info' matrix: # Don't wait for allowed failures fast_finish: true include: # Try MacOS X - os: osx env: SETUP_CMD='test' # Do a coverage test. 
- os: linux env: SETUP_CMD='test --coverage' # Now try Astropy dev and LTS vesions with the latest 3.x and 2.7. - os: linux env: PYTHON_VERSION=2.7 ASTROPY_VERSION=development EVENT_TYPE='pull_request push cron' - os: linux env: ASTROPY_VERSION=development EVENT_TYPE='pull_request push cron' - os: linux env: PYTHON_VERSION=2.7 ASTROPY_VERSION=lts - os: linux env: ASTROPY_VERSION=lts # try all python versions - os: linux env: PYTHON_VERSION=2.7 NUMPY_VERSION=1.7 - os: linux env: PYTHON_VERSION=3.3 NUMPY_VERSION=1.8 - os: linux env: PYTHON_VERSION=3.4 NUMPY_VERSION=1.9 - os: linux env: PYTHON_VERSION=3.5 NUMPY_VERSION=1.11 # Do a PEP8 test with pycodestyle - os: linux env: MAIN_CMD='pycodestyle pyvo --count' SETUP_CMD='' allow_failures: # Do a PEP8 test with pycodestyle # (allow to fail unless your code completely compliant) - os: linux env: MAIN_CMD='pycodestyle pyvo --count' SETUP_CMD='' # Check for sphinx doc build warnings # (allow to fail because docs aren't in the right state yet') - os: linux env: SETUP_CMD='build_docs -w' # Unresolvable conda dependencies - os: linux env: PYTHON_VERSION=3.3 NUMPY_VERSION=1.11 install: - git clone git://github.com/astropy/ci-helpers.git - source ci-helpers/travis/setup_conda.sh script: - $MAIN_CMD $SETUP_CMD after_success: # If coveralls.io is set up for this package, uncomment the line # below and replace "packagename" with the name of your package. # The coveragerc file may be customized as needed for your package. - if [[ $SETUP_CMD == 'test --coverage' ]]; then coveralls --rcfile='pyvo/tests/coveragerc'; fi pyvo-0.6.1+dfsg.orig/cextern/0000755000175000017500000000000013125165225015373 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/cextern/README.rst0000644000175000017500000000054213125165225017063 0ustar noahfxnoahfxExternal Packages/Libraries =========================== This directory contains C extensions included with the package. 
Note that only C extensions should be included in this directory - pure Cython code should be placed in the package source tree, and wrapper Cython code for C libraries included here should be in the packagename/wrappers directory. pyvo-0.6.1+dfsg.orig/MANIFEST.in0000644000175000017500000000207313125165225015463 0ustar noahfxnoahfxinclude README.rst include CHANGES.rst include ez_setup.py include ah_bootstrap.py include setup.cfg recursive-include packagename *.pyx *.c *.pxd recursive-include docs * recursive-include licenses * recursive-include cextern * recursive-include scripts * prune build prune docs/_build prune docs/api # the next few stanzas are for astropy_helpers. It's derived from the # astropy_helpers/MANIFEST.in, but requires additional includes for the actual # package directory and egg-info. include astropy_helpers/README.rst include astropy_helpers/CHANGES.rst include astropy_helpers/LICENSE.rst recursive-include astropy_helpers/licenses * include astropy_helpers/ez_setup.py include astropy_helpers/ah_bootstrap.py recursive-include astropy_helpers/astropy_helpers *.py *.pyx *.c *.h recursive-include astropy_helpers/astropy_helpers.egg-info * # include the sphinx stuff with "*" because there are css/html/rst/etc. recursive-include astropy_helpers/astropy_helpers/sphinx * prune astropy_helpers/build prune astropy_helpers/astropy_helpers/tests global-exclude *.pyc *.o pyvo-0.6.1+dfsg.orig/astropy_helpers/0000755000175000017500000000000013125165225017146 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/examples/0000755000175000017500000000000013125165225015541 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/examples/serialquery.py0000644000175000017500000000467613125165225020475 0ustar noahfxnoahfx# An example script that does a registry query (first argument to main, # here "standard stars") and then runs cone searches (RA, DEC, SRC # taken from the command line in that order) against them until the # first returns a result. 
That result is then broadcast to all SAMP # clients. # # Usage example: Start TOPCAT, then run # # python serialquery.py 314 27.1 2 # # After a short while you should see a result table in TOPCAT. # # Disclaimer: Of course, this isn't how you should write a command line # interface. # # This file has been put in the public domain by the authors, # Markus Demleitner and # Stefan Becker . import contextlib import os import sys import tempfile import time import astropy from astropy.vo.samp import SAMPIntegratedClient from pyvo.dal import scs from pyvo.registry import regtap @contextlib.contextmanager def samp_accessible(astropy_table): """a context manager making astroy_table available under a (file) URL for the controlled section. This is useful with uploads. """ handle, f_name = tempfile.mkstemp(suffix=".xml") with os.fdopen(handle, "w") as f: astropy_table.write(output=f, format="votable") try: yield "file://"+f_name finally: os.unlink(f_name) @contextlib.contextmanager def SAMP_conn(): """a context manager to give the controlled block a SAMP connection. The program will disconnect as the controlled block is exited. """ client = SAMPIntegratedClient(name="serialquery", description="A serial SCS querier.") client.connect() try: yield client finally: client.disconnect() def broadcast(astropy_table): """broadcasts an astropy table object to all SAMP clients on the local bus. 
""" with SAMP_conn() as client: with samp_accessible(astropy_table) as table_url: client.notify_all( { "samp.mtype": "table.load.votable", "samp.params": { "url": table_url, }}) time.sleep(2) # hack: give other clients time to pick our table up def main(query_terms, ra, dec, sr): for resource in regtap.search( keywords=[query_terms], servicetype="image" ): print(resource.res_title) result = resource.service.search((ra, dec), sr) print(len(result)) if result: break else: sys.exit("No service has results for CIRCLE({0}, {1}, {2})".format( ra, dec, sr)) broadcast(result.table) if __name__=="__main__": # serialquery.py RA DEC SR main("standard stars", *[float(v) for v in sys.argv[1:]]) pyvo-0.6.1+dfsg.orig/examples/notebooks/0000755000175000017500000000000013125165225017544 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/examples/notebooks/working_with_registry_results.ipynb0000644000175000017500000001623713125165225027044 0ustar noahfxnoahfx{ "metadata": { "name": "", "signature": "sha256:77ab35415e38567d49af91382dac230998db0223d904a781aafade553a673424" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "As you can see from the examples in simple_service_discovery,\n", "a search will often return more than one record,\n", "and so sometimes you need to review some of the\n", "resource metadata to determine which one or ones you want. You may\n", "have noticed that the results behave similarly to the results from the\n", "data access services. Like them, registry search results are returned\n", "as a `~pyvo.registry.regtap.RegistryResults` instance, and each\n", "record is represented as a `~pyvo.registry.regtap.RegistryResource` instance. \n", "\n", "A `~pyvo.registry.regtap.RegistryRecord` record acts like a\n", "dictionary where the keys are the column names from the results table;\n", "using our NVSS example from simple_service_discovery." 
] }, { "cell_type": "code", "collapsed": false, "input": [ "import pyvo as vo\n", "colls = vo.regsearch(keywords=[\"NVSS\"], servicetype='sia')\n", "nvss = colls[0]\n", "print(nvss.res_title)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [ "nvss.keys()" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "If you are looking for a particular data collection or catalog, as we\n", "did above when we looked for the NVSS archive, often simply reviewing\n", "the titles is sufficient. Other times, particularly when you are not\n", "sure what you are looking for, it helps to look deeper. \n", "\n", "The resource description, available via the \n", "`~pyvo.registry.regtap.ResourceRecord.res_description` property,\n", "tends to be the most revealing. It contains a paragraph (or two)\n", "summarizing the catalog or data collection. It will often describe\n", "the scientific intent behind the collection. \n", "\n", "The `~pyvo.registry.regtap.RegistryResource.short_name` can also be\n", "helpful, as well. This name is meant to be short--16 characters or\n", "fewer; consequently, the value is often includes the abbreviation for the\n", "project or observatory that produced the collection or catalog. \n", "\n", "A selection of the resource metadata, including the title, shortname and\n", "desription, can be printed out in a summary form with\n", "the `~pyvo.registry.regtap.RegistryResource.describe` function." 
] }, { "cell_type": "code", "collapsed": false, "input": [ "nvss.describe()" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Registry Records can be converted to a service object" ] }, { "cell_type": "code", "collapsed": false, "input": [ "nvss = colls[0].service # converts record to service object\n", "print(nvss.baseurl)\n", "query = nvss.create_query(size=0.25, format=\"image/fits\")" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Thus, not only does this service instance contain the base URL but it\n", "also includes all of the metadata from the registry that desribes the\n", "service. With this service object, we can either call its \n", "`~pyvo.dal.sia.SIAService.search` function directly or \n", "create query objects to get cutouts for a whole list of sources." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Our discussion of service metadata offers an opportunity to highlight\n", "another important property, the service's *IVOA Identifier* (sometimes\n", "referred to as its *ivoid*). This is a globally-unique identifier\n", "that takes the form of a \n", "`URI `_:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "colls = vo.regsearch(keywords=[\"NVSS\"], servicetype='sia')\n", "for coll in colls:\n", " print coll.ivoid" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "This identifier can be used to uniquely retrieve a service desription\n", "from the registry." 
] }, { "cell_type": "code", "collapsed": false, "input": [ "nvss = vo.registry.ivoid2service('ivo://nasa.heasarc/skyview/nvss')\n", "nvss.baseurl" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [ "cutouts1 = nvss.search(pos=(148.8888, 69.065), size=0.2)\n", "nvssq = nvss.create_query(size=0.2) # or create a query object\n", "nvssq.pos = (350.85, 58.815)\n", "cutouts2 = nvssq.execute()" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "As we end this discussion of the service objects, you can hopefully\n", "see that there is a straight-forward chain of discovery classes that\n", "connect the registry down through to a dataset. Spelled out in all\n", "its detail, it looks like this:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "services = vo.regsearch(keywords=[\"NVSS\"],\n", " servicetype='sia') # RegistryResults\n", "nvss = services[0] # RegistryResource\n", "nvsss = nvss.service # SIAService\n", "nq = nvss.create_query(pos=(350.85, 58.815),\n", " size=0.25, \n", " format=\"image/fits\") # SIAQuery\n", "images = nq.execute() # SIAResults\n", "firstim = images[0] # SIARecord\n", "\n", "print(type(services))\n", "print(type(nvss))\n", "print(type(nq))\n", "print(type(images))\n", "print(type(firstim))" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Most of the time, it's not necessary to follow all these steps\n", "yourself, so there are functions and methods that provide syntactic\n", "shortcuts. However, when you need some finer control over the\n", "process, it is possible to jump off the fast track and work directly\n", "with an underlying object. 
" ] } ], "metadata": {} } ] }pyvo-0.6.1+dfsg.orig/examples/notebooks/simple_service_discovery.ipynb0000644000175000017500000001234413125165225025713 0ustar noahfxnoahfx{ "metadata": { "name": "", "signature": "sha256:e5bd7bbcbb2f77cb4950888f893c03f6a26e81270b3d9c3d8e35af9c004f431e" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ { "cell_type": "code", "collapsed": false, "input": [ "import pyvo as vo" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The most common use of the registry is to find archives with \n", "`VO data access services`, and the simplest way\n", "to do this is to use the \n", "`~pyvo.regsearch` function. For example, to find data\n", "collections that contain searchable X-ray images:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "services = vo.regsearch(servicetype='sia', waveband='x-ray')\n", "len(services)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Aha! Perhaps you didn't realize there were that many.\n", "What collections are these?" 
] }, { "cell_type": "code", "collapsed": false, "input": [ "for service in services:\n", " print(service.res_title)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "As you can gather, each record in the registry search results\n", "represents a different service (in this case, an image service).\n", "Included in the record is the all-important base URL for the service:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "print(services[0].access_url)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "However, it's not necessary to keep track of that URL because you can\n", "now search that collection directly via the registry record:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "images = services[0].search(pos=(350.85, 58.815), size=0.25)\n", "len(images)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Other types of services via the ``servicetype`` parameter:\n", "\n", "| set ``servicetype`` to... | ...to find:\n", "| ------------------------- | -------------------------------------\n", "| sia | Simple Image Access (SIA) services\n", "| ssa | Simple Spectral Access (SSA) services\n", "| scs | Simple Cone Search (SCS) services\n", "| slap | Simple Line Access (SLA) services\n", "| tap | Table Access Protocol (TAP) services\n", "\n", "For example, to find all known Cone Search services:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "cats = vo.regsearch(servicetype='conesearch')\n", "len(cats)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Wow, that's a lot of catalogs. (Most of these are from the\n", "`Vizier Catalog Archive `)\n", "every Vizier catalog that includes a position is available as a Cone\n", "Search service.) 
For just catalogs related to blazars:" ] }, { "cell_type": "code", "collapsed": false, "input": [ "cats = vo.regsearch(keywords=['blazar'], servicetype='conesearch')\n", "len(cats)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "How about blazars observed with Fermi?" ] }, { "cell_type": "code", "collapsed": false, "input": [ "cats = vo.regsearch(keywords=['blazar', 'Fermi'], servicetype='conesearch')\n", "len(cats)" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Sometimes you may be looking for a particular catalog or image collections\n", "that you already know exists, and you just need to learn the base URL\n", "for the service. The ``keywords`` parameter can be used to find it.\n", "For example, suppose you want to get cutout images from the NRAO VLA\n", "Sky Survey (NVSS):" ] }, { "cell_type": "code", "collapsed": false, "input": [ "colls = vo.regsearch(keywords=[\"NVSS\"], servicetype='sia')\n", "for coll in colls:\n", " print coll.res_title" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [], "language": "python", "metadata": {}, "outputs": [] } ], "metadata": {} } ] }pyvo-0.6.1+dfsg.orig/distribute_setup.py0000644000175000017500000004241013125165225017674 0ustar noahfxnoahfx#!python """Bootstrap distribute installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from distribute_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. 
""" import os import shutil import sys import time import fnmatch import tempfile import tarfile import optparse from distutils import log try: from site import USER_SITE except ImportError: USER_SITE = None try: import subprocess def _python_cmd(*args): args = (sys.executable,) + args return subprocess.call(args) == 0 except ImportError: # will be used for python 2.3 def _python_cmd(*args): args = (sys.executable,) + args # quoting arguments if windows if sys.platform == 'win32': def quote(arg): if ' ' in arg: return '"%s"' % arg return arg args = [quote(arg) for arg in args] return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 DEFAULT_VERSION = "0.6.45" DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" SETUPTOOLS_FAKED_VERSION = "0.6c11" SETUPTOOLS_PKG_INFO = """\ Metadata-Version: 1.0 Name: setuptools Version: %s Summary: xxxx Home-page: xxx Author: xxx Author-email: xxx License: xxx Description: xxx """ % SETUPTOOLS_FAKED_VERSION def _install(tarball, install_args=()): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # installing log.warn('Installing Distribute') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _build_egg(egg, tarball, to_dir): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # building an egg 
log.warn('Building a Distribute egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) finally: os.chdir(old_wd) shutil.rmtree(tmpdir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') def _do_download(version, download_base, to_dir, download_delay): egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' % (version, sys.version_info[0], sys.version_info[1])) if not os.path.exists(egg): tarball = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, tarball, to_dir) sys.path.insert(0, egg) import setuptools setuptools.bootstrap_install_from = egg def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15, no_fake=True): # making sure we use the absolute path to_dir = os.path.abspath(to_dir) was_imported = 'pkg_resources' in sys.modules or \ 'setuptools' in sys.modules try: try: import pkg_resources # Setuptools 0.7b and later is a suitable (and preferable) # substitute for any Distribute version. try: pkg_resources.require("setuptools>=0.7b") return except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): pass if not hasattr(pkg_resources, '_distribute'): if not no_fake: _fake_setuptools() raise ImportError except ImportError: return _do_download(version, download_base, to_dir, download_delay) try: pkg_resources.require("distribute>=" + version) return except pkg_resources.VersionConflict: e = sys.exc_info()[1] if was_imported: sys.stderr.write( "The required version of distribute (>=%s) is not available,\n" "and can't be installed while this script is running. Please\n" "install a more recent version first, using\n" "'easy_install -U distribute'." 
"\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) finally: if not no_fake: _create_fake_setuptools_pkg_info(to_dir) def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15): """Download distribute from a specified location and return its filename `version` should be a valid distribute version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen tgz_name = "distribute-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) src = dst = None if not os.path.exists(saveto): # Avoid repeated downloads try: log.warn("Downloading %s", url) src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. 
data = src.read() dst = open(saveto, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) def _no_sandbox(function): def __no_sandbox(*args, **kw): try: from setuptools.sandbox import DirectorySandbox if not hasattr(DirectorySandbox, '_old'): def violation(*args): pass DirectorySandbox._old = DirectorySandbox._violation DirectorySandbox._violation = violation patched = True else: patched = False except ImportError: patched = False try: return function(*args, **kw) finally: if patched: DirectorySandbox._violation = DirectorySandbox._old del DirectorySandbox._old return __no_sandbox def _patch_file(path, content): """Will backup the file then patch it""" f = open(path) existing_content = f.read() f.close() if existing_content == content: # already patched log.warn('Already patched.') return False log.warn('Patching...') _rename_path(path) f = open(path, 'w') try: f.write(content) finally: f.close() return True _patch_file = _no_sandbox(_patch_file) def _same_content(path, content): f = open(path) existing_content = f.read() f.close() return existing_content == content def _rename_path(path): new_name = path + '.OLD.%s' % time.time() log.warn('Renaming %s to %s', path, new_name) os.rename(path, new_name) return new_name def _remove_flat_installation(placeholder): if not os.path.isdir(placeholder): log.warn('Unkown installation at %s', placeholder) return False found = False for file in os.listdir(placeholder): if fnmatch.fnmatch(file, 'setuptools*.egg-info'): found = True break if not found: log.warn('Could not locate setuptools*.egg-info') return log.warn('Moving elements out of the way...') pkg_info = os.path.join(placeholder, file) if os.path.isdir(pkg_info): patched = _patch_egg_dir(pkg_info) else: patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) if not patched: log.warn('%s already patched.', pkg_info) return False # now let's move the files out of the way for element in ('setuptools', 'pkg_resources.py', 
'site.py'): element = os.path.join(placeholder, element) if os.path.exists(element): _rename_path(element) else: log.warn('Could not find the %s element of the ' 'Setuptools distribution', element) return True _remove_flat_installation = _no_sandbox(_remove_flat_installation) def _after_install(dist): log.warn('After install bootstrap.') placeholder = dist.get_command_obj('install').install_purelib _create_fake_setuptools_pkg_info(placeholder) def _create_fake_setuptools_pkg_info(placeholder): if not placeholder or not os.path.exists(placeholder): log.warn('Could not find the install location') return pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) setuptools_file = 'setuptools-%s-py%s.egg-info' % \ (SETUPTOOLS_FAKED_VERSION, pyver) pkg_info = os.path.join(placeholder, setuptools_file) if os.path.exists(pkg_info): log.warn('%s already exists', pkg_info) return log.warn('Creating %s', pkg_info) try: f = open(pkg_info, 'w') except EnvironmentError: log.warn("Don't have permissions to write %s, skipping", pkg_info) return try: f.write(SETUPTOOLS_PKG_INFO) finally: f.close() pth_file = os.path.join(placeholder, 'setuptools.pth') log.warn('Creating %s', pth_file) f = open(pth_file, 'w') try: f.write(os.path.join(os.curdir, setuptools_file)) finally: f.close() _create_fake_setuptools_pkg_info = _no_sandbox( _create_fake_setuptools_pkg_info ) def _patch_egg_dir(path): # let's check if it's already patched pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') if os.path.exists(pkg_info): if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): log.warn('%s already patched.', pkg_info) return False _rename_path(path) os.mkdir(path) os.mkdir(os.path.join(path, 'EGG-INFO')) pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') f = open(pkg_info, 'w') try: f.write(SETUPTOOLS_PKG_INFO) finally: f.close() return True _patch_egg_dir = _no_sandbox(_patch_egg_dir) def _before_install(): log.warn('Before install bootstrap.') _fake_setuptools() def _under_prefix(location): if 
'install' not in sys.argv: return True args = sys.argv[sys.argv.index('install') + 1:] for index, arg in enumerate(args): for option in ('--root', '--prefix'): if arg.startswith('%s=' % option): top_dir = arg.split('root=')[-1] return location.startswith(top_dir) elif arg == option: if len(args) > index: top_dir = args[index + 1] return location.startswith(top_dir) if arg == '--user' and USER_SITE is not None: return location.startswith(USER_SITE) return True def _fake_setuptools(): log.warn('Scanning installed packages') try: import pkg_resources except ImportError: # we're cool log.warn('Setuptools or Distribute does not seem to be installed.') return ws = pkg_resources.working_set try: setuptools_dist = ws.find( pkg_resources.Requirement.parse('setuptools', replacement=False) ) except TypeError: # old distribute API setuptools_dist = ws.find( pkg_resources.Requirement.parse('setuptools') ) if setuptools_dist is None: log.warn('No setuptools distribution found') return # detecting if it was already faked setuptools_location = setuptools_dist.location log.warn('Setuptools installation detected at %s', setuptools_location) # if --root or --preix was provided, and if # setuptools is not located in them, we don't patch it if not _under_prefix(setuptools_location): log.warn('Not patching, --root or --prefix is installing Distribute' ' in another location') return # let's see if its an egg if not setuptools_location.endswith('.egg'): log.warn('Non-egg installation') res = _remove_flat_installation(setuptools_location) if not res: return else: log.warn('Egg installation') pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') if (os.path.exists(pkg_info) and _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): log.warn('Already patched.') return log.warn('Patching...') # let's create a fake egg replacing setuptools one res = _patch_egg_dir(setuptools_location) if not res: return log.warn('Patching complete.') _relaunch() def _relaunch(): 
log.warn('Relaunching...') # we have to relaunch the process # pip marker to avoid a relaunch bug _cmd1 = ['-c', 'install', '--single-version-externally-managed'] _cmd2 = ['-c', 'install', '--record'] if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2: sys.argv[0] = 'setup.py' args = [sys.executable] + sys.argv sys.exit(subprocess.call(args)) def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). """ import copy import operator from tarfile import ExtractError directories = [] if members is None: members = self for tarinfo in members: if tarinfo.isdir(): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) tarinfo.mode = 448 # decimal for oct 0700 self.extract(tarinfo, path) # Reverse sort directories. if sys.version_info < (2, 4): def sorter(dir1, dir2): return cmp(dir1.name, dir2.name) directories.sort(sorter) directories.reverse() else: directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. 
for tarinfo in directories: dirpath = os.path.join(path, tarinfo.name) try: self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError: e = sys.exc_info()[1] if self.errorlevel > 1: raise else: self._dbg(1, "tarfile: %s" % e) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the distribute package """ install_args = [] if options.user_install: if sys.version_info < (2, 6): log.warn("--user requires Python 2.6 or later") raise SystemExit(1) install_args.append('--user') return install_args def _parse_args(): """ Parse the command line for options """ parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package (requires Python 2.6 or later)') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the distribute package') options, args = parser.parse_args() # positional arguments are ignored return options def main(version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" options = _parse_args() tarball = download_setuptools(download_base=options.download_base) return _install(tarball, _build_install_args(options)) if __name__ == '__main__': sys.exit(main()) pyvo-0.6.1+dfsg.orig/ez_setup.py0000644000175000017500000003071213125165225016136 0ustar noahfxnoahfx#!/usr/bin/env python """ Setuptools bootstrapping installer. Maintained at https://github.com/pypa/setuptools/tree/bootstrap. Run this script to install or upgrade setuptools. 
""" import os import shutil import sys import tempfile import zipfile import optparse import subprocess import platform import textwrap import contextlib import json import codecs from distutils import log try: from urllib.request import urlopen from urllib.parse import urljoin except ImportError: from urllib2 import urlopen from urlparse import urljoin try: from site import USER_SITE except ImportError: USER_SITE = None LATEST = object() DEFAULT_VERSION = LATEST DEFAULT_URL = "https://pypi.io/packages/source/s/setuptools/" DEFAULT_SAVE_DIR = os.curdir MEANINGFUL_INVALID_ZIP_ERR_MSG = 'Maybe {0} is corrupted, delete it and try again.' def _python_cmd(*args): """ Execute a command. Return True if the command succeeded. """ args = (sys.executable,) + args return subprocess.call(args) == 0 def _install(archive_filename, install_args=()): """Install Setuptools.""" with archive_context(archive_filename): # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 def _build_egg(egg, archive_filename, to_dir): """Build Setuptools egg.""" with archive_context(archive_filename): # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') class ContextualZipFile(zipfile.ZipFile): """Supplement ZipFile class to support context manager for Python 2.6.""" def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __new__(cls, *args, **kwargs): """Construct a ZipFile or ContextualZipFile as appropriate.""" if hasattr(zipfile.ZipFile, '__exit__'): return zipfile.ZipFile(*args, **kwargs) return super(ContextualZipFile, cls).__new__(cls) @contextlib.contextmanager def archive_context(filename): 
""" Unzip filename to a temporary directory, set to the cwd. The unzipped target is cleaned up after. """ tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) try: with ContextualZipFile(filename) as archive: archive.extractall() except zipfile.BadZipfile as err: if not err.args: err.args = ('', ) err.args = err.args + ( MEANINGFUL_INVALID_ZIP_ERR_MSG.format(filename), ) raise # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) yield finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _do_download(version, download_base, to_dir, download_delay): """Download Setuptools.""" py_desig = 'py{sys.version_info[0]}.{sys.version_info[1]}'.format(sys=sys) tp = 'setuptools-{version}-{py_desig}.egg' egg = os.path.join(to_dir, tp.format(**locals())) if not os.path.exists(egg): archive = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, archive, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). if 'pkg_resources' in sys.modules: _unload_pkg_resources() import setuptools setuptools.bootstrap_install_from = egg def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, download_delay=15): """ Ensure that a setuptools version is installed. Return None. Raise SystemExit if the requested version or later cannot be installed. """ version = _resolve_version(version) to_dir = os.path.abspath(to_dir) # prior to importing, capture the module state for # representative modules. 
rep_modules = 'pkg_resources', 'setuptools' imported = set(sys.modules).intersection(rep_modules) try: import pkg_resources pkg_resources.require("setuptools>=" + version) # a suitable version is already installed return except ImportError: # pkg_resources not available; setuptools is not installed; download pass except pkg_resources.DistributionNotFound: # no version of setuptools was found; allow download pass except pkg_resources.VersionConflict as VC_err: if imported: _conflict_bail(VC_err, version) # otherwise, unload pkg_resources to allow the downloaded version to # take precedence. del pkg_resources _unload_pkg_resources() return _do_download(version, download_base, to_dir, download_delay) def _conflict_bail(VC_err, version): """ Setuptools was imported prior to invocation, so it is unsafe to unload it. Bail out. """ conflict_tmpl = textwrap.dedent(""" The required version of setuptools (>={version}) is not available, and can't be installed while this script is running. Please install a more recent version first, using 'easy_install -U setuptools'. (Currently using {VC_err.args[0]!r}) """) msg = conflict_tmpl.format(**locals()) sys.stderr.write(msg) sys.exit(2) def _unload_pkg_resources(): sys.meta_path = [ importer for importer in sys.meta_path if importer.__class__.__module__ != 'pkg_resources.extern' ] del_modules = [ name for name in sys.modules if name.startswith('pkg_resources') ] for mod_name in del_modules: del sys.modules[mod_name] def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell. Powershell will validate trust. Raise an exception if the command cannot complete. 
""" target = os.path.abspath(target) ps_cmd = ( "[System.Net.WebRequest]::DefaultWebProxy.Credentials = " "[System.Net.CredentialCache]::DefaultCredentials; " '(new-object System.Net.WebClient).DownloadFile("%(url)s", "%(target)s")' % locals() ) cmd = [ 'powershell', '-Command', ps_cmd, ] _clean_check(cmd, target) def has_powershell(): """Determine if Powershell is available.""" if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--location', '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] with open(os.path.devnull, 'wb') as devnull: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: return False return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """Use Python to download the file, without connection authentication.""" src = urlopen(url) try: # Read all the data in one block. data = src.read() finally: src.close() # Write all the data in one block to avoid creating a partial file. 
with open(target, "wb") as dst: dst.write(data) download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = ( download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ) viable_downloaders = (dl for dl in downloaders if dl.viable()) return next(viable_downloaders, None) def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=DEFAULT_SAVE_DIR, delay=15, downloader_factory=get_best_downloader): """ Download setuptools from a specified location and return its filename. `version` should be a valid setuptools version number that is available as an sdist for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. """ version = _resolve_version(version) # making sure we use the absolute path to_dir = os.path.abspath(to_dir) zip_name = "setuptools-%s.zip" % version url = download_base + zip_name saveto = os.path.join(to_dir, zip_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _resolve_version(version): """ Resolve LATEST version """ if version is not LATEST: return version meta_url = urljoin(DEFAULT_URL, '/pypi/setuptools/json') resp = urlopen(meta_url) with contextlib.closing(resp): try: charset = resp.info().get_content_charset() except Exception: # Python 2 compat; assume UTF-8 charset = 'UTF-8' reader = codecs.getreader(charset) doc = json.load(reader(resp)) return str(doc['info']['version']) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package. Returns list of command line arguments. 
""" return ['--user'] if options.user_install else [] def _parse_args(): """Parse the command line for options.""" parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) parser.add_option( '--version', help="Specify which version to download", default=DEFAULT_VERSION, ) parser.add_option( '--to-dir', help="Directory to save (and re-use) package", default=DEFAULT_SAVE_DIR, ) options, args = parser.parse_args() # positional arguments are ignored return options def _download_args(options): """Return args for download_setuptools function from cmdline args.""" return dict( version=options.version, download_base=options.download_base, downloader_factory=options.downloader_factory, to_dir=options.to_dir, ) def main(): """Install or upgrade setuptools and EasyInstall.""" options = _parse_args() archive = download_setuptools(**_download_args(options)) return _install(archive, _build_install_args(options)) if __name__ == '__main__': sys.exit(main()) pyvo-0.6.1+dfsg.orig/.gitmodules0000644000175000017500000000015413125165225016100 0ustar noahfxnoahfx[submodule "astropy_helpers"] path = astropy_helpers url = https://github.com/astropy/astropy-helpers.git pyvo-0.6.1+dfsg.orig/tests/0000755000175000017500000000000013125165225015065 5ustar noahfxnoahfxpyvo-0.6.1+dfsg.orig/tests/README.txt0000644000175000017500000000013213125165225016557 0ustar noahfxnoahfxThis directory holds extra unittest tests not to be run as part of python setup.py test. 
pyvo-0.6.1+dfsg.orig/tests/testLicense.py0000644000175000017500000000334613125165225017727 0ustar noahfxnoahfx#!/usr/bin/env python """ Tests for pyvo.dal.query """ from __future__ import print_function, division import os, sys, shutil, re, imp, glob import unittest, pdb license_ref_line = \ "# Licensed under a 3-clause BSD style license - see LICENSE.rst" license_file = "licenses/LICENSE.rst" class LicenseTestCase(unittest.TestCase): def assertHasLicenseRef(self, filename): with open(filename) as srcf: lines = srcf.readlines() self.assertTrue( len(filter(lambda ln: ln.startswith(license_ref_line), lines)) > 0, "{0} does not have license reference line".format(filename)) self.assertTrue(lines[0].startswith(license_ref_line) or lines[1].startswith(license_ref_line), "license reference line is not 1st or 2nd line in {0}".format(license_ref_line)) def testHasLicense(self): self.assertTrue(os.path.exists(license_file), "license/LICENSE.rst appears to be missing (what dir are you in?)") def list_py_files(arg, dirname, names): return map(lambda f: (f[:-3], os.path.join(dirname,f)), filter(lambda n: n.endswith(".py"), names)) for dirp, dirs, files in os.walk("pyvo"): for fname in files: if not fname.endswith(".py") or fname == "version.py": continue path = os.path.join(dirp, fname) name = re.sub(r'/', "_", path) f = "lambda s: s.assertHasLicenseRef('{0}')".format(path) setattr(LicenseTestCase, "test_"+name, eval(f)) __all__ = "LicenseTestCase".split() def suite(): tests = [] for t in __all__: tests.append(unittest.makeSuite(globals()[t])) return unittest.TestSuite(tests) if __name__ == "__main__": unittest.main() pyvo-0.6.1+dfsg.orig/setup.py0000644000175000017500000001110113125165225015427 0ustar noahfxnoahfx#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import glob import os import sys import ah_bootstrap from setuptools import setup # A dirty hack to get around some early import/configurations ambiguities if sys.version_info[0] >= 
3: import builtins else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = True from astropy_helpers.setup_helpers import (register_commands, get_debug_option, get_package_info) from astropy_helpers.git_helpers import get_git_devstr from astropy_helpers.version_helpers import generate_version_py # Get some values from the setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read(['setup.cfg']) metadata = dict(conf.items('metadata')) PACKAGENAME = metadata.get('package_name', 'pyvo') DESCRIPTION = metadata.get('description', 'Astropy affiliated package') AUTHOR = metadata.get('author', '') AUTHOR_EMAIL = metadata.get('author_email', '') LICENSE = metadata.get('license', 'unknown') URL = metadata.get('url', '') # order of priority for long_description: # (1) set in setup.cfg, # (2) load LONG_DESCRIPTION.rst, # (3) load README.rst, # (4) package docstring readme_glob = 'README*' _cfg_long_description = metadata.get('long_description', '') if _cfg_long_description: LONG_DESCRIPTION = _cfg_long_description elif os.path.exists('LONG_DESCRIPTION.rst'): with open('LONG_DESCRIPTION.rst') as f: LONG_DESCRIPTION = f.read() elif len(glob.glob(readme_glob)) > 0: with open(glob.glob(readme_glob)[0]) as f: LONG_DESCRIPTION = f.read() else: # Get the long description from the package's docstring __import__(PACKAGENAME) package = sys.modules[PACKAGENAME] LONG_DESCRIPTION = package.__doc__ # Store the package name in a built-in variable so it's easy # to get from other parts of the setup infrastructure builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME # VERSION should be PEP440 compatible (http://www.python.org/dev/peps/pep-0440) VERSION = metadata.get('version', '0.0.dev') # Indicates if this version is a release version RELEASE = 'dev' not in VERSION if not RELEASE: VERSION += get_git_devstr(False) # Populate the dict of setup command overrides; this should be done before # invoking any other 
functionality from distutils since it can potentially # modify distutils' behavior. cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE) # Freeze build information in version.py generate_version_py(PACKAGENAME, VERSION, RELEASE, get_debug_option(PACKAGENAME)) # Treat everything in scripts except README* as a script to be installed scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) if not os.path.basename(fname).startswith('README')] # Get configuration information from all of the various subpackages. # See the docstring for setup_helpers.update_package_files for more # details. package_info = get_package_info() # Add the project-global data package_info['package_data'].setdefault(PACKAGENAME, []) package_info['package_data'][PACKAGENAME].append('data/*') # Define entry points for command-line scripts entry_points = {'console_scripts': []} entry_point_list = conf.items('entry_points') for entry_point in entry_point_list: entry_points['console_scripts'].append('{0} = {1}'.format(entry_point[0], entry_point[1])) # Include all .c files, recursively, including those generated by # Cython, since we can not do this in MANIFEST.in with a "dynamic" # directory name. c_files = [] for root, dirs, files in os.walk(PACKAGENAME): for filename in files: if filename.endswith('.c'): c_files.append( os.path.join( os.path.relpath(root, PACKAGENAME), filename)) package_info['package_data'][PACKAGENAME].extend(c_files) # Note that requires and provides should not be included in the call to # ``setup``, since these are now deprecated. 
See this link for more details: # https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM setup(name=PACKAGENAME, version=VERSION, description=DESCRIPTION, scripts=scripts, install_requires=metadata.get('install_requires', 'astropy').strip().split(), author=AUTHOR, author_email=AUTHOR_EMAIL, license=LICENSE, url=URL, long_description=LONG_DESCRIPTION, cmdclass=cmdclassd, zip_safe=False, use_2to3=False, entry_points=entry_points, **package_info )