devpi-common-3.2.2/0000755000076500000240000000000013263321717015066 5ustar fschulzestaff00000000000000devpi-common-3.2.2/CHANGELOG0000644000076500000240000000625313263321713016302 0ustar fschulzestaff00000000000000 .. towncrier release notes start 3.2.2 (2018-04-11) ================== Other Changes ------------- - fix deprecation warning from pkg_resources. 3.2.1 (2018-01-18) ================== Bug Fixes --------- - fix issue496: PyPy 5.10 wheel upload failed because the version in the filename is longer again, the check for it is now removed, because it's pointless. 3.2.0 (2017-11-23) ================== No significant changes. 3.2.0rc1 (2017-09-08) ===================== Bug Fixes --------- - fix issue343: enhanced ``splitbasename`` to split the name and version correctly in more cases. - fix for url decoding issue with mirrors. When package filenames contain characters such as `!` or `+`, these get URL encoded to `%21` and `%2B` in the remote simple index. This fix ensures that in the filename saved to the disk cache these are decoded back to `!` or `+`. 3.1.0 (2017-04-18) ================== - add ``username``, ``password``, ``hostname`` and ``port`` properties to URL objects - expose SSLError on Session object to allow checking for verification errors - add ``max_retries`` keyword option to ``new_requests_session``. - fix ``get_latest_version`` when there are no versions. 3.0.1 (2016-07-07) ================== - fix issue355: accept PyPy version numbers in package filenames 3.0.0 (2016-05-12) ================== - fully implement normalization from PEP-503 to allow pip 8.1.2 to install packages with dots in their name - dropped support for Python 2.6. 2.0.10 (2016-05-11) =================== - revert the normalization change, as it causes other issues 2.0.9 (2016-05-11) ================== - fix issue343 and issue344: fully implement normalization from PEP-503 to allow pip 8.1.2 to install packages with dots in their name 2.0.8 (2015-11-11) ================== - fix URL.joinpath to not add double slashes 2.0.7 (2015-09-14) ================== - fix issue272: added __ne__ to URL class, so comparisons work correctly with Python 2.x 2.0.6 (2015-05-13) ================== - add devpi_common.type.parse_hash_spec helper for parsing "HASH_TYPE=VALUE" strings into an callable algorithm and the value - add hash_type, hash_value and hash_algo to URL class 2.0.5 (2015-02-24) ================== - added code to allow filtering on stable version numbers. 2.0.4 (2014-11-27) ================== - gracefully handle missing toxresult files. They can be missing on replicas while it's catching up with the master. 2.0.3 (2014-09-22) ================== - added code to iterate over toxresults handling all the details. 2.0.2 ===== - fix issue144: offer session.Errors for catching the possible exceptions that requests can throw (RequestException and urllib3..HTTPError currently) 2.0.1 ===== - fix issue145: re-introduce propmapping so that devpi-common has higher chances to work for devpi-server<2.0 2.0 === - avoid depending on requests-2.0.1 which does not support SNI. addresses issue21 - fix issue104: don't define an entrypoint. Thanks Maximilien Riehl. - fix issue88: don't do our own proxy handling because requests-2.2.1 does it itself. 
1.2 === - initial release, shifted functionality from devpi-server and devpi-client devpi-common-3.2.2/devpi_common/0000755000076500000240000000000013263321717017545 5ustar fschulzestaff00000000000000devpi-common-3.2.2/devpi_common/__init__.py0000644000076500000240000000003013263321713021643 0ustar fschulzestaff00000000000000# __version__ = '3.2.2' devpi-common-3.2.2/devpi_common/archive.py0000644000076500000240000001177513263321713021547 0ustar fschulzestaff00000000000000""" remotely based on some code from https://pypi.python.org/pypi/Archive/0.3 """ import os import tarfile import zipfile import py class UnsupportedArchive(ValueError): pass def Archive(path_or_file): """ return in-memory Archive object, wrapping ZipArchive or TarArchive with uniform methods. If an error is raised, any passed in file will be closed. An Archive instance acts as a context manager so that you can use:: with Archive(...) as archive: archive.extract(...) # or other methods and be sure that file handles will be closed. If you do not use it as a context manager, you need to call archive.close() yourself. """ if hasattr(path_or_file, "seek"): f = path_or_file else: f = open(str(path_or_file), "rb") try: try: return ZipArchive(f) except zipfile.BadZipfile: f.seek(0) try: return TarArchive(f) except tarfile.TarError: raise UnsupportedArchive() except Exception: f.close() raise class BaseArchive(object): class FileNotExist(ValueError): """ File does not exist. """ def __init__(self, file): self.file = file def read(self, name): f = self.getfile(name) try: return f.read() finally: f.close() def close(self): self.file.close() def __enter__(self): return self def __exit__(self, *args): self.close() class TarArchive(BaseArchive): def __init__(self, file): super(TarArchive, self).__init__(file) self._archive = tarfile.open(mode="r", fileobj=file) def namelist(self, *args, **kwargs): return self._archive.getnames(*args, **kwargs) def printdir(self, *args, **kwargs): self._archive.list(*args, **kwargs) def getfile(self, name): try: member = self._archive.getmember(name) except KeyError: raise self.FileNotExist(name) else: return self._archive.extractfile(member) def extract(self, to_path=''): to_path = py.path.local(to_path) members = self._archive.getmembers() for member in members: target = to_path.join(member.name, abs=True) if not target.relto(to_path): raise ValueError("archive name %r out of bound" %(member.name,)) self._archive.extractall(str(to_path)) class ZipArchive(BaseArchive): def __init__(self, file): super(ZipArchive, self).__init__(file) self._archive = zipfile.ZipFile(file) def printdir(self, *args, **kwargs): self._archive.printdir(*args, **kwargs) def namelist(self, *args, **kwargs): return self._archive.namelist(*args, **kwargs) def getfile(self, name): try: return self._archive.open(name) except KeyError: raise self.FileNotExist(name) def extract(self, to_path='', safe=False): # XXX unify with TarFile.extract basedir = py.path.local(to_path) unzipfile = self._archive members = unzipfile.namelist() for name in members: fpath = basedir.join(name, abs=True) if not fpath.relto(basedir): raise ValueError("out of bound path name:" + name) if name.endswith(basedir.sep) or name[-1] == "/": fpath.ensure(dir=1) else: fpath.dirpath().ensure(dir=1) with fpath.open("wb") as f: f.write(unzipfile.read(name)) def zip_dir(basedir, dest=None): if dest is None: f = py.io.BytesIO() else: f = open(str(dest), "wb") zip = py.std.zipfile.ZipFile(f, "w") try: _writezip(zip, basedir) finally: zip.close() if dest is None: 
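# no dest path was given: the archive was assembled in the in-memory BytesIO buffer, so hand back its bytes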
return f.getvalue() def _writezip(zip, basedir): for p in basedir.visit(): if p.check(dir=1): if not p.listdir(): path = p.relto(basedir) + "/" zipinfo = py.std.zipfile.ZipInfo(path) zip.writestr(zipinfo, "") else: path = p.relto(basedir) zip.writestr(path, p.read("rb")) def zip_dict(contentdict): f = py.io.BytesIO() zip = py.std.zipfile.ZipFile(f, "w") _writezip_fromdict(zip, contentdict) zip.close() return f.getvalue() def _writezip_fromdict(zip, contentdict, prefixes=()): for name, val in contentdict.items(): if isinstance(val, dict): newprefixes = prefixes + (name,) if not val: path = os.sep.join(newprefixes) + os.sep zipinfo = py.std.zipfile.ZipInfo(path) zip.writestr(zipinfo, "") else: _writezip_fromdict(zip, val, newprefixes) else: path = os.sep.join(prefixes + (name,)) if py.builtin._istext(val): val = val.encode("ascii") zip.writestr(path, val) devpi-common-3.2.2/devpi_common/metadata.py0000644000076500000240000001454113263321713021700 0ustar fschulzestaff00000000000000import posixpath import re import py from pkg_resources import parse_version, Requirement from .types import CompareMixin from .validation import normalize_name ALLOWED_ARCHIVE_EXTS = set( ".dmg .deb .msi .rpm .exe .egg .whl .tar.gz " ".tar.bz2 .tar .tgz .zip .doc.zip".split()) _releasefile_suffix_rx = re.compile(r"(\.zip|\.tar\.gz|\.tgz|\.tar\.bz2|" "\.doc\.zip|" "\.macosx-\d+.*|" "\.linux-.*|" "\.[^\.]*\.rpm|" "\.win-amd68-py[23]\.\d\..*|" "\.win32-py[23]\.\d\..*|" "\.win.*\..*|" "-(?:py|cp|ip|pp|jy)[23][\d\.]*.*\..*|" ")$", re.IGNORECASE) # see also PEP425 for supported "python tags" _pyversion_type_rex = re.compile( r"(?:py|cp|ip|pp|jy)([\d\.py]+).*\.(exe|egg|msi|whl)", re.IGNORECASE) _ext2type = dict(exe="bdist_wininst", egg="bdist_egg", msi="bdist_msi", whl="bdist_wheel") _wheel_file_re = re.compile( r"""^(?P(?P.+?)-(?P.*?)) ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?) \.whl|\.dist-info)$""", re.VERBOSE) _pep404_nameversion_re = re.compile( r"^(?P[^.]+?)-(?P" r"(?:[1-9]\d*!)?" # [N!] r"(?:0|[1-9]\d*)" # N r"(?:\.(?:0|[1-9]\d*))*" # (.N)* r"(?:(?:a|b|rc)(?:0|[1-9]\d*))?" # [{a|b|rc}N] r"(?:\.post(?:0|[1-9]\d*))?" # [.postN] r"(?:\.dev(?:0|[1-9]\d*))?" # [.devN] r"(?:\+(?:[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?" # local version r")$") _legacy_nameversion_re = re.compile( r"^(?P[^.]+?)-(?P" r"(?:[1-9]\d*!)?" # [N!] r"(?:0|[1-9]\d*)" # N r"(?:\.(?:0|[1-9]\d*))*" # (.N)* r"(?:(?:a|b|rc|alpha|beta)(?:0|[1-9]\d*))?" # [{a|b|rc}N] r"(?:\.post(?:0|[1-9]\d*))?" # [.postN] r"(?:\.dev(?:0|[1-9]\d*))?" # [.devN] r"(?:\-(?:[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?" # local version r")$") def get_pyversion_filetype(basename): _,_,suffix = splitbasename(basename) if suffix in (".zip", ".tar.gz", ".tgz", "tar.bz2"): return ("source", "sdist") m = _pyversion_type_rex.search(suffix) if not m: return ("any", "bdist_dumb") pyversion, ext = m.groups() if pyversion == "2.py3": # "universal" wheel with no C pyversion = "2.7" # arbitrary but pypi/devpi makes no special use # of "pyversion" anyway?! elif "." 
not in pyversion: pyversion = ".".join(pyversion) return (pyversion, _ext2type[ext]) def splitbasename(path, checkarch=True): nameversion, ext = splitext_archive(path) if ext == '.whl': m = _wheel_file_re.match(path) if m: info = m.groupdict() return ( info['name'], info['ver'], '-%s-%s-%s.whl' % (info['pyver'], info['abi'], info['plat'])) if checkarch and ext.lower() not in ALLOWED_ARCHIVE_EXTS: raise ValueError("invalid archive type %r in: %s" % (ext, path)) m = _releasefile_suffix_rx.search(path) if m: ext = m.group(1) if len(ext): nameversion = path[:-len(ext)] else: nameversion = path if '-' not in nameversion: # no version return nameversion, "", ext m = _pep404_nameversion_re.match(nameversion) if m: (projectname, version) = m.groups() return projectname, version, ext m = _legacy_nameversion_re.match(nameversion) if m: (projectname, version) = m.groups() return projectname, version, ext (projectname, version) = nameversion.rsplit('-', 1) return projectname, version, ext DOCZIPSUFFIX = ".doc.zip" def splitext_archive(basename): basename = getattr(basename, "basename", basename) if basename.lower().endswith(DOCZIPSUFFIX): ext = basename[-len(DOCZIPSUFFIX):] base = basename[:-len(DOCZIPSUFFIX)] else: base, ext = posixpath.splitext(basename) if base.lower().endswith('.tar'): ext = base[-4:] + ext base = base[:-4] return base, ext class Version(CompareMixin): def __init__(self, versionstring): self.string = versionstring self.cmpval = parse_version(versionstring) def __str__(self): return self.string def is_prerelease(self): if hasattr(self.cmpval, 'is_prerelease'): return self.cmpval.is_prerelease # backward compatibility for x in self.cmpval: if x.startswith('*') and x < '*final': return True return False class BasenameMeta(CompareMixin): def __init__(self, obj, sameproject=False): self.obj = obj basename = getattr(obj, "basename", obj) if not isinstance(basename, py.builtin._basestring): raise ValueError("need object with basename attribute") assert "/" not in basename, (obj, basename) name, version, ext = splitbasename(basename, checkarch=False) self.name = name self.version = version self.ext = ext if sameproject: self.cmpval = (parse_version(version), normalize_name(name), ext) else: self.cmpval = (normalize_name(name), parse_version(version), ext) def __repr__(self): return "" %(self.name, self.version) def sorted_sameproject_links(links): s = sorted((BasenameMeta(link, sameproject=True) for link in links), reverse=True) return [x.obj for x in s] def get_latest_version(seq, stable=False): if not seq: return versions = map(Version, seq) if stable: versions = [x for x in versions if not x.is_prerelease()] if not versions: return return max(versions).string def get_sorted_versions(versions, reverse=True, stable=False): versions = sorted(map(Version, versions), reverse=reverse) if stable: versions = [x for x in versions if not x.is_prerelease()] return [x.string for x in versions] def is_archive_of_project(basename, targetname): nameversion, ext = splitext_archive(basename) # we don't check for strict equality because pypi currently # shows "x-docs-1.0.tar.gz" for targetname "x" (however it was uploaded) if not normalize_name(nameversion).startswith(targetname): return False if ext.lower() not in ALLOWED_ARCHIVE_EXTS: return False return True def parse_requirement(s): return Requirement.parse(s) devpi-common-3.2.2/devpi_common/proc.py0000644000076500000240000000121313263321713021053 0ustar fschulzestaff00000000000000import sys from subprocess import Popen, CalledProcessError, PIPE def 
check_output(*args, **kwargs): # subprocess.check_output does not exist on python26 if "universal_newlines" not in kwargs: kwargs["universal_newlines"] = True popen = Popen(stdout=PIPE, *args, **kwargs) output, unused_err = popen.communicate() retcode = popen.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = args[0] if sys.version_info < (2,7): raise CalledProcessError(retcode, cmd) else: raise CalledProcessError(retcode, cmd, output=output) return output devpi-common-3.2.2/devpi_common/request.py0000644000076500000240000000170713263321713021610 0ustar fschulzestaff00000000000000import sys from requests import Session from requests.adapters import HTTPAdapter from requests.exceptions import ConnectionError, RequestException, BaseHTTPError, SSLError class RetrySession(Session): def __init__(self, max_retries): super(RetrySession, self).__init__() if max_retries is not None: self.mount('https://', HTTPAdapter(max_retries=max_retries)) self.mount('http://', HTTPAdapter(max_retries=max_retries)) def new_requests_session(agent=None, max_retries=None): if agent is None: agent = "devpi" else: agent = "devpi-%s/%s" % agent agent += " (py%s; %s)" % (sys.version.split()[0], sys.platform) session = RetrySession(max_retries) session.headers["user-agent"] = agent session.ConnectionError = ConnectionError session.RequestException = RequestException session.Errors = (RequestException, BaseHTTPError) session.SSLError = SSLError return session devpi-common-3.2.2/devpi_common/types.py0000644000076500000240000001200113263321713021251 0ustar fschulzestaff00000000000000from __future__ import unicode_literals import hashlib import operator import py FunctionType = py.std.types.FunctionType # re-introduced for 2.0 series but not used anymore def propmapping(name, convert=None): if convert is None: def fget(self): return self._mapping.get(name) else: def fget(self): x = self._mapping.get(name) if x is not None: x = convert(x) return x fget.__name__ = name return property(fget) def canraise(Error): def wrap(func): func.Error = Error return func return wrap def cached_property(f): """returns a cached property that is calculated by function f""" def get(self): try: return self._property_cache[f] except AttributeError: self._property_cache = {} except KeyError: pass x = self._property_cache[f] = f(self) return x def set(self, val): propcache = self.__dict__.setdefault("_property_cache", {}) propcache[f] = val return property(get, set) class CompareMixin(object): def _cmp(self, other, op): return op(self.cmpval, other.cmpval) def __lt__(self, other): return self._cmp(other, operator.lt) def __le__(self, other): return self._cmp(other, operator.le) def __eq__(self, other): return self._cmp(other, operator.eq) def __ne__(self, other): return self._cmp(other, operator.ne) def __ge__(self, other): return self._cmp(other, operator.ge) def __gt__(self, other): return self._cmp(other, operator.gt) class lazydecorator: """ lazy decorators: remove global state from your app, e.g. Bottle and Flask. A lazy decorator takes the place of another decorator, but just memoizes decoration parameters and lets you choose when to apply the actual decorator. This means that you are not tied to apply decorators like the typical flask/bottle ``@app.route("/index")`` at import time and thus don't need to create a global ``app`` object. 
Example usage in a module.py: from lazydecorator import lazydecorator route = lazydecorator() class MyServer: @route("/index") def index(self): pass The lazydecorator "route" instance returns the same ``index`` function it receives but sets an attribute to remember the ``("/index")`` parameter. Later, after importing the ``module`` you can then apply your ``@app.route`` decorator like this:: def create_app(): app = Bottle() import module myserver = module.MyServer() module.route.discover_and_call(myserver, app.route) # The app.route decorator is called with the bound # ``myserver.index`` method order of registrations is preserved. (c) holger krekel, 2013, License: MIT """ def __init__(self): self.attrname = "_" + hex(id(self)) self.num = 0 def __call__(self, *args, **kwargs): def decorate(func): try: num, siglist = getattr(func, self.attrname) except AttributeError: siglist = [] func.__dict__[self.attrname] = (self.num, siglist) self.num += 1 siglist.append((args, kwargs)) return func return decorate def discover(self, obj): decitems = [] if isinstance(obj, dict): def iter(): for name in obj: yield name, obj[name] else: def iter(): for name in dir(obj): yield name, getattr(obj, name) for name, func in iter(): func_orig = func if not isinstance(func, FunctionType): try: func = func.__func__ except AttributeError: continue try: num, siglist = getattr(func, self.attrname) except AttributeError: continue decitems.append((num, func_orig, siglist)) decitems.sort() l = [] for num, func_orig, siglist in decitems: for args, kwargs in siglist: l.append((func_orig, args, kwargs)) return l def discover_and_call(self, obj, dec): for func, args, kwargs in self.discover(obj): newfunc = dec(*args, **kwargs)(func) assert newfunc == func def ensure_unicode(x): if py.builtin._istext(x): return x return py.builtin._totext(x, "utf8") def parse_hash_spec(fragment): """ Return (hashtype, hash_value) from parsing a given X=Y fragment. 
X must be a supported algorithm by the python hashlib module.""" parts = fragment.split("=", 1) if len(parts) == 2: algoname, hash_value = parts algo = getattr(hashlib, algoname, None) if algo is not None: return algo, hash_value return None, None devpi-common-3.2.2/devpi_common/url.py0000644000076500000240000001251013263321713020714 0ustar fschulzestaff00000000000000import sys import posixpath from devpi_common.types import cached_property, ensure_unicode, parse_hash_spec from requests.models import parse_url if sys.version_info >= (3, 0): from urllib.parse import urlparse, urlunsplit, urljoin, unquote else: from urlparse import urlparse, urlunsplit, urljoin from urllib import unquote def _joinpath(url, args, asdir=False): new = url for arg in args[:-1]: new = urljoin(new, arg.rstrip("/")) + "/" new = urljoin(new, args[-1]) if asdir: new = new.rstrip("/") + "/" return new class URL: def __init__(self, url="", *args, **kwargs): if isinstance(url, URL): url = url.url if args: url = _joinpath(url, args, **kwargs) if not url: url = "" self.url = ensure_unicode(url) def __nonzero__(self): return bool(self.url) __bool__ = __nonzero__ def __repr__(self): c = repr(self.url.encode("utf8")) if sys.version_info >= (3,0): c = c.lstrip("b") return "" % c def __eq__(self, other): return self.url == getattr(other, "url", other) def __ne__(self, other): return not (self == other) def geturl_nofragment(self): """ return url without fragment """ scheme, netloc, url, params, query, ofragment = self._parsed return URL(urlunsplit((scheme, netloc, url, query, ""))) @property def hash_spec(self): hashalgo, hash_value = parse_hash_spec(self._parsed[-1]) if hashalgo: hashtype = self._parsed[-1].split("=")[0] return "%s=%s" %(hashtype, hash_value) return "" @property def hash_algo(self): return parse_hash_spec(self._parsed[-1])[0] @property def hash_value(self): return parse_hash_spec(self._parsed[-1])[1] def replace(self, **kwargs): _parsed = self._parsed url = [] for field in ('scheme', 'netloc', 'path', 'query', 'fragment'): url.append(kwargs.get(field, getattr(_parsed, field))) return URL(urlunsplit(url)) @property def netloc(self): return self._parsed.netloc @property def username(self): return self._parsed.username @property def password(self): return self._parsed.password @property def hostname(self): return self._parsed.hostname @property def port(self): return self._parsed.port @property def scheme(self): return self._parsed.scheme @property def url_nofrag(self): return self.geturl_nofragment().url def __hash__(self): return hash(self.url) @cached_property def _parsed(self): return urlparse(self.url) def is_valid_http_url(self): try: x = parse_url(self.url) except Exception: return False return x.scheme in ("http", "https") @property def path(self): return self._parsed.path @property def basename(self): return posixpath.basename(unquote(self._parsed.path)) @property def parentbasename(self): return posixpath.basename(posixpath.dirname(unquote(self._parsed.path))) @property def eggfragment(self): frag = self._parsed.fragment if frag.startswith("egg="): return frag[4:] @property def md5(self): val = self._parsed.fragment if val.startswith("md5="): return ensure_unicode(val[4:]) @property def sha256(self): val = self._parsed.fragment if val.startswith("sha256="): return ensure_unicode(val[4:]) def joinpath(self, *args, **kwargs): newurl = _joinpath(self.url, args, **kwargs) return URL(newurl) def addpath(self, *args, **kwargs): url = self.url.rstrip("/") + "/" return URL(_joinpath(url, args, **kwargs)) def 
relpath(self, target): """ return a relative path which will point to the target resource.""" parts1 = self.path.split("/") parts2 = target.split("/") if not parts2 or parts2[0]: raise ValueError("not an absolute target: %s" % (target,)) for i, part in enumerate(parts1): if parts2[i] == part: continue prefix = "../" * (len(parts1)-i-1) return prefix + "/".join(parts2[i:]) rest = parts2[len(parts1):] if parts1[-1]: # ends not in slash rest.insert(0, parts1[-1]) return "/".join(rest) def asdir(self): if self.url[-1:] == "/": return self return self.__class__(self.url + "/") def asfile(self): if self.url[-1:] == "/": return self.__class__(self.url.rstrip("/")) return self def torelpath(self): """ return scheme/netloc/path/fragment into a canonical relative filepath. Only the scheme, netlocation and path are mapped, fragments and queries are ignored. """ parsed = self._parsed assert parsed.scheme in ("http", "https") return "%s/%s%s" % (parsed.scheme, parsed.netloc, parsed.path) @classmethod def fromrelpath(cls, relpath): """ return url from canonical relative path. """ scheme, netlocpath = relpath.split("/", 1) return cls(scheme + "://" + netlocpath) devpi-common-3.2.2/devpi_common/validation.py0000644000076500000240000001047013263321713022247 0ustar fschulzestaff00000000000000import re from .types import ensure_unicode # below code mostly taken from pypi's mini_pkg_resources.py and webui.py # on 13th Sep 2013 from http://bitbucket.org/pypa/pypi legal_package_name = re.compile(r"^[a-z0-9\._-]+$", re.IGNORECASE) safe_filenames = re.compile(r'.+?\.(exe|tar\.gz|bz2|rpm|deb|zip|tgz|egg|dmg|msi|whl)$', re.I) safe_name_rex = re.compile('[^A-Za-z0-9]+') def normalize_name(name): """Convert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. """ name = ensure_unicode(name) return safe_name_rex.sub('-', name).lower() def safe_version(version): """Convert an arbitrary string to a standard version string Spaces become dots, and all other non-alphanumeric characters become dashes, with runs of multiple dashes condensed to a single dash. """ version = version.replace(' ','.') return safe_name_rex.sub('-', version) def is_valid_archive_name(filename): return safe_filenames.match(filename) def validate_metadata(data): # from https://bitbucket.org/pypa/pypi/src/1e31fd3cc7a72e4aa54a2bd79d50be5c8c0a3b1e/webui.py?at=default#cl-2124 ''' Validate the contents of the metadata. ''' if not data.get('name', ''): raise ValueError('Missing required field "name"') if not data.get('version', ''): raise ValueError('Missing required field "version"') if 'metadata_version' in data: #metadata_version = data['metadata_version'] del data['metadata_version'] #else: # metadata_version = '1.0' # default # Ensure that package names follow a restricted set of characters. # These characters are: # * ASCII letters (``[a-zA-Z]``) # * ASCII digits (``[0-9]``) # * underscores (``_``) # * hyphens (``-``) # * periods (``.``) # The reasoning for this restriction is codified in PEP426. For the # time being this check is only validated against brand new packages # and not pre-existing packages because of existing names that violate # this policy. if legal_package_name.search(data["name"]) is None: raise ValueError("Invalid package name. Names must contain " "only ASCII letters, digits, underscores, " "hyphens, and periods") if not data["name"][0].isalnum(): raise ValueError("Invalid package name. 
Names must start with " "an ASCII letter or digit") if not data["name"][-1].isalnum(): raise ValueError("Invalid package name. Names must end with " "an ASCII letter or digit") # Traditionally, package names are restricted only for # technical reasons; / is not allowed because it may be # possible to break path names for file and documentation # uploads if '/' in data['name']: raise ValueError("Invalid package name") # again, this is a restriction required by the implementation and not # mentiond in documentation; ensure name and version are valid for URLs if re.search('[<>%#"]', data['name'] + data['version']): raise ValueError('Invalid package name or version (URL safety)') # disabled some checks # # check requires and obsoletes # def validate_version_predicates(col, sequence): # try: # map(versionpredicate.VersionPredicate, sequence) # except ValueError, message: # raise ValueError, 'Bad "%s" syntax: %s'%(col, message) # for col in ('requires', 'obsoletes'): # if data.has_key(col) and data[col]: # validate_version_predicates(col, data[col]) # # # check provides # if data.has_key('provides') and data['provides']: # try: # map(versionpredicate.check_provision, data['provides']) # except ValueError, message: # raise ValueError, 'Bad "provides" syntax: %s'%message # # # check PEP 345 fields # if metadata_version == '1.2': # self._validate_metadata_1_2(data) # # # check classifiers # if data.has_key('classifiers'): # d = {} # for entry in self.store.get_classifiers(): # d[entry['classifier']] = 1 # for entry in data['classifiers']: # if d.has_key(entry): # continue # raise ValueError, 'Invalid classifier "%s"'%entry devpi-common-3.2.2/devpi_common/vendor/0000755000076500000240000000000013263321717021042 5ustar fschulzestaff00000000000000devpi-common-3.2.2/devpi_common/vendor/__init__.py0000644000076500000240000000000213263321713023137 0ustar fschulzestaff00000000000000# devpi-common-3.2.2/devpi_common/vendor/_pip.py0000644000076500000240000000731113263321713022341 0ustar fschulzestaff00000000000000""" Code taken from pip's index.py for scraping links note XXX for changes: - clean_link() is not applied """ import re from devpi_common.url import urljoin class HTMLPage(object): """Represents one page, along with its URL""" ## FIXME: these regexes are horrible hacks: _homepage_re = re.compile(r'\s*home\s*page', re.I) _download_re = re.compile(r'\s*download\s+url', re.I) ## These aren't so aweful: _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I) _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S) _base_re = re.compile(r"""]+)""", re.I) def __init__(self, content, url, headers=None): self.content = content self.url = url self.headers = headers def __str__(self): return self.url @property def base_url(self): if not hasattr(self, "_base_url"): match = self._base_re.search(self.content) if match: self._base_url = match.group(1) else: self._base_url = self.url return self._base_url @property def links(self): """Yields all links in the page""" for match in self._href_re.finditer(self.content): url = match.group(1) or match.group(2) or match.group(3) # CHANGED from PIP original: catch parsing errors try: url = self.clean_link(urljoin(self.base_url, url)) except ValueError: continue yield Link(url, self) def rel_links(self, rels=('homepage', 'download')): for url in self.explicit_rel_links(rels): yield url for url in self.scraped_rel_links(): yield url def explicit_rel_links(self, rels=('homepage', 'download')): """Yields all links with the given 
relations""" for match in self._rel_re.finditer(self.content): found_rels = match.group(1).lower().split() for rel in rels: if rel in found_rels: break else: continue match = self._href_re.search(match.group(0)) if not match: continue url = match.group(1) or match.group(2) or match.group(3) url = self.clean_link(urljoin(self.base_url, url)) yield Link(url, self) def scraped_rel_links(self): for regex in (self._homepage_re, self._download_re): match = regex.search(self.content) if not match: continue href_match = self._href_re.search(self.content, pos=match.end()) if not href_match: continue url = href_match.group(1) or href_match.group(2) or href_match.group(3) if not url: continue url = self.clean_link(urljoin(self.base_url, url)) yield Link(url, self) _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) def clean_link(self, url): """Makes sure a link is fully encoded. That is, if a ' ' shows up in the link, it will be rewritten to %20 (while not over-quoting % or other characters).""" # XXX CHANGE from PIP ORIGINAL return url return self._clean_re.sub( lambda match: '%%%2x' % ord(match.group(0)), url) class Link(object): def __init__(self, url, comes_from=None): self.url = url self.comes_from = comes_from def __str__(self): if self.comes_from: return '%s (from %s)' % (self.url, self.comes_from) else: return str(self.url) def __repr__(self): return '' % self devpi-common-3.2.2/devpi_common/vendor/_verlib.py0000644000076500000240000002727413263321713023046 0ustar fschulzestaff00000000000000""" PEP386-version comparison algorithm. (c) Tarek Ziade and others extracted unmodified from https://bitbucket.org/tarek/distutilsversion licensed under the PSF license (i guess) """ import re class IrrationalVersionError(Exception): """This is an irrational version.""" pass class HugeMajorVersionNumError(IrrationalVersionError): """An irrational version because the major version number is huge (often because a year or date was used). See `error_on_huge_major_num` option in `NormalizedVersion` for details. This guard can be disabled by setting that option False. """ pass # A marker used in the second and third parts of the `parts` tuple, for # versions that don't have those segments, to sort properly. An example # of versions in sort order ('highest' last): # 1.0b1 ((1,0), ('b',1), ('f',)) # 1.0.dev345 ((1,0), ('f',), ('dev', 345)) # 1.0 ((1,0), ('f',), ('f',)) # 1.0.post256.dev345 ((1,0), ('f',), ('f', 'post', 256, 'dev', 345)) # 1.0.post345 ((1,0), ('f',), ('f', 'post', 345, 'f')) # ^ ^ ^ # 'b' < 'f' ---------------------/ | | # | | # 'dev' < 'f' < 'post' -------------------/ | # | # 'dev' < 'f' ----------------------------------------------/ # Other letters would do, but 'f' for 'final' is kind of nice. FINAL_MARKER = ('f',) VERSION_RE = re.compile(r''' ^ (?P\d+\.\d+) # minimum 'N.N' (?P(?:\.\d+)*) # any number of extra '.N' segments (?: (?P[abc]|rc) # 'a'=alpha, 'b'=beta, 'c'=release candidate # 'rc'= alias for release candidate (?P\d+(?:\.\d+)*) )? (?P(\.post(?P\d+))?(\.dev(?P\d+))?)? $''', re.VERBOSE) class NormalizedVersion(object): """A rational version. Good: 1.2 # equivalent to "1.2.0" 1.2.0 1.2a1 1.2.3a2 1.2.3b1 1.2.3c1 1.2.3.4 TODO: fill this out Bad: 1 # mininum two numbers 1.2a # release level must have a release serial 1.2.3b """ def __init__(self, s, error_on_huge_major_num=True): """Create a NormalizedVersion instance from a version string. @param s {str} The version string. 
@param error_on_huge_major_num {bool} Whether to consider an apparent use of a year or full date as the major version number an error. Default True. One of the observed patterns on PyPI before the introduction of `NormalizedVersion` was version numbers like this: 2009.01.03 20040603 2005.01 This guard is here to strongly encourage the package author to use an alternate version, because a release deployed into PyPI and, e.g. downstream Linux package managers, will forever remove the possibility of using a version number like "1.0" (i.e. where the major number is less than that huge major number). """ self._parse(s, error_on_huge_major_num) @classmethod def from_parts(cls, version, prerelease=FINAL_MARKER, devpost=FINAL_MARKER): return cls(cls.parts_to_str((version, prerelease, devpost))) def _parse(self, s, error_on_huge_major_num=True): """Parses a string version into parts.""" match = VERSION_RE.search(s) if not match: raise IrrationalVersionError(s) groups = match.groupdict() parts = [] # main version block = self._parse_numdots(groups['version'], s, False, 2) extraversion = groups.get('extraversion') if extraversion not in ('', None): block += self._parse_numdots(extraversion[1:], s) parts.append(tuple(block)) # prerelease prerel = groups.get('prerel') if prerel is not None: block = [prerel] block += self._parse_numdots(groups.get('prerelversion'), s, pad_zeros_length=1) parts.append(tuple(block)) else: parts.append(FINAL_MARKER) # postdev if groups.get('postdev'): post = groups.get('post') dev = groups.get('dev') postdev = [] if post is not None: postdev.extend([FINAL_MARKER[0], 'post', int(post)]) if dev is None: postdev.append(FINAL_MARKER[0]) if dev is not None: postdev.extend(['dev', int(dev)]) parts.append(tuple(postdev)) else: parts.append(FINAL_MARKER) self.parts = tuple(parts) if error_on_huge_major_num and self.parts[0][0] > 1980: raise HugeMajorVersionNumError("huge major version number, %r, " "which might cause future problems: %r" % (self.parts[0][0], s)) def _parse_numdots(self, s, full_ver_str, drop_trailing_zeros=True, pad_zeros_length=0): """Parse 'N.N.N' sequences, return a list of ints. @param s {str} 'N.N.N..." sequence to be parsed @param full_ver_str {str} The full version string from which this comes. Used for error strings. @param drop_trailing_zeros {bool} Whether to drop trailing zeros from the returned list. Default True. @param pad_zeros_length {int} The length to which to pad the returned list with zeros, if necessary. Default 0. """ nums = [] for n in s.split("."): if len(n) > 1 and n[0] == '0': raise IrrationalVersionError("cannot have leading zero in " "version number segment: '%s' in %r" % (n, full_ver_str)) nums.append(int(n)) if drop_trailing_zeros: while nums and nums[-1] == 0: nums.pop() while len(nums) < pad_zeros_length: nums.append(0) return nums def __str__(self): return self.parts_to_str(self.parts) @classmethod def parts_to_str(cls, parts): """Transforms a version expressed in tuple into its string representation.""" # XXX This doesn't check for invalid tuples main, prerel, postdev = parts s = '.'.join(str(v) for v in main) if prerel is not FINAL_MARKER: s += prerel[0] s += '.'.join(str(v) for v in prerel[1:]) if postdev and postdev is not FINAL_MARKER: if postdev[0] == 'f': postdev = postdev[1:] i = 0 while i < len(postdev): if i % 2 == 0: s += '.' 
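# even-indexed entries of postdev are the 'post'/'dev' markers (each preceded by the dot added above), odd-indexed entries are their numbers, so ('post', 256, 'dev', 345) renders as '.post256.dev345'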
s += str(postdev[i]) i += 1 return s def __repr__(self): return "%s('%s')" % (self.__class__.__name__, self) def _cannot_compare(self, other): raise TypeError("cannot compare %s and %s" % (type(self).__name__, type(other).__name__)) def __eq__(self, other): if not isinstance(other, NormalizedVersion): self._cannot_compare(other) return self.parts == other.parts def __lt__(self, other): if not isinstance(other, NormalizedVersion): self._cannot_compare(other) return self.parts < other.parts def __ne__(self, other): return not self.__eq__(other) def __gt__(self, other): return not (self.__lt__(other) or self.__eq__(other)) def __le__(self, other): return self.__eq__(other) or self.__lt__(other) def __ge__(self, other): return self.__eq__(other) or self.__gt__(other) def suggest_normalized_version(s): """Suggest a normalized version close to the given version string. If you have a version string that isn't rational (i.e. NormalizedVersion doesn't like it) then you might be able to get an equivalent (or close) rational version from this function. This does a number of simple normalizations to the given string, based on observation of versions currently in use on PyPI. Given a dump of those version during PyCon 2009, 4287 of them: - 2312 (53.93%) match NormalizedVersion without change - with the automatic suggestion - 3474 (81.04%) match when using this suggestion method @param s {str} An irrational version string. @returns A rational version string, or None, if couldn't determine one. """ try: NormalizedVersion(s) return s # already rational except IrrationalVersionError: pass rs = s.lower() # part of this could use maketrans for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), ('beta', 'b'), ('rc', 'c'), ('-final', ''), ('-pre', 'c'), ('-release', ''), ('.release', ''), ('-stable', ''), ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), ('final', '')): rs = rs.replace(orig, repl) # if something ends with dev or pre, we add a 0 rs = re.sub(r"pre$", r"pre0", rs) rs = re.sub(r"dev$", r"dev0", rs) # if we have something like "b-2" or "a.2" at the end of the # version, that is pobably beta, alpha, etc # let's remove the dash or dot rs = re.sub(r"([abc|rc])[\-\.](\d+)$", r"\1\2", rs) # 1.0-dev-r371 -> 1.0.dev371 # 0.1-dev-r79 -> 0.1.dev79 rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) # Clean: v0.3, v1.0 if rs.startswith('v'): rs = rs[1:] # Clean leading '0's on numbers. #TODO: unintended side-effect on, e.g., "2003.05.09" # PyPI stats: 77 (~2%) better rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers # zero. 
# PyPI stats: 245 (7.56%) better rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) # the 'dev-rNNN' tag is a dev tag rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) # clean the - when used as a pre delimiter rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) # a terminal "dev" or "devel" can be changed into ".dev0" rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) # a terminal "dev" can be changed into ".dev0" rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) # a terminal "final" or "stable" can be removed rs = re.sub(r"(final|stable)$", "", rs) # The 'r' and the '-' tags are post release tags # 0.4a1.r10 -> 0.4a1.post10 # 0.9.33-17222 -> 0.9.3.post17222 # 0.9.33-r17222 -> 0.9.3.post17222 rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) # Clean 'r' instead of 'dev' usage: # 0.9.33+r17222 -> 0.9.3.dev17222 # 1.0dev123 -> 1.0.dev123 # 1.0.git123 -> 1.0.dev123 # 1.0.bzr123 -> 1.0.dev123 # 0.1a0dev.123 -> 0.1a0.dev123 # PyPI stats: ~150 (~4%) better rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: # 0.2.pre1 -> 0.2c1 # 0.2-c1 -> 0.2c1 # 1.0preview123 -> 1.0c123 # PyPI stats: ~21 (0.62%) better rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) # Tcl/Tk uses "px" for their post release markers rs = re.sub(r"p(\d+)$", r".post\1", rs) try: NormalizedVersion(rs) return rs # already rational except IrrationalVersionError: pass return None devpi-common-3.2.2/devpi_common/viewhelp.py0000644000076500000240000000573713263321713021752 0ustar fschulzestaff00000000000000import posixpath from .url import URL class ViewLinkStore: def __init__(self, url, versiondata): self.url = URL(url) self.versiondata = versiondata def get_links(self, rel=None, for_href=None, basename=None): l = [] for linkdict in self.versiondata.get("+links", []): viewlink = ViewLink(self.url, linkdict) if (not rel or viewlink.rel == rel) and \ (not for_href or viewlink.for_href==for_href) and \ (not basename or viewlink.basename == basename): l.append(viewlink) return l def get_link(self, rel=None, basename=None, for_href=None): links = self.get_links(rel=rel, basename=basename, for_href=for_href) assert len(links) == 1 return links[0] def shadowed(self): l = [] for verdata in self.versiondata.get("+shadowing", []): l.append(ViewLinkStore(self.url.url, verdata)) return l class ViewLink: def __init__(self, base_url, linkdict): self.__dict__.update(linkdict) self.href = base_url.joinpath(self.href).url self.basename = posixpath.basename(self.href) def __repr__(self): return "<%s rel=%r href=%r>" % ( self.__class__.__name__, self.rel, self.href) class ToxResultEnv: def __init__(self, result, envname): self.host = result["host"] self.platform = result["platform"] self.envname = envname self.key = (self.host, self.platform, self.envname) env = result["testenvs"][envname] try: self.pyversion = env["python"]["version"].split(None, 1)[0] except KeyError: self.pyversion = None self.get = env.get self.setup = self._get_commands_info(self.get("setup", [])) self.test = self._get_commands_info(self.get("test", [])) self.failed = self.setup["failed"] or self.test["failed"] def _get_commands_info(self, commands): result = dict( failed=any(x["retcode"] != "0" for x in commands), commands=[]) for command in commands: result["commands"].append(dict( failed=command["retcode"] != "0", command=" ".join(command.get("command", [])), output=command.get("output", []))) return result def get_toxenvs(toxresult, seen, newest=True): envs = [] for envname in sorted(toxresult["testenvs"]): 
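# wrap each test environment in a ToxResultEnv; its (host, platform, envname) key is used to skip combinations already seen, so with newest=True only the most recent result per combination is kept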
toxenv = ToxResultEnv(toxresult, envname) if toxenv.key in seen: continue if newest: seen.add(toxenv.key) envs.append(toxenv) return envs def iter_toxresults(links, load, newest=True): seen = set() for link in reversed(links): try: toxresult = load(link) except IOError: yield link, None continue try: yield link, get_toxenvs(toxresult, seen, newest=newest) except KeyError: yield link, None devpi-common-3.2.2/devpi_common.egg-info/0000755000076500000240000000000013263321717021237 5ustar fschulzestaff00000000000000devpi-common-3.2.2/devpi_common.egg-info/dependency_links.txt0000644000076500000240000000000113263321717025305 0ustar fschulzestaff00000000000000 devpi-common-3.2.2/devpi_common.egg-info/PKG-INFO0000644000076500000240000000460113263321717022335 0ustar fschulzestaff00000000000000Metadata-Version: 1.1 Name: devpi-common Version: 3.2.2 Summary: utilities jointly used by devpi-server and devpi-client Home-page: https://github.com/devpi/devpi Author: Holger Krekel Author-email: holger@merlinux.eu License: UNKNOWN Description: This package contains utility functions used by devpi-server and devpi-client. See http://doc.devpi.net for more information. ========= Changelog ========= .. towncrier release notes start 3.2.2 (2018-04-11) ================== Other Changes ------------- - fix deprecation warning from pkg_resources. 3.2.1 (2018-01-18) ================== Bug Fixes --------- - fix issue496: PyPy 5.10 wheel upload failed because the version in the filename is longer again, the check for it is now removed, because it's pointless. 3.2.0 (2017-11-23) ================== No significant changes. 3.2.0rc1 (2017-09-08) ===================== Bug Fixes --------- - fix issue343: enhanced ``splitbasename`` to split the name and version correctly in more cases. - fix for url decoding issue with mirrors. When package filenames contain characters such as `!` or `+`, these get URL encoded to `%21` and `%2B` in the remote simple index. This fix ensures that in the filename saved to the disk cache these are decoded back to `!` or `+`. 3.1.0 (2017-04-18) ================== - add ``username``, ``password``, ``hostname`` and ``port`` properties to URL objects - expose SSLError on Session object to allow checking for verification errors - add ``max_retries`` keyword option to ``new_requests_session``. - fix ``get_latest_version`` when there are no versions. 
Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: MIT License devpi-common-3.2.2/devpi_common.egg-info/requires.txt0000644000076500000240000000003313263321717023633 0ustar fschulzestaff00000000000000requests>=2.3.0 py>=1.4.20 devpi-common-3.2.2/devpi_common.egg-info/SOURCES.txt0000644000076500000240000000140113263321717023117 0ustar fschulzestaff00000000000000CHANGELOG LICENSE MANIFEST.in README.rst setup.cfg setup.py tox.ini devpi_common/__init__.py devpi_common/archive.py devpi_common/metadata.py devpi_common/proc.py devpi_common/request.py devpi_common/types.py devpi_common/url.py devpi_common/validation.py devpi_common/viewhelp.py devpi_common.egg-info/PKG-INFO devpi_common.egg-info/SOURCES.txt devpi_common.egg-info/dependency_links.txt devpi_common.egg-info/requires.txt devpi_common.egg-info/top_level.txt devpi_common/vendor/__init__.py devpi_common/vendor/_pip.py devpi_common/vendor/_verlib.py testing/test_archive.py testing/test_lazydecorator.py testing/test_metadata.py testing/test_proc.py testing/test_request.py testing/test_types.py testing/test_url.py testing/test_validation.py testing/data/slash.tar.gzdevpi-common-3.2.2/devpi_common.egg-info/top_level.txt0000644000076500000240000000001513263321717023765 0ustar fschulzestaff00000000000000devpi_common devpi-common-3.2.2/LICENSE0000644000076500000240000000204513263321713016070 0ustar fschulzestaff00000000000000 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. devpi-common-3.2.2/MANIFEST.in0000644000076500000240000000020313263321713016613 0ustar fschulzestaff00000000000000include CHANGELOG include README.rst include setup.py include tox.ini include LICENSE recursive-include testing *.py graft testing devpi-common-3.2.2/PKG-INFO0000644000076500000240000000460113263321717016164 0ustar fschulzestaff00000000000000Metadata-Version: 1.1 Name: devpi-common Version: 3.2.2 Summary: utilities jointly used by devpi-server and devpi-client Home-page: https://github.com/devpi/devpi Author: Holger Krekel Author-email: holger@merlinux.eu License: UNKNOWN Description: This package contains utility functions used by devpi-server and devpi-client. See http://doc.devpi.net for more information. ========= Changelog ========= .. towncrier release notes start 3.2.2 (2018-04-11) ================== Other Changes ------------- - fix deprecation warning from pkg_resources. 
3.2.1 (2018-01-18) ================== Bug Fixes --------- - fix issue496: PyPy 5.10 wheel upload failed because the version in the filename is longer again, the check for it is now removed, because it's pointless. 3.2.0 (2017-11-23) ================== No significant changes. 3.2.0rc1 (2017-09-08) ===================== Bug Fixes --------- - fix issue343: enhanced ``splitbasename`` to split the name and version correctly in more cases. - fix for url decoding issue with mirrors. When package filenames contain characters such as `!` or `+`, these get URL encoded to `%21` and `%2B` in the remote simple index. This fix ensures that in the filename saved to the disk cache these are decoded back to `!` or `+`. 3.1.0 (2017-04-18) ================== - add ``username``, ``password``, ``hostname`` and ``port`` properties to URL objects - expose SSLError on Session object to allow checking for verification errors - add ``max_retries`` keyword option to ``new_requests_session``. - fix ``get_latest_version`` when there are no versions. Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: MIT License devpi-common-3.2.2/README.rst0000644000076500000240000000020013263321713016541 0ustar fschulzestaff00000000000000 This package contains utility functions used by devpi-server and devpi-client. See http://doc.devpi.net for more information. devpi-common-3.2.2/setup.cfg0000644000076500000240000000021013263321717016700 0ustar fschulzestaff00000000000000[bdist_wheel] universal = 1 [devpi:upload] formats = sdist.tgz,bdist_wheel [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 devpi-common-3.2.2/setup.py0000755000076500000240000000250413263321713016600 0ustar fschulzestaff00000000000000#! 
/usr/bin/env python import io import os import re from setuptools import setup, find_packages def get_changelog(): text = io.open(os.path.join(here, 'CHANGELOG'), encoding='utf-8').read() header_matches = list(re.finditer('^=+$', text, re.MULTILINE)) # until fifth header text = text[:header_matches[5].start()] # all lines without fifth release number lines = text.splitlines()[:-1] return "=========\nChangelog\n=========\n\n" + "\n".join(lines) if __name__ == "__main__": here = os.path.abspath(".") README = io.open(os.path.join(here, 'README.rst'), encoding='utf-8').read() CHANGELOG = get_changelog() setup( name="devpi-common", description="utilities jointly used by devpi-server and devpi-client", long_description="\n\n".join([README, CHANGELOG]), version='3.2.2', packages=find_packages(), install_requires=["requests>=2.3.0", "py>=1.4.20"], url="https://github.com/devpi/devpi", maintainer="Holger Krekel", maintainer_email="holger@merlinux.eu", classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", ], ) devpi-common-3.2.2/testing/0000755000076500000240000000000013263321717016543 5ustar fschulzestaff00000000000000devpi-common-3.2.2/testing/data/0000755000076500000240000000000013263321717017454 5ustar fschulzestaff00000000000000devpi-common-3.2.2/testing/data/slash.tar.gz0000644000076500000240000000015713263321713021714 0ustar fschulzestaff00000000000000devpi-common-3.2.2/testing/test_archive.py0000644000076500000240000001020413263321713021570 0ustar fschulzestaff00000000000000from subprocess import Popen, PIPE import py import pytest import sys from devpi_common.archive import * datadir = py.path.local(__file__).dirpath("data") def check_files(tmpdir): assert tmpdir.join("1").isfile() assert tmpdir.join("sub", "1").isfile() def _writedir(tmpdir, contentdict, prefixes=()): for name, val in contentdict.items(): if isinstance(val, dict): newprefixes = prefixes + (name,) if not val: tmpdir.mkdir(*newprefixes) else: _writedir(tmpdir, val, newprefixes) else: tmpdir.ensure(*(prefixes + (name,))).write(val) def create_tarfile_fromdict(tmpdir, contentdict): tar = py.path.local.sysfind("tar") if not tar: pytest.skip("tar command not found") if sys.platform.startswith('win'): pytest.skip("tar command not working properly on Windows") tardir = tmpdir.join("create") _writedir(tardir, contentdict) files = [x.relto(tardir) for x in tardir.visit(lambda x: x.isfile())] with tardir.as_cwd(): popen = Popen([str(tar), "cvf", "-" ] + files, stdout=PIPE) out, err = popen.communicate() return out @pytest.fixture(params=["tar", "zip"]) def archive_path(request, tmpdir): contentdict = {"1": "file1", "sub": {"1": "subfile"}} if request.param == "zip": content = zip_dict(contentdict) else: content = create_tarfile_fromdict(tmpdir, contentdict) p = tmpdir.join("content.%s" % request.param) p.write(content, "wb") return p class TestArchive: @pytest.yield_fixture(params=["path", "file"]) def archive(self, request, archive_path): if request.param == "path": arch = Archive(archive_path) else: f = archive_path.open("rb") arch = Archive(f) yield arch arch.close() def test_namelist(self, archive): namelist = archive.namelist() assert len(namelist) == 2 assert "1" in namelist assert "sub/1" in namelist def test_unknown_archive(self): with pytest.raises(UnsupportedArchive): 
Archive(py.io.BytesIO(b"123")) def test_read(self, archive): assert archive.read("1") == b"file1" assert archive.read("sub/1") == b"subfile" def test_getfile(self, archive): assert archive.getfile("1").read() == b"file1" assert archive.getfile("sub/1").read() == b"subfile" def test_getfile_not_exists(self, archive): with pytest.raises(archive.FileNotExist): archive.getfile("123") assert issubclass(archive.FileNotExist, ValueError) def test_extract(self, archive, tmpdir): target = tmpdir.join("extract") archive.extract(target) assert target.join("1").read() == "file1" assert target.join("sub/1").read() == "subfile" def test_printdir(self, archive, capsys): archive.printdir() out, err = capsys.readouterr() assert "sub/1" in out def test_tarfile_outofbound(tmpdir): with Archive(datadir.join("slash.tar.gz")) as archive: with pytest.raises(ValueError): archive.extract(tmpdir) #def test_zipfile_outofbound(tmpdir): # archive = get_archive(datadir.join("slash.zip").read()) # with pytest.raises(ValueError): # archive.extract(tmpdir) def test_zip_dict(tmpdir): content = zip_dict({"one": {"nested": "1"}, "two": {}}) with Archive(py.io.BytesIO(content)) as archive: archive.extract(tmpdir) assert tmpdir.join("one", "nested").read() == "1" assert tmpdir.join("two").isdir() def test_zip_dir(tmpdir): source = tmpdir.join("source") newdest = tmpdir.join("newdest") dest = tmpdir.join("dest.zip") source.ensure("file") source.ensure("sub", "subfile") zip_dir(source, dest) with Archive(dest) as archive: archive.extract(newdest) assert newdest.join("file").isfile() assert newdest.join("sub", "subfile").isfile() newdest.remove() with Archive(py.io.BytesIO(zip_dir(source))) as archive: archive.extract(newdest) assert newdest.join("file").isfile() assert newdest.join("sub", "subfile").isfile() devpi-common-3.2.2/testing/test_lazydecorator.py0000644000076500000240000000353313263321713023036 0ustar fschulzestaff00000000000000from devpi_common.types import lazydecorator pytest_plugins = "pytester" def test_simpler(): dec = lazydecorator() class A: @dec(5) @dec(1, kw=3) def f(self): pass a = A() l2 = [] def anotherdec(arg, kw=None): def wrapped(func): l2.append((func, arg, kw)) return func return wrapped dec.discover_and_call(a, anotherdec) assert len(l2) == 2 assert l2[0] == (a.f, 1, 3) assert l2[1] == (a.f, 5, None) def test_simpler_dict(): dec = lazydecorator() @dec() def f(): pass @dec(x=1) def g(): pass d = {"f": f, "g": g, "something": lambda: None} l = dec.discover(d) assert len(l) == 2 assert l[0] == (f, (), {}) assert l[1] == (g, (), dict(x=1)) def test_multi(): dec = lazydecorator() class A: @dec(1) def c(self): pass @dec(2) def b(self): pass @dec(3) def a(self): pass a = A() l2 = [] def anotherdec(arg, kw=None): def wrapped(func): l2.append((func, arg)) return func return wrapped dec.discover_and_call(a, anotherdec) assert len(l2) == 3 assert l2[0] == (a.c, 1) assert l2[1] == (a.b, 2) assert l2[2] == (a.a, 3) def test_simpler_mod(testdir): p = testdir.makepyfile(""" from devpi_common.types import lazydecorator dec = lazydecorator() @dec("world") @dec("hello") def f(): pass """) mod = p.pyimport() l = [] def anotherdec(arg): def wrapped(func): l.append((arg, func)) return func return wrapped mod.dec.discover_and_call(mod, anotherdec) assert len(l) == 2 assert l == [("hello", mod.f), ("world", mod.f)] devpi-common-3.2.2/testing/test_metadata.py0000644000076500000240000001515413263321713021736 0ustar fschulzestaff00000000000000import pytest from devpi_common.metadata import * 
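# A minimal usage sketch of the metadata helpers exercised below; the _example_usage
# name is hypothetical (not collected by pytest), and the expected values are taken
# from the parametrized cases that follow.
def _example_usage():
    # release archives split into (project name, version, suffix)
    assert splitbasename("pytest-2.3.4.zip") == ("pytest", "2.3.4", ".zip")
    # wheels keep their pyver/abi/platform tags in the returned suffix
    assert splitbasename("wheel-0.21.0-py2.py3-none-any.whl") == (
        "wheel", "0.21.0", "-py2.py3-none-any.whl")
    # sdists are classified as ("source", "sdist")
    assert get_pyversion_filetype("x-2.3.zip") == ("source", "sdist")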
@pytest.mark.parametrize(("releasename", "expected"), [
    ("pytest-2.3.4.zip", ("pytest", "2.3.4", ".zip")),
    ("pytest-2.3.4-py27.egg", ("pytest", "2.3.4", "-py27.egg")),
    ("dddttt-0.1.dev38-py2.7.egg", ("dddttt", "0.1.dev38", "-py2.7.egg")),
    ("devpi-0.9.5.dev1-cp26-none-linux_x86_64.whl",
     ("devpi", "0.9.5.dev1", "-cp26-none-linux_x86_64.whl")),
    ("wheel-0.21.0-py2.py3-none-any.whl", ("wheel", "0.21.0", "-py2.py3-none-any.whl")),
    ("green-0.4.0-py2.5-win32.egg", ("green", "0.4.0", "-py2.5-win32.egg")),
    ("Candela-0.2.1.macosx-10.4-x86_64.exe", ("Candela", "0.2.1", ".macosx-10.4-x86_64.exe")),
    ("Cambiatuscromos-0.1.1alpha.linux-x86_64.exe",
     ("Cambiatuscromos", "0.1.1alpha", ".linux-x86_64.exe")),
    ("Aesthete-0.4.2.win32.exe", ("Aesthete", "0.4.2", ".win32.exe")),
    ("DTL-1.0.5.win-amd64.exe", ("DTL", "1.0.5", ".win-amd64.exe")),
    ("Cheetah-2.2.2-1.x86_64.rpm", ("Cheetah", "2.2.2-1", ".x86_64.rpm")),
    ("Cheetah-2.2.2-1.src.rpm", ("Cheetah", "2.2.2-1", ".src.rpm")),
    ("Cheetah-2.2.2-1.x85.rpm", ("Cheetah", "2.2.2-1", ".x85.rpm")),
    ("Cheetah-2.2.2.dev1.x85.rpm", ("Cheetah", "2.2.2.dev1", ".x85.rpm")),
    ("Cheetah-2.2.2.dev1.noarch.rpm", ("Cheetah", "2.2.2.dev1", ".noarch.rpm")),
    ("deferargs.tar.gz", ("deferargs", "", ".tar.gz")),
    ("hello-1.0.doc.zip", ("hello", "1.0", ".doc.zip")),
    ("Twisted-12.0.0.win32-py2.7.msi", ("Twisted", "12.0.0", ".win32-py2.7.msi")),
    ("django_ipware-0.0.8-py3-none-any.whl", ("django_ipware", "0.0.8", "-py3-none-any.whl")),
    ("my-binary-package-name-1-4-3-yip-0.9.tar.gz",
     ("my-binary-package-name-1-4-3-yip", "0.9", ".tar.gz")),
    ("my-binary-package-name-1-4-3-yip-0.9+deadbeef.tar.gz",
     ("my-binary-package-name-1-4-3-yip", "0.9+deadbeef", ".tar.gz")),
    ("cffi-1.6.0-pp251-pypy_41-macosx_10_11_x86_64.whl",
     ("cffi", "1.6.0", "-pp251-pypy_41-macosx_10_11_x86_64.whl")),
    ("argon2_cffi-18.2.0.dev0.0-pp2510-pypy_41-macosx_10_13_x86_64.whl",
     ("argon2_cffi", "18.2.0.dev0.0", "-pp2510-pypy_41-macosx_10_13_x86_64.whl")),
])
def test_splitbasename(releasename, expected):
    result = splitbasename(releasename)
    assert result == expected


@pytest.mark.parametrize(("releasename", "expected"), [
    ("x-2.3.zip", ("source", "sdist")),
    ("x-2.3-0.4.0.win32-py3.1.exe", ("3.1", "bdist_wininst")),
    ("x-2.3-py27.egg", ("2.7", "bdist_egg")),
    ("wheel-0.21.0-py2.py3-none-any.whl", ("2.7", "bdist_wheel")),
    ("devpi-0.9.5.dev1-cp26-none-linux_x86_64.whl", ("2.6", "bdist_wheel")),
    ("greenlet-0.4.0-py3.3-win-amd64.egg", ("3.3", "bdist_egg")),
    ("greenlet-0.4.0.linux-x86_64.tar.gz", ("any", "bdist_dumb")),
    ("cffi-1.6.0-pp251-pypy_41-macosx_10_11_x86_64.whl", ("2.5.1", "bdist_wheel")),
    ("cryptography-1.4-pp253-pypy_41-linux_x86_64.whl", ("2.5.3", "bdist_wheel")),
    ("argon2_cffi-18.2.0.dev0.0-pp2510-pypy_41-macosx_10_13_x86_64.whl",
     ("2.5.1.0", "bdist_wheel")),
])
def test_get_pyversion_filetype(releasename, expected):
    result = get_pyversion_filetype(releasename)
    assert result == expected


@pytest.mark.parametrize(("releasename", "expected"), [
    ("pytest-2.3.4.zip", ("pytest-2.3.4", ".zip")),
    ("green-0.4.0-py2.5-win32.egg", ("green-0.4.0-py2.5-win32", ".egg")),
    ("green-0.4.0-py2.5-win32.egg", ("green-0.4.0-py2.5-win32", ".egg")),
    ("green-1.0.tar.gz", ("green-1.0", ".tar.gz")),
    ("green-1.0.tar.gz", ("green-1.0", ".tar.gz")),
    ("green-1.0.doc.zip", ("green-1.0", ".doc.zip")),
])
def test_splitext_archive(releasename, expected):
    assert splitext_archive(releasename) == expected


@pytest.mark.parametrize(("expected", "versions"), [
    (None, []),
    ("1.0", ["1.0"]),
    ("1.0", ["1.0", "0.9"]),
    ("1.0.1.dev0", ["1.0", "1.0.1.dev0"]),
    ("2.0-alpha1", ["1.0", "2.0a0", "2.0.a0", "2.0-alpha1"]),
["1.0", "2.0a0", "2.0.a0", "2.0-alpha1"]), ("2.0-beta1", ["1.0", "2.0b0", "2.0.b0", "2.0-beta1"]), ("2.0-rc1", ["1.0", "2.0rc0", "2.0.rc0", "2.0-rc1"]), ("2.0-pre1", ["1.0", "2.0pre0", "2.0.pre0", "2.0-pre1"]), ]) def test_get_latest_version(expected, versions): assert get_latest_version(versions) == expected @pytest.mark.parametrize(("expected", "versions"), [ (None, ["1.0rc1"]), ("1.0", ["1.0"]), ("1.0", ["1.0", "0.9"]), ("1.0", ["1.0", "1.0.1.dev0"]), ("1.0", ["1.0", "2.0a0", "2.0.a0", "2.0-alpha1"]), ("1.0", ["1.0", "2.0b0", "2.0.b0", "2.0-beta1"]), ("1.0", ["1.0", "2.0rc0", "2.0.rc0", "2.0-rc1"]), ("1.0", ["1.0", "2.0pre0", "2.0.pre0", "2.0-pre1"]), ]) def test_get_latest_stable_version(expected, versions): assert get_latest_version(versions, stable=True) == expected def test_version(): ver1 = Version("1.0") ver2 = Version("1.1") assert max([ver1, ver2]) == ver2 class TestBasenameMeta: def test_doczip(self): meta1 = BasenameMeta("x-1.0.doc.zip") assert meta1.name == "x" assert meta1.version == "1.0" assert meta1.ext == ".doc.zip" def test_two_comparison(self): meta1 = BasenameMeta("x-1.0.tar.gz") meta2 = BasenameMeta("x-1.1.tar.gz") assert meta1 != meta2 assert meta1 < meta2 assert meta1.name == "x" assert meta1.version == "1.0" assert meta1.ext == ".tar.gz" assert meta1.obj == "x-1.0.tar.gz" def test_normalize_equal(self): meta1 = BasenameMeta("x-1.0.tar.gz") meta2 = BasenameMeta("X-1.0.tar.gz") assert meta1 == meta2 meta3 = BasenameMeta("X-1.0.zip") assert meta3 != meta1 assert meta3 > meta1 def test_basename_attribute(self): class B: basename = "x-1.0.tar.gz" meta1 = BasenameMeta(B) meta2 = BasenameMeta("x-1.0.tar.gz") assert meta1 == meta2 def test_noversion_sameproject(self): meta1 = BasenameMeta("py-1.0.zip", sameproject=True) meta2 = BasenameMeta("master", sameproject=True) meta3 = BasenameMeta("zer", sameproject=True) assert meta1 > meta2 assert meta2 < meta3 assert meta1 > meta3 def test_notsameproject(self): meta1 = BasenameMeta("py-1.0.zip") meta2 = BasenameMeta("abc-1.0.zip") meta3 = BasenameMeta("zbc-1.0.zip") assert meta1 > meta2 assert meta1 < meta3 def test_sort_sameproject_links(self): links = ["master", "py-1.0.zip", "trunk.zip", "py-1.1.tgz"] newlinks = sorted_sameproject_links(links) assert newlinks == ["py-1.1.tgz", "py-1.0.zip", "trunk.zip", "master"] def test_parse_requirement(): req = parse_requirement("hello>=1.0") assert req.project_name == "hello" assert "1.0" in req assert "0.9" not in req -1.0 devpi-common-3.2.2/testing/test_proc.py0000644000076500000240000000056113263321713021115 0ustar fschulzestaff00000000000000import pytest import py from devpi_common.proc import * @pytest.fixture def hg(): hg = py.path.local.sysfind("hg") if not hg: pytest.skip("no hg") return str(hg) def test_check_output(hg): assert check_output([hg, "--version"]) def test_checkoutput_error(hg): with pytest.raises(CalledProcessError): check_output([hg, "qlwkje"]) devpi-common-3.2.2/testing/test_request.py0000644000076500000240000000222713263321713021643 0ustar fschulzestaff00000000000000import sys import requests import pytest from devpi_common.request import new_requests_session @pytest.mark.parametrize('max_retries', [ None, 0, 2, ]) def test_env(monkeypatch, max_retries): from urllib3.util.retry import Retry monkeypatch.setenv("HTTP_PROXY", "http://this") monkeypatch.setenv("HTTPS_PROXY", "http://that") orig_increment = Retry.increment increment_retry_totals = [] def increment(self, *args, **kwargs): increment_retry_totals.append(self.total) return orig_increment(self, *args, 
    monkeypatch.setattr(Retry, "increment", increment)
    session = new_requests_session(max_retries=max_retries)
    with pytest.raises(requests.exceptions.RequestException):
        session.get("http://example.com")
    # Retry.total counts down on each increment: the default/0 cases record a
    # single attempt, while max_retries=2 records three attempts.
    assert tuple(increment_retry_totals) in ((0,), (2, 1, 0))


def test_useragent():
    s = new_requests_session(agent=("hello", "1.2"))
    ua = s.headers["user-agent"]
    assert "devpi-hello/1.2" in ua
    assert sys.version.split()[0] in ua
    assert "*" not in ua


def test_exception_attributes():
    session = new_requests_session()
    assert isinstance(session.Errors, tuple)

devpi-common-3.2.2/testing/test_types.py0000644000076500000240000000143313263321713021315 0ustar fschulzestaff00000000000000
from devpi_common.types import *


def test_CompareMixin():
    class A(CompareMixin):
        def __init__(self, count):
            self.cmpval = count

    l = list(map(A, range(10)))
    assert max(reversed(l)).cmpval == 9


def test_parsehashspec():
    hash_algo, hash_value = parse_hash_spec("l1kj23")
    assert hash_algo is None and hash_value is None
    hash_algo, hash_value = parse_hash_spec("xyz=123098123")
    assert hash_algo is None and hash_value is None
    digest = hashlib.md5(b'123').hexdigest()
    hash_algo, hash_value = parse_hash_spec("md5=" + digest)
    assert hash_algo(b'123').hexdigest() == digest
    digest = hashlib.sha256(b'123').hexdigest()
    hash_algo, hash_value = parse_hash_spec("sha256=" + digest)
    assert hash_algo(b'123').hexdigest() == digest

devpi-common-3.2.2/testing/test_url.py0000644000076500000240000002237113263321713020757 0ustar fschulzestaff00000000000000
from __future__ import unicode_literals
import hashlib
import posixpath
import pytest
from devpi_common.url import URL


class TestURL:
    def test_basename(self):
        d = URL("http://codespeak.net/basename")
        assert d.basename == "basename"
        d = URL("http://codespeak.net")
        assert not d.basename

    def test_repr(self):
        d = URL("http://host.com/path")
        assert repr(d) == "<URL 'http://host.com/path'>"

    def test_parentbasename(self):
        d = URL("http://codespeak.net/simple/basename/")
        assert d.parentbasename == "basename"
        assert d.basename == ""

    def test_hashing(self):
        assert hash(URL("http://a")) == hash(URL("http://a"))
        assert URL("http://a") == URL("http://a")

    def test_eggfragment(self):
        url = URL("http://a/py.tar.gz#egg=py-dev")
        assert url.eggfragment == "py-dev"

    def test_md5(self):
        url = URL("http://a/py.tar.gz#md5=123123")
        assert url.md5 == "123123"
        assert url.hash_algo == hashlib.md5
        assert url.hash_value == "123123"

    @pytest.mark.parametrize("hashtype,hash_value", [
        ("sha256", "090123"),
        ("sha224", "1209380123"),
        ("md5", "102938")
    ])
    def test_hashtypes(self, hashtype, hash_value):
        link = URL('py-1.4.12.zip#%s=%s' % (hashtype, hash_value))
        assert link.hash_algo == getattr(hashlib, hashtype)
        assert link.hash_value == hash_value

    def test_nohashtypes(self):
        link = URL("whateveer#lqk=123")
        assert link.hash_value is None and link.hash_algo is None

    @pytest.mark.parametrize("url,path,expected", [
        ("http://root", "dir1", "http://root/dir1"),
        ("http://root", "dir1/", "http://root/dir1/"),
        ("http://root/", "dir1/", "http://root/dir1/"),
        ("http://root/dir1", "dir2", "http://root/dir2"),
        ("http://root/dir1/", "dir2/", "http://root/dir1/dir2/"),
        ("http://root/dir1/", "/dir2", "http://root/dir2"),
        ("http://root/dir1/", "/dir2/", "http://root/dir2/"),
        ("http://root/dir1/dir3", "dir2", "http://root/dir1/dir2"),
        ("http://root/dir1/dir3/", "dir2/", "http://root/dir1/dir3/dir2/"),
        ("http://root/dir1/dir3/", "/dir2", "http://root/dir2"),
        ("http://root/dir1/dir3/", "/dir2/", "http://root/dir2/"),
    ])
    def test_joinpath(self, url, path, expected):
        d_url = URL(url)
        url_joined = d_url.joinpath(path).url
        assert url_joined == expected
        assert URL(url, path).url == expected
        assert d_url.joinpath(path, "end").url == expected.rstrip('/') + "/end"
        assert URL(url, path, "end").url == expected.rstrip('/') + "/end"
        assert d_url.joinpath(path, "end", asdir=1).url == expected.rstrip('/') + "/end/"
        assert URL(url, path, "end", asdir=1).url == expected.rstrip('/') + "/end/"

    def test_addpath(self):
        url = URL("http://root.com/path")
        assert url.addpath("sub").url == "http://root.com/path/sub"
        assert url.addpath("sub", asdir=1).url == "http://root.com/path/sub/"
        url = URL("http://root.com/path/")
        assert url.addpath("sub").url == "http://root.com/path/sub"
        assert url.addpath("sub", asdir=1).url == "http://root.com/path/sub/"

    def test_instantiate_with_url(self):
        url = URL("http://hesie.de")
        assert URL(url) == url

    def test_empty_url(self):
        assert not URL("")
        assert not URL()
        url = URL(None)
        assert url.url == ""

    def test_asdir(self):
        assert URL("http://heise.de").asdir().url == "http://heise.de/"
        assert URL("http://py.org/path").asdir().url == "http://py.org/path/"
        assert URL("http://py.org/path/").asdir().url == "http://py.org/path/"

    def test_asfile(self):
        assert URL("http://heise.de").asfile().url == "http://heise.de"
        assert URL("http://heise.de/").asfile().url == "http://heise.de"
        assert URL("http://x.de/path/").asfile().url == "http://x.de/path"
        assert URL("http://x.de/path").asfile().url == "http://x.de/path"

    def test_joinpath_asdir(self):
        url = URL("http://heise.de")
        new = url.joinpath("hello", asdir=1)
        assert new.url == "http://heise.de/hello/"
        new = url.joinpath("hello/", asdir=1)
        assert new.url == "http://heise.de/hello/"

    def test_geturl_nofrag(self):
        url = URL("http://a/py.tar.gz#egg=py-dev")
        assert url.geturl_nofragment() == "http://a/py.tar.gz"

    def test_url_nofrag(self):
        url = URL("http://a/py.tar.gz#egg=py-dev")
        res = url.url_nofrag
        assert not isinstance(res, URL)
        assert res == "http://a/py.tar.gz"

    @pytest.mark.parametrize("url,path,expected", [
        ("/something/this", "/something/that", "that"),
        ("/something/this", "/something/that/", "that/"),
        ("/something/this", "/something/this", "this"),
        ("/something/this", "/something/this/", "this/"),
        ("/something/this", "/", "../"),
        ("/", "/this/that/", "this/that/"),
        ("/something/this/", "/something/that", "../that"),
        ("/something/this/", "/other/that", "../../other/that"),
        ("/something/this/", "/other/that", "../../other/that"),
        ("/something/this/", "/something/this/that", "that"),
        ("/something/this/", "/something/this/that/there", "that/there"),
    ])
    def test_relpath(self, url, path, expected):
        url = URL("http://example.com" + url)
        relpath = url.relpath(path)
        assert relpath == expected

    def test_relpath_edge_case(self):
        with pytest.raises(ValueError):
            URL("http://qwe/path").relpath("lkjqwe")

    def test_netloc(self):
        assert URL("http://qwe/").netloc == 'qwe'
        assert URL("http://foo:pass@qwe/").netloc == 'foo:pass@qwe'

    def test_replace(self):
        url = URL("http://qwe/foo?bar=ham#hash")
        assert url.replace(scheme='https').url == "https://qwe/foo?bar=ham#hash"
        assert url.replace(scheme='').url == "//qwe/foo?bar=ham#hash"
        assert url.replace(netloc='world').url == "http://world/foo?bar=ham#hash"
        assert url.replace(netloc='').url == "http:///foo?bar=ham#hash"
        assert url.replace(path='/').url == "http://qwe/?bar=ham#hash"
        assert url.replace(path='').url == "http://qwe?bar=ham#hash"
        assert url.replace(query='').url == "http://qwe/foo#hash"
        assert url.replace(fragment='').url == "http://qwe/foo?bar=ham"
        assert url.replace(fragment='foo').url == "http://qwe/foo?bar=ham#foo"
        # original shouldn't have changed
        assert url.url == "http://qwe/foo?bar=ham#hash"
        # trying to change something not existing does nothing
        assert url.replace(foo='https').url == "http://qwe/foo?bar=ham#hash"

    def test_replace_nothing(self):
        url = URL("http://qwe/foo?bar=ham#hash")
        new_url = url.replace()
        assert new_url is not url
        assert new_url.url == url.url

    def test_comparison(self):
        base = URL('https://pypi.python.org')
        url = URL('https://pypi.python.org/simple/foo').replace(path='')
        assert base == url
        assert not (base != url)

    def test_username(self):
        assert URL('http://example.com').username is None
        assert URL('http://user@example.com').username == 'user'
        assert URL('http://user:password@example.com').username == 'user'
        assert URL('https://example.com:443').username is None
        assert URL('https://user@example.com:443').username == 'user'
        assert URL('https://user:password@example.com:443').username == 'user'

    def test_password(self):
        assert URL('http://example.com').password is None
        assert URL('http://user@example.com').password is None
        assert URL('http://user:password@example.com').password == 'password'
        assert URL('https://example.com:443').password is None
        assert URL('https://user@example.com:443').password is None
        assert URL('https://user:password@example.com:443').password == 'password'

    def test_hostname(self):
        assert URL('http://example.com').hostname == 'example.com'
        assert URL('http://user@example.com').hostname == 'example.com'
        assert URL('http://user:password@example.com').hostname == 'example.com'
        assert URL('https://example.com:443').hostname == 'example.com'
        assert URL('https://user@example.com:443').hostname == 'example.com'
        assert URL('https://user:password@example.com:443').hostname == 'example.com'

    def test_port(self):
        assert URL('http://example.com').port is None
        assert URL('http://user@example.com').port is None
        assert URL('http://user:password@example.com').port is None
        assert URL('https://example.com:443').port == 443
        assert URL('https://user@example.com:443').port == 443
        assert URL('https://user:password@example.com:443').port == 443


#
# test torelpath/fromrelpath
#

@pytest.mark.parametrize("url", [
    "http://codespeak.net",
    "https://codespeak.net",
    "http://codespeak.net/path",
    "http://codespeak.net:3123/path",
    "https://codespeak.net:80/path",
])
def test_canonical_url_path_mappings(url):
    url = URL(url)
    path = url.torelpath()
    assert path[0] != "/"
    assert posixpath.normpath(path) == path
    back_url = URL.fromrelpath(path)
    assert url == back_url

devpi-common-3.2.2/testing/test_validation.py0000644000076500000240000000133413263321713022303 0ustar fschulzestaff00000000000000
from devpi_common.validation import normalize_name, validate_metadata

import pytest


def names(*args):
    return pytest.mark.parametrize("name", args)


@names("hello-xyz", "hello_xyz", "hello.xyz", "Hello.XYZ", "Hello___XYZ")
def test_safe_name(name):
    assert normalize_name(name) == "hello-xyz"


class TestValidateMetadata:
    @names("hello", "hello-xyz", "hello1-xyz", "hello_xyz")
    def test_valid_names(self, name):
        validate_metadata(data=dict(name=name, version="1.0"))

    @names("hello_", "hello-", "-hello", "_hello1", "hel%lo", "hello#", "hello<",)
    def test_invalid(self, name):
        pytest.raises(ValueError, lambda: validate_metadata(data=dict(name=name, version="1.0")))

devpi-common-3.2.2/tox.ini0000644000076500000240000000043013263321713016372 0ustar fschulzestaff00000000000000
[pytest]
norecursedirs = attic .tox
addopts = -rs
rsyncdirs = devpi testing
rsyncignore = .tox
flakes-ignore =
    test_*.py ImportStarUsed ImportStarUsage

[tox]
envlist = py27,py34,py35,pypy

[testenv]
deps =
    pytest
    pytest-flakes
commands = py.test --flakes {posargs}